Compare commits
154 Commits: develop-20 ... hs/fix/mis
Commit SHAs:

19ac70e149, 7d66063bd9, 47c6fb750a, 8c3ac352b7, d6ac16ca16, 75e37c6db5, 3f8dcfc6ed,
07d4915e82, 77ff574d94, 5783f950cf, 1c76c88f2c, 50b56ee1ce, be521c441e, 61ffb87757,
950b4c5847, ac078f262d, fd62f0f3a8, ca977ea9e1, 0d2c624bcb, 765b6b7150, a91f96292c,
18487a45ed, 29485e2125, 7674ea0b7d, 693376ea97, 88bf2a8bcf, 03e9ca0a76, 18399d0bd1,
3aabff77d7, aa86342814, 170a276f18, 313524dc6d, 5aae6e25a5, b58a52b6ce, 32760e2885,
125feb125c, 8677063142, f015b18230, aa9e610fa6, 7d62045c30, 5b03173b99, 36fcdb8cfa,
7d5b17fbf2, d6e3292955, 60f54df964, 487df807cc, cacdf84964, e2293c758f, f5a275adf5,
615ced32cd, bc04d963e5, 11051ce5c7, 631bddc52e, b5f40aa7fb, 57e0798af2, 0161b662f7,
aa55b19680, 8cfffd88fa, 2f8dcb8097, 5b70fa8cc8, b4025e89ed, 8db74e1b2f, 1fcfbadba7,
13ec35873f, f96b6eac2b, 933a1a5cd9, b2b9914efc, 9ce9596981, fc30fe1f6b, 25a4b98359,
05c34b7312, b22842af56, 0bef028692, 935facd069, 87e5255bbc, b42f0d793d, ccca0d3354,
9699bbc7b9, c7e251de9f, d788b15529, 8e7489bc17, d234df62d7, 4a5922a0ec, 5bd184aaaf,
464c3b96fa, 60544a4e84, a664d98f37, 0e3d7efb0f, a8cd0b99f3, a43df598a1, a7163cd0fa,
fe171a560b, 24abc3294a, 2dea0073b2, 31ecefbfd2, 7363047b82, 12fe7aef65, 5da4f18188,
61c54ed28b, 677caec3c6, b914bd6638, 3caa3132aa, dbd531112c, ae5e121502, 929cfc8e5a,
bad28e7f9f, 3d63fe91b0, 95af020310, 2147b9d95e, 68636e7c19, f56675648a, 3a219d114d,
3cefa7047c, 35013773ba, e28379e98b, 93329d7f99, 9e508b0321, 2c26c429a7, 1cc63e2b7c,
4e311a22d0, 3ad99d75f9, b79c01077d, 4385f36b8d, a85f1cfa4b, 13524fa8ed, 738c73975e,
bf9d72f87b, 674cca3c4a, 7a95e2beb5, 5ab71814a9, e783a2851d, 29e3a28071, 4e7a5e9362,
89d1dfa340, 974abc8067, 2f9ad5f34d, 9555ceeb8a, 6cd74efa90, 3b3735a2cc, 2ffbc0d053,
a92419ffe4, 92c16d085f, c94024d51d, 11915ca568, 4729b6e837, 2f1978cf2f, d4045c1ef3,
a0f8aaf4e7, b7a5e9ca03, 7e4b8aa020, f5aa15034e, f210be30d8, c63741a089, 4c99ffd81f
@@ -1,7 +1,7 @@
 black==25.1.0
 clingo==5.7.1
 flake8==7.1.2
-isort==6.0.0
+isort==6.0.1
-mypy==1.11.2
+mypy==1.15.0
-types-six==1.17.0.20241205
+types-six==1.17.0.20250304
 vermin==1.6.0
.gitignore (vendored)
@@ -201,7 +201,6 @@ tramp

 # Org-mode
 .org-id-locations
-*_archive

 # flymake-mode
 *_flymake.*
@@ -54,9 +54,15 @@ concretizer:
   # Regular packages
   cmake: 2
   gmake: 2
+  python: 2
+  python-venv: 2
   py-cython: 2
   py-flit-core: 2
+  py-pip: 2
   py-setuptools: 2
+  py-wheel: 2
+  xcb-proto: 2
+  # Compilers
   gcc: 2
   llvm: 2
 # Option to specify compatibility between operating systems for reuse of compilers and packages
@@ -1761,19 +1761,24 @@ Verifying installations
 The ``spack verify`` command can be used to verify the validity of
 Spack-installed packages any time after installation.

+
+^^^^^^^^^^^^^^^^^^^^^^^^^
+``spack verify manifest``
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
 At installation time, Spack creates a manifest of every file in the
 installation prefix. For links, Spack tracks the mode, ownership, and
 destination. For directories, Spack tracks the mode, and
 ownership. For files, Spack tracks the mode, ownership, modification
-time, hash, and size. The Spack verify command will check, for every
-file in each package, whether any of those attributes have changed. It
-will also check for newly added files or deleted files from the
-installation prefix. Spack can either check all installed packages
+time, hash, and size. The ``spack verify manifest`` command will check,
+for every file in each package, whether any of those attributes have
+changed. It will also check for newly added files or deleted files from
+the installation prefix. Spack can either check all installed packages
 using the `-a,--all` or accept specs listed on the command line to
 verify.

-The ``spack verify`` command can also verify for individual files that
-they haven't been altered since installation time. If the given file
+The ``spack verify manifest`` command can also verify for individual files
+that they haven't been altered since installation time. If the given file
 is not in a Spack installation prefix, Spack will report that it is
 not owned by any package. To check individual files instead of specs,
 use the ``-f,--files`` option.
@@ -1788,6 +1793,22 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.

+^^^^^^^^^^^^^^^^^^^^^^^^^^
+``spack verify libraries``
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``spack verify libraries`` command can be used to verify that packages
+do not have accidental system dependencies. This command scans the install
+prefixes of packages for executables and shared libraries, and resolves
+their needed libraries in their RPATHs. When needed libraries cannot be
+located, an error is reported. This typically indicates that a package
+was linked against a system library, instead of a library provided by
+a Spack package.
+
+This verification can also be enabled as a post-install hook by setting
+``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
+in :ref:`config.yaml <config-yaml>`.
+
 -----------------------
 Filesystem requirements
 -----------------------
@@ -223,6 +223,10 @@ def setup(sphinx):
     ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
+    ("py:class", "llnl.util.lang.KT"),
+    ("py:class", "llnl.util.lang.VT"),
+    ("py:obj", "llnl.util.lang.KT"),
+    ("py:obj", "llnl.util.lang.VT"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -125,6 +125,8 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.

+.. _Misc Cache:
+
 --------------------
 ``misc_cache``
 --------------------
@@ -334,3 +336,52 @@ create a new alias called ``inst`` that will always call ``install -v``:

   aliases:
     inst: install -v
+
+-------------------------------
+``concretization_cache:enable``
+-------------------------------
+
+When set to ``true``, Spack will utilize a cache of solver outputs from
+successful concretization runs. When enabled, Spack will check the concretization
+cache prior to running the solver. If a previous request to solve a given
+problem is present in the cache, Spack will load the concrete specs and other
+solver data from the cache rather than running the solver. Specs not previously
+concretized will be added to the cache on a successful solve. The cache additionally
+holds solver statistics, so commands like ``spack solve`` will still return information
+about the run that produced a given solver result.
+
+This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
+Cache is cleaned.
+
+When ``false`` or omitted, all concretization requests will be performed from scratch.
+
+----------------------------
+``concretization_cache:url``
+----------------------------
+
+Path to the location where Spack will root the concretization cache. Currently this only supports
+paths on the local filesystem.
+
+Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
+
+------------------------------------
+``concretization_cache:entry_limit``
+------------------------------------
+
+Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
+
+-----------------------------------
+``concretization_cache:size_limit``
+-----------------------------------
+
+Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
@@ -14,6 +14,7 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
+* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`
@@ -670,24 +670,45 @@ This configuration sets the default compiler for all packages to
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^

-Spack environments allow an ``include`` heading in their yaml
-schema. This heading pulls in external configuration files and applies
-them to the environment.
+Spack environments allow an ``include`` heading in their yaml schema.
+This heading pulls in external configuration files and applies them to
+the environment.

 .. code-block:: yaml

   spack:
     include:
-    - relative/path/to/config.yaml
+    - environment/relative/path/to/config.yaml
     - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml
+    - path: /path/to/$os/$target/environment
+      optional: true
+    - path: /path/to/os-specific/config-dir
+      when: os == "ventura"

-Environments can include files or URLs. File paths can be relative or
-absolute. URLs include the path to the text for individual files or
-can be the path to a directory containing configuration files.
-Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
-schemes). Spack-specific, environment and user path variables may be
-used in these paths. See :ref:`config-file-variables` for more information.
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. (See
+:ref:`include-yaml` for more information on optional and conditional entries.)
+
+Files are listed using paths to individual files or directories containing them.
+Path entries may be absolute or relative to the environment or specified as
+URLs. URLs to individual files need to link to the **raw** form of the file's
+contents (e.g., `GitHub
+<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
+or `GitLab
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
+Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
+supported. Spack-specific, environment and user path variables can be used.
+(See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.

 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
lib/spack/docs/include_yaml.rst (new file, 51 lines)
@@ -0,0 +1,51 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _include-yaml:
+
+===============================
+Include Settings (include.yaml)
+===============================
+
+Spack allows you to include configuration files through ``include.yaml``.
+Using the ``include:`` heading results in pulling in external configuration
+information to be used by any Spack command.
+
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. For
+example,
+
+.. code-block:: yaml
+
+   include:
+   - /path/to/a/required/config.yaml
+   - path: /path/to/$os/$target/config
+     optional: true
+   - path: /path/to/os-specific/config-dir
+     when: os == "ventura"
+
+shows all three. The first entry, ``/path/to/a/required/config.yaml``,
+indicates that the included ``config.yaml`` file is required (so must exist).
+Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
+the path is only included if it exists. The condition ``os == "ventura"``
+in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
+path is only included when the operating system (``os``) is ``ventura``.
+
+The same conditions and variables in `Spec List References
+<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
+can be used for conditional activation in the ``when`` clauses.
+
+Included files can be specified by path or by their parent directory.
+Paths may be absolute, relative (to the configuration file including the path),
+or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
+schemes) are supported. Spack-specific, environment and user path variables
+can be used. (See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
@@ -71,6 +71,7 @@ or refer to the full manual below.

    configuration
    config_yaml
+   include_yaml
    packages_yaml
    build_settings
    environments
@@ -1,13 +1,13 @@
-sphinx==8.2.0
+sphinx==8.2.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.26.1
+python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.4
-isort==6.0.0
+pytest==8.3.5
+isort==6.0.1
 black==25.1.0
 flake8==7.1.2
 mypy==1.11.1
@@ -7,6 +7,7 @@
 import fnmatch
 import glob
 import hashlib
+import io
 import itertools
 import numbers
 import os
@@ -20,6 +21,7 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
+    IO,
     Callable,
     Deque,
     Dict,
@@ -2454,26 +2456,69 @@ class WindowsSimulatedRPath:
     and vis versa.
     """

-    def __init__(self, package, link_install_prefix=True):
+    def __init__(
+        self,
+        package,
+        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
+        link_install_prefix: bool = True,
+    ):
         """
         Args:
             package (spack.package_base.PackageBase): Package requiring links
+            base_modification_prefix (str|pathlib.Path): Path representation indicating
+                the root directory in which to establish the simulated rpath, ie where the
+                symlinks that comprise the "rpath" behavior will be installed.
+
+                Note: This is a mutually exclusive option with `link_install_prefix` using
+                both is an error.
+
+                Default: None
             link_install_prefix (bool): Link against package's own install or stage root.
                 Packages that run their own executables during build and require rpaths to
-                the build directory during build time require this option. Default: install
+                the build directory during build time require this option.
+
+                Default: install
                 root
+
+                Note: This is a mutually exclusive option with `base_modification_prefix`, using
+                both is an error.
         """
         self.pkg = package
-        self._addl_rpaths = set()
+        self._addl_rpaths: set[str] = set()
+        if link_install_prefix and base_modification_prefix:
+            raise RuntimeError(
+                "Invalid combination of arguments given to WindowsSimulated RPath.\n"
+                "Select either `link_install_prefix` to create an install prefix rpath"
+                " or specify a `base_modification_prefix` for any other link type. "
+                "Specifying both arguments is invalid."
+            )
+        if not (link_install_prefix or base_modification_prefix):
+            raise RuntimeError(
+                "Insufficient arguments given to WindowsSimulatedRpath.\n"
+                "WindowsSimulatedRPath requires one of link_install_prefix"
+                " or base_modification_prefix to be specified."
+                " Neither was provided."
+            )
+
         self.link_install_prefix = link_install_prefix
-        self._additional_library_dependents = set()
+        if base_modification_prefix:
+            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
+        else:
+            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
+        self._additional_library_dependents: set[pathlib.Path] = set()
+        if not self.link_install_prefix:
+            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")

     @property
     def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
+        base_pths = set()
+        if self.link_install_prefix:
+            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
+        base_pths |= self._additional_library_dependents
+        return base_pths

     def add_library_dependent(self, *dest):
         """
@@ -2489,6 +2534,12 @@ def add_library_dependent(self, *dest):
                 new_pth = pathlib.Path(pth).parent
             else:
                 new_pth = pathlib.Path(pth)
+            path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
+            if not path_is_in_prefix:
+                raise RuntimeError(
+                    f"Attempting to generate rpath symlink out of rpath context:\
+{str(self.base_modification_prefix)}"
+                )
             self._additional_library_dependents.add(new_pth)

     @property
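The containment check added above is plain ``pathlib.Path.is_relative_to`` (available since Python 3.9). A small, Spack-independent illustration of its behavior, with made-up paths:

```python
import pathlib

# Hypothetical prefix, for illustration only.
prefix = pathlib.Path("C:/spack/opt/pkg")

print(pathlib.Path("C:/spack/opt/pkg/bin").is_relative_to(prefix))  # True: inside the prefix
print(pathlib.Path("C:/temp/test-stage").is_relative_to(prefix))    # False: would raise above
```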
@@ -2577,6 +2628,33 @@ def establish_link(self):
             self._link(library, lib_dir)


+def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
+    """Establishes a temp Windows simulated rpath for the pkg in the testing directory
+    so an executable can test the libraries/executables with proper access
+    to dependent dlls
+
+    Note: this is a no-op on all other platforms besides Windows
+
+    Args:
+        pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
+        test_dir: the testing directory in which we should construct an rpath
+    """
+    # link_install_prefix as false ensures we're not linking into the install prefix
+    mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
+    # add the testing directory as a location to install rpath symlinks
+    mini_rpath.add_library_dependent(test_dir)
+
+    # check for whether build_directory is available, if not
+    # assume the stage root is the build dir
+    build_dir_attr = getattr(pkg, "build_directory", None)
+    build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
+    # add the build dir & build dir bin
+    mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
+    mini_rpath.add_rpath(os.path.join(build_directory))
+    # construct rpath
+    mini_rpath.establish_link()
+
+
 @system_path_filter
 @memoized
 def can_access_dir(path):
@@ -2805,6 +2883,20 @@ def keep_modification_time(*filenames):
         os.utime(f, (os.path.getatime(f), mtime))


+@contextmanager
+def temporary_file_position(stream):
+    orig_pos = stream.tell()
+    yield
+    stream.seek(orig_pos)
+
+
+@contextmanager
+def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
+    with temporary_file_position(stream):
+        stream.seek(loc, relative_to)
+        yield
+
+
 @contextmanager
 def temporary_dir(
     suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
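A self-contained sketch of how these two context managers compose: the outer one saves and restores the stream offset, the inner one moves it for the duration of the ``with`` block (standalone re-implementation for illustration; the real helpers are the ones added to ``llnl.util.filesystem`` above):

```python
import io
from contextlib import contextmanager


@contextmanager
def temporary_file_position(stream):
    # Save the current offset, run the body, then restore the offset.
    orig_pos = stream.tell()
    yield
    stream.seek(orig_pos)


@contextmanager
def current_file_position(stream, loc, relative_to=io.SEEK_CUR):
    # Temporarily move the offset, restoring it afterwards.
    with temporary_file_position(stream):
        stream.seek(loc, relative_to)
        yield


stream = io.StringIO("abcdef")
stream.read(3)                                    # offset is now 3
with current_file_position(stream, 0, io.SEEK_SET):
    assert stream.read(2) == "ab"                 # peek from the start
assert stream.read() == "def"                     # original offset was restored
```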
@@ -11,10 +11,11 @@
 import re
 import sys
 import traceback
+import types
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
+from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -707,14 +708,24 @@ def __init__(self, wrapped_object):


 class Singleton:
-    """Simple wrapper for lazily initialized singleton objects."""
+    """Wrapper for lazily initialized singleton objects."""

-    def __init__(self, factory):
+    def __init__(self, factory: Callable[[], object]):
         """Create a new singleton to be inited with the factory function.

+        Most factories will simply create the object to be initialized and
+        return it.
+
+        In some cases, e.g. when bootstrapping some global state, the singleton
+        may need to be initialized incrementally. If the factory returns a generator
+        instead of a regular object, the singleton will assign each result yielded by
+        the generator to the singleton instance. This allows methods called by
+        the factory in later stages to refer back to the singleton.
+
         Args:
-            factory (function): function taking no arguments that
-                creates the singleton instance.
+            factory (function): function taking no arguments that creates the
+                singleton instance.
+
         """
         self.factory = factory
         self._instance = None
@@ -722,7 +733,16 @@ def __init__(self, factory):
     @property
     def instance(self):
         if self._instance is None:
-            self._instance = self.factory()
+            instance = self.factory()
+
+            if isinstance(instance, types.GeneratorType):
+                # if it's a generator, assign every value
+                for value in instance:
+                    self._instance = value
+            else:
+                # if not, just assign the result like a normal singleton
+                self._instance = instance
+
         return self._instance

     def __getattr__(self, name):
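A self-contained sketch of the generator-factory behavior described in the docstring above. This is a simplified stand-in for ``llnl.util.lang.Singleton``, not the real class, and the names (``LazySingleton``, ``bootstrap``, ``CONFIG``) are illustrative:

```python
import types


class LazySingleton:
    """Simplified stand-in: resolves the factory on first use."""

    def __init__(self, factory):
        self.factory = factory
        self._instance = None

    @property
    def instance(self):
        if self._instance is None:
            instance = self.factory()
            if isinstance(instance, types.GeneratorType):
                # Assign every yielded value, so code run between yields inside
                # the factory can already see a partially initialized singleton.
                for value in instance:
                    self._instance = value
            else:
                self._instance = instance
        return self._instance

    def __getattr__(self, name):
        # Delegate attribute access to the lazily created instance.
        return getattr(self.instance, name)


def bootstrap():
    state = {"stage": "minimal"}
    yield state            # global is usable with minimal state from here on
    state["stage"] = "full"
    yield state            # final value after the expensive part of setup


CONFIG = LazySingleton(bootstrap)
print(CONFIG.instance["stage"])  # "full": the last yielded value wins
```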
@@ -1080,3 +1100,88 @@ def __set__(self, instance, value):

     def factory(self, instance, owner):
         raise NotImplementedError("must be implemented by derived classes")
+
+
+KT = TypeVar("KT")
+VT = TypeVar("VT")
+
+
+class PriorityOrderedMapping(Mapping[KT, VT]):
+    """Mapping that iterates over key according to an integer priority. If the priority is
+    the same for two keys, insertion order is what matters.
+
+    The priority is set when the key/value pair is added. If not set, the highest current priority
+    is used.
+    """
+
+    _data: Dict[KT, VT]
+    _priorities: List[Tuple[int, KT]]
+
+    def __init__(self) -> None:
+        self._data = {}
+        # Tuple of (priority, key)
+        self._priorities = []
+
+    def __getitem__(self, key: KT) -> VT:
+        return self._data[key]
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    def __iter__(self):
+        yield from (key for _, key in self._priorities)
+
+    def __reversed__(self):
+        yield from (key for _, key in reversed(self._priorities))
+
+    def reversed_keys(self):
+        """Iterates over keys from the highest priority, to the lowest."""
+        return reversed(self)
+
+    def reversed_values(self):
+        """Iterates over values from the highest priority, to the lowest."""
+        yield from (self._data[key] for _, key in reversed(self._priorities))
+
+    def _highest_priority(self) -> int:
+        if not self._priorities:
+            return 0
+        result, _ = self._priorities[-1]
+        return result
+
+    def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
+        """Adds a key/value pair to the mapping, with a specific priority.
+
+        If the priority is None, then it is assumed to be the highest priority value currently
+        in the container.
+
+        Raises:
+            ValueError: when the same priority is already in the mapping
+        """
+        if priority is None:
+            priority = self._highest_priority()
+
+        if key in self._data:
+            self.remove(key)
+
+        self._priorities.append((priority, key))
+        # We rely on sort being stable
+        self._priorities.sort(key=lambda x: x[0])
+        self._data[key] = value
+        assert len(self._data) == len(self._priorities)
+
+    def remove(self, key: KT) -> VT:
+        """Removes a key from the mapping.
+
+        Returns:
+            The value associated with the key being removed
+
+        Raises:
+            KeyError: if the key is not in the mapping
+        """
+        if key not in self._data:
+            raise KeyError(f"cannot find {key}")
+
+        popped_item = self._data.pop(key)
+        self._priorities = [(p, k) for p, k in self._priorities if k != key]
+        assert len(self._data) == len(self._priorities)
+        return popped_item
|
|||||||
__version__ = "1.0.0.dev0"
|
__version__ = "1.0.0.dev0"
|
||||||
spack_version = __version__
|
spack_version = __version__
|
||||||
|
|
||||||
|
#: The current Package API version implemented by this version of Spack. The Package API defines
|
||||||
|
#: the Python interface for packages as well as the layout of package repositories. The minor
|
||||||
|
#: version is incremented when the package API is extended in a backwards-compatible way. The major
|
||||||
|
#: version is incremented upon breaking changes. This version is changed independently from the
|
||||||
|
#: Spack version.
|
||||||
|
package_api_version = (1, 0)
|
||||||
|
|
||||||
|
#: The minimum Package API version that this version of Spack is compatible with. This should
|
||||||
|
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
|
||||||
|
#: compatibility with vX.0.
|
||||||
|
min_package_api_version = (1, 0)
|
||||||
|
|
||||||
|
|
||||||
def __try_int(v):
|
def __try_int(v):
|
||||||
try:
|
try:
|
||||||
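Since both constants are ``(major, minor)`` tuples, a compatibility check reduces to a tuple-range comparison. A hedged sketch; the helper below is invented for illustration and is not part of this PR or of Spack's API:

```python
import spack  # importable when running inside a Spack checkout


def repo_api_supported(required_api: tuple) -> bool:
    """True when a repository's required Package API falls in the supported range."""
    return spack.min_package_api_version <= required_api <= spack.package_api_version


print(repo_api_supported((1, 0)))  # True while package_api_version == (1, 0)
print(repo_api_supported((2, 0)))  # False: would need a newer Spack
```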
@@ -79,4 +91,6 @@ def get_short_version() -> str:
     "get_version",
     "get_spack_commit",
     "get_short_version",
+    "package_api_version",
+    "min_package_api_version",
 ]
@@ -528,7 +528,6 @@ def __call__(self, parser, namespace, values, option_string):
         # the const from the constructor or a value from the CLI.
         # Note that this is only called if the argument is actually
         # specified on the command line.
-        spack.config.CONFIG.ensure_scope_ordering()
         spack.config.set(self.config_path, self.const, scope="command_line")

@@ -350,9 +350,12 @@ def _config_change(config_path, match_spec_str=None):
                 if spack.config.get(key_path, scope=scope):
                     ideal_scope_to_modify = scope
                     break
+            # If we find our key in a specific scope, that's the one we want
+            # to modify. Otherwise we use the default write scope.
+            write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()

             update_path = f"{key_path}:[{str(spec)}]"
-            spack.config.add(update_path, scope=ideal_scope_to_modify)
+            spack.config.add(update_path, scope=write_scope)
         else:
             raise ValueError("'config change' can currently only change 'require' sections")

@@ -2,35 +2,48 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
+import io
+from typing import List, Optional

 import llnl.util.tty as tty
+from llnl.string import plural
+from llnl.util.filesystem import visit_directory_tree

 import spack.cmd
 import spack.environment as ev
+import spack.spec
 import spack.store
 import spack.verify
+import spack.verify_libraries
+from spack.cmd.common import arguments

-description = "check that all spack packages are on disk as installed"
+description = "verify spack installations on disk"
 section = "admin"
 level = "long"

+MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None

-def setup_parser(subparser):
-    setup_parser.parser = subparser

-    subparser.add_argument(
+def setup_parser(subparser: argparse.ArgumentParser):
+    global MANIFEST_SUBPARSER
+    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
+
+    MANIFEST_SUBPARSER = sp.add_parser(
+        "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
+    )
+    MANIFEST_SUBPARSER.add_argument(
         "-l", "--local", action="store_true", help="verify only locally installed packages"
     )
-    subparser.add_argument(
+    MANIFEST_SUBPARSER.add_argument(
         "-j", "--json", action="store_true", help="ouptut json-formatted errors"
     )
-    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
-    subparser.add_argument(
+    MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
+    MANIFEST_SUBPARSER.add_argument(
        "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
     )

-    type = subparser.add_mutually_exclusive_group()
-    type.add_argument(
+    manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
+    manifest_sp_type.add_argument(
         "-s",
         "--specs",
         action="store_const",
@@ -39,7 +52,7 @@ def setup_parser(subparser):
         default="specs",
         help="treat entries as specs (default)",
     )
-    type.add_argument(
+    manifest_sp_type.add_argument(
         "-f",
         "--files",
         action="store_const",
@@ -49,14 +62,67 @@ def setup_parser(subparser):
         help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )

+    libraries_subparser = sp.add_parser(
+        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
+    )
+
+    arguments.add_common_arguments(libraries_subparser, ["constraint"])
+

 def verify(parser, args):
+    cmd = args.verify_command
+    if cmd == "libraries":
+        return verify_libraries(args)
+    elif cmd == "manifest":
+        return verify_manifest(args)
+    parser.error("invalid verify subcommand")
+
+
+def verify_libraries(args):
+    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
+    specs_from_db = [s for s in args.specs(installed=True) if not s.external]
+
+    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
+
+    errors = 0
+    for spec in specs_from_db:
+        try:
+            pkg = spec.package
+        except Exception:
+            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
+        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
+        if error_msg is not None:
+            errors += 1
+            tty.error(error_msg)
+
+    if errors:
+        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
+        return 1
+
+
+def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
+    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
+    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
+        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
+    )
+    visit_directory_tree(spec.prefix, visitor)
+
+    if not visitor.problems:
+        return None
+
+    output = io.StringIO()
+    visitor.write(output, indent=4, brief=True)
+    message = output.getvalue().rstrip()
+    return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
+
+
+def verify_manifest(args):
+    """verify that install directories have not been modified since installation"""
     local = args.local

     if args.type == "files":
         if args.all:
-            setup_parser.parser.print_help()
-            return 1
+            MANIFEST_SUBPARSER.error("cannot use --all with --files")

         for file in args.specs_or_files:
             results = spack.verify.check_file_manifest(file)
@@ -87,8 +153,7 @@ def verify(parser, args):
         env = ev.active_environment()
         specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
     else:
-        setup_parser.parser.print_help()
-        return 1
+        MANIFEST_SUBPARSER.error("use --all or specify specs to verify")

     for spec in specs:
         tty.debug("Verifying package %s")
@@ -32,9 +32,10 @@
 import copy
 import functools
 import os
+import os.path
 import re
 import sys
-from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union

 import jsonschema

@@ -42,7 +43,6 @@

 import spack.error
 import spack.paths
-import spack.platforms
 import spack.schema
 import spack.schema.bootstrap
 import spack.schema.cdash
@@ -54,24 +54,27 @@
 import spack.schema.develop
 import spack.schema.env
 import spack.schema.env_vars
+import spack.schema.include
+import spack.schema.merged
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
 import spack.schema.upstreams
 import spack.schema.view
+import spack.util.remote_file_cache as rfc_util
-# Hacked yaml for configuration files preserves line numbers.
 import spack.util.spack_yaml as syaml
-import spack.util.web as web_util
 from spack.util.cpus import cpus_available

+from .enums import ConfigScopePriority
+
 #: Dict from section names -> schema for that section
 SECTION_SCHEMAS: Dict[str, Any] = {
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
     "env_vars": spack.schema.env_vars.schema,
+    "include": spack.schema.include.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -119,6 +122,17 @@
 #: Type used for raw YAML configuration
 YamlConfigDict = Dict[str, Any]

+#: prefix for name of included configuration scopes
+INCLUDE_SCOPE_PREFIX = "include"
+
+#: safeguard for recursive includes -- maximum include depth
+MAX_RECURSIVE_INCLUDES = 100
+
+
+def _include_cache_location():
+    """Location to cache included configuration files."""
+    return os.path.join(spack.paths.user_cache_path, "includes")
+

 class ConfigScope:
     def __init__(self, name: str) -> None:
@@ -126,6 +140,9 @@ def __init__(self, name: str) -> None:
         self.writable = False
         self.sections = syaml.syaml_dict()

+        #: names of any included scopes
+        self.included_scopes: List[str] = []
+
     def get_section_filename(self, section: str) -> str:
         raise NotImplementedError

@@ -408,26 +425,18 @@ def _method(self, *args, **kwargs):
     return _method


-class Configuration:
-    """A full Spack configuration, from a hierarchy of config files.
+ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
+ScopeWithPriority = Tuple[int, ConfigScope]

-    This class makes it easy to add a new scope on top of an existing one.
-    """
+
+class Configuration:
+    """A hierarchical configuration, merging a number of scopes at different priorities."""

     # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
-    scopes: Dict[str, ConfigScope]
+    scopes: lang.PriorityOrderedMapping[str, ConfigScope]

-    def __init__(self, *scopes: ConfigScope) -> None:
-        """Initialize a configuration with an initial list of scopes.
-
-        Args:
-            scopes: list of scopes to add to this
-                Configuration, ordered from lowest to highest precedence
-
-        """
-        self.scopes = collections.OrderedDict()
-        for scope in scopes:
-            self.push_scope(scope)
+    def __init__(self) -> None:
+        self.scopes = lang.PriorityOrderedMapping()
         self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)

     def ensure_unwrapped(self) -> "Configuration":
@@ -435,36 +444,64 @@ def ensure_unwrapped(self) -> "Configuration":
         return self

     def highest(self) -> ConfigScope:
-        """Scope with highest precedence"""
-        return next(reversed(self.scopes.values()))  # type: ignore
+        """Scope with the highest precedence"""
+        return next(self.scopes.reversed_values())  # type: ignore

     @_config_mutator
-    def ensure_scope_ordering(self):
-        """Ensure that scope order matches documented precedent"""
-        # FIXME: We also need to consider that custom configurations and other orderings
-        # may not be preserved correctly
-        if "command_line" in self.scopes:
-            # TODO (when dropping python 3.6): self.scopes.move_to_end
-            self.scopes["command_line"] = self.remove_scope("command_line")
-
-    @_config_mutator
-    def push_scope(self, scope: ConfigScope) -> None:
-        """Add a higher precedence scope to the Configuration."""
-        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
-        self.scopes[scope.name] = scope
-
-    @_config_mutator
-    def pop_scope(self) -> ConfigScope:
-        """Remove the highest precedence scope and return it."""
-        name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
-        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
-        return scope
+    def push_scope(
+        self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
+    ) -> None:
+        """Adds a scope to the Configuration, at a given priority.
+
+        If a priority is not given, it is assumed to be the current highest priority.
+
+        Args:
+            scope: scope to be added
+            priority: priority of the scope
+        """
+        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
+
+        # TODO: As a follow on to #48784, change this to create a graph of the
+        # TODO: includes AND ensure properly sorted such that the order included
+        # TODO: at the highest level is reflected in the value of an option that
+        # TODO: is set in multiple included files.
+        # before pushing the scope itself, push any included scopes recursively, at same priority
+        includes = scope.get_section("include")
+        if includes:
+            include_paths = [included_path(data) for data in includes["include"]]
+            for path in reversed(include_paths):
+                included_scope = include_path_scope(path)
+                if not included_scope:
+                    continue
+
+                if _depth + 1 > MAX_RECURSIVE_INCLUDES:  # make sure we're not recursing endlessly
+                    mark = path.path._start_mark if syaml.marked(path.path) else ""  # type: ignore
+                    raise RecursiveIncludeError(
+                        f"Maximum include recursion exceeded in {path.path}", str(mark)
+                    )
+
+                # record this inclusion so that remove_scope() can use it
+                scope.included_scopes.append(included_scope.name)
+                self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
+
+        self.scopes.add(scope.name, value=scope, priority=priority)

     @_config_mutator
     def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
-        """Remove scope by name; has no effect when ``scope_name`` does not exist"""
-        scope = self.scopes.pop(scope_name, None)
-        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
+        """Removes a scope by name, and returns it. If the scope does not exist, returns None."""
+        try:
+            scope = self.scopes.remove(scope_name)
+            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
+        except KeyError as e:
+            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
+            return None
+
+        # transitively remove included scopes
+        for inc in scope.included_scopes:
+            assert inc in self.scopes, f"Included scope '{inc}' was never added to configuration!"
+            self.remove_scope(inc)
+        scope.included_scopes.clear()  # clean up includes for bookkeeping
+
         return scope

     @property
@@ -473,15 +510,13 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
         return (s for s in self.scopes.values() if s.writable)

     def highest_precedence_scope(self) -> ConfigScope:
-        """Writable scope with highest precedence."""
-        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore
+        """Writable scope with the highest precedence."""
+        return next(s for s in self.scopes.reversed_values() if s.writable)

     def highest_precedence_non_platform_scope(self) -> ConfigScope:
-        """Writable non-platform scope with highest precedence"""
+        """Writable non-platform scope with the highest precedence"""
         return next(
-            s
-            for s in reversed(self.scopes.values())  # type: ignore
-            if s.writable and not s.is_platform_dependent
+            s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
         )

     def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -748,7 +783,7 @@ def override(
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        CONFIG.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope, priority=None)
     else:
         base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
@@ -762,7 +797,7 @@ def override(
             break

         overrides = InternalConfigScope(scope_name)
-        CONFIG.push_scope(overrides)
+        CONFIG.push_scope(overrides, priority=None)
         CONFIG.set(path_or_scope, value, scope=scope_name)

     try:
@@ -772,13 +807,86 @@ def override(
|
|||||||
assert scope is overrides
|
assert scope is overrides
|
||||||
|
|
||||||
|
|
||||||
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
|
def _add_platform_scope(
|
||||||
|
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
|
||||||
|
) -> None:
|
||||||
"""Add a platform-specific subdirectory for the current platform."""
|
"""Add a platform-specific subdirectory for the current platform."""
|
||||||
|
import spack.platforms # circular dependency
|
||||||
|
|
||||||
platform = spack.platforms.host().name
|
platform = spack.platforms.host().name
|
||||||
scope = DirectoryConfigScope(
|
scope = DirectoryConfigScope(
|
||||||
f"{name}/{platform}", os.path.join(path, platform), writable=writable
|
f"{name}/{platform}", os.path.join(path, platform), writable=writable
|
||||||
)
|
)
|
||||||
cfg.push_scope(scope)
|
cfg.push_scope(scope, priority=priority)
|
||||||
|
|
||||||
|
|
||||||
|
#: Class for the relevance of an optional path conditioned on a limited
|
||||||
|
#: python code that evaluates to a boolean and or explicit specification
|
||||||
|
#: as optional.
|
||||||
|
class IncludePath(NamedTuple):
|
||||||
|
path: str
|
||||||
|
when: str
|
||||||
|
sha256: str
|
||||||
|
optional: bool
|
||||||
|
|
||||||
|
|
||||||
|
def included_path(entry: Union[str, dict]) -> IncludePath:
|
||||||
|
"""Convert the included path entry into an IncludePath.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entry: include configuration entry
|
||||||
|
|
||||||
|
Returns: converted entry, where an empty ``when`` means the path is
|
||||||
|
not conditionally included
|
||||||
|
"""
|
||||||
|
if isinstance(entry, str):
|
||||||
|
return IncludePath(path=entry, sha256="", when="", optional=False)
|
||||||
|
|
||||||
|
path = entry["path"]
|
||||||
|
sha256 = entry.get("sha256", "")
|
||||||
|
when = entry.get("when", "")
|
||||||
|
optional = entry.get("optional", False)
|
||||||
|
return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
|
||||||
|
|
||||||
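For illustration, a small round-trip through `included_path`, showing the two entry forms the hunk above accepts. The paths and checksum are made up.

```python
from spack.config import included_path

# A plain string is an unconditional, required include.
simple = included_path("$spack/etc/extra.yaml")
assert simple.when == "" and simple.sha256 == "" and not simple.optional

# A dictionary entry may add a condition, a checksum, and an optional flag.
remote = included_path(
    {"path": "https://example.com/packages.yaml", "sha256": "0" * 64, "optional": True}
)
assert remote.optional and remote.when == ""
```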

+def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
+    """Instantiate an appropriate configuration scope for the given path.
+
+    Args:
+        include: optional include path
+
+    Returns: configuration scope
+
+    Raises:
+        ValueError: included path has an unsupported URL scheme, is required
+            but does not exist; configuration stage directory argument is missing
+        ConfigFileError: unable to access remote configuration file(s)
+    """
+    # circular dependencies
+    import spack.spec
+
+    if (not include.when) or spack.spec.eval_conditional(include.when):
+        config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
+        if not config_path:
+            raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
+
+        if os.path.isdir(config_path):
+            # directories are treated as regular ConfigScopes
+            config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
+            tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
+            return DirectoryConfigScope(config_name, config_path)
+
+        if os.path.exists(config_path):
+            # files are assumed to be SingleFileScopes
+            config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
+            tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
+            return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
+
+        if not include.optional:
+            path = f" at ({config_path})" if config_path != include.path else ""
+            raise ValueError(f"Required path ({include.path}) does not exist{path}")
+
+    return None


 def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -806,18 +914,17 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
     return config_paths


-def create() -> Configuration:
+def create_incremental() -> Generator[Configuration, None, None]:
     """Singleton Configuration instance.

     This constructs one instance associated with this module and returns
     it. It is bundled inside a function so that configuration can be
     initialized lazily.
     """
-    cfg = Configuration()

     # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
-    cfg.push_scope(builtin)
+    cfg = create_from(
+        (ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
+    )

     # Builtin paths to configuration files in Spack
     configuration_paths = [
@@ -847,16 +954,29 @@ def create() -> Configuration:

     # add each scope and its platform-specific directory
     for name, path in configuration_paths:
-        cfg.push_scope(DirectoryConfigScope(name, path))
+        cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
+        # Each scope can have per-platform overrides in subdirectories
+        _add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)

-        # Each scope can have per-platfom overrides in subdirectories
-        _add_platform_scope(cfg, name, path)
+    # yield the config incrementally so that each config level's init code can get
+    # data from the one below. This can be tricky, but it enables us to have a
+    # single unified config system.
+    #
+    # TODO: think about whether we want to restrict what types of config can be used
+    # at each level. e.g., we may want to just more forcibly disallow remote
+    # config (which uses ssl and other config options) for some of the scopes,
+    # to make the bootstrap issues more explicit, even if allowing config scope
+    # init to reference lower scopes is more flexible.
+    yield cfg

-    return cfg
+
+def create() -> Configuration:
+    """Create a configuration using create_incremental(), return the last yielded result."""
+    return list(create_incremental())[-1]

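A sketch of how a caller could consume the generator form. It assumes only what the hunk above shows: each yield is a progressively more complete `Configuration`, and `create()` keeps the last one.

```python
import spack.config

last = None
for partial in spack.config.create_incremental():
    # Bootstrap-sensitive code could read lower-priority settings from
    # `partial` here, before higher scopes are pushed.
    last = partial

assert last is not None
assert isinstance(last, spack.config.Configuration)
```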

 #: This is the singleton configuration instance for Spack.
-CONFIG: Configuration = lang.Singleton(create)  # type: ignore
+CONFIG: Configuration = lang.Singleton(create_incremental)  # type: ignore


 def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -952,10 +1072,11 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:

     Accepts the path syntax described in ``get()``.
     """
-    return CONFIG.set(path, value, scope)
+    result = CONFIG.set(path, value, scope)
+    return result


-def scopes() -> Dict[str, ConfigScope]:
+def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes

@@ -1409,7 +1530,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:

 @contextlib.contextmanager
 def use_configuration(
-    *scopes_or_paths: Union[ConfigScope, str]
+    *scopes_or_paths: Union[ScopeWithOptionalPriority, str]
 ) -> Generator[Configuration, None, None]:
     """Use the configuration scopes passed as arguments within the context manager.

@@ -1424,7 +1545,7 @@ def use_configuration(
     global CONFIG

     # Normalize input and construct a Configuration object
-    configuration = _config_from(scopes_or_paths)
+    configuration = create_from(*scopes_or_paths)
     CONFIG.clear_caches(), configuration.clear_caches()

     saved_config, CONFIG = CONFIG, configuration
@@ -1435,115 +1556,44 @@ def use_configuration(
     CONFIG = saved_config


-@lang.memoized
-def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
-    scopes = []
-    for scope_or_path in scopes_or_paths:
-        # If we have a config scope we are already done
-        if isinstance(scope_or_path, ConfigScope):
-            scopes.append(scope_or_path)
-            continue
+def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
+    if isinstance(entry, tuple):
+        return entry
+
+    default_priority = ConfigScopePriority.CONFIG_FILES
+    if isinstance(entry, ConfigScope):
+        return default_priority, entry

     # Otherwise we need to construct it
-    path = os.path.normpath(scope_or_path)
+    path = os.path.normpath(entry)
     assert os.path.isdir(path), f'"{path}" must be a directory'
     name = os.path.basename(path)
-    scopes.append(DirectoryConfigScope(name, path))
+    return default_priority, DirectoryConfigScope(name, path)

-    configuration = Configuration(*scopes)
-    return configuration
-

-def raw_github_gitlab_url(url: str) -> str:
-    """Transform a github URL to the raw form to avoid undesirable html.
+@lang.memoized
+def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
+    """Creates a configuration object from the scopes passed in input.

     Args:
-        url: url to be converted to raw form
+        *scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
+            If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
+            string is given, a DirectoryConfigScope is created from it.

-    Returns:
-        Raw github/gitlab url or the original url
+    Examples:
+        >>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
+        >>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
+        >>> cfg = create_from(
+        ...     (ConfigScopePriority.COMMAND_LINE, cl_scope),
+        ...     (ConfigScopePriority.BUILTIN, builtin_scope)
+        ... )
     """
-    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
-    # actual URL under https://raw.githubusercontent.com/ with '/blob'
-    # removed and or, '/blame' if needed.
-    if "github" in url or "gitlab" in url:
-        return url.replace("/blob/", "/raw/")
+    scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
+    result = Configuration()
+    for priority, scope in scopes_with_priority:
+        result.push_scope(scope, priority=priority)
+    return result

-    return url
-
-
-def collect_urls(base_url: str) -> list:
-    """Return a list of configuration URLs.
-
-    Arguments:
-        base_url: URL for a configuration (yaml) file or a directory
-            containing yaml file(s)
-
-    Returns:
-        List of configuration file(s) or empty list if none
-    """
-    if not base_url:
-        return []
-
-    extension = ".yaml"
-
-    if base_url.endswith(extension):
-        return [base_url]
-
-    # Collect configuration URLs if the base_url is a "directory".
-    _, links = web_util.spider(base_url, 0)
-    return [link for link in links if link.endswith(extension)]
-
-
-def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
-    """Retrieve configuration file(s) at the specified URL.
-
-    Arguments:
-        url: URL for a configuration (yaml) file or a directory containing
-            yaml file(s)
-        dest_dir: destination directory
-        skip_existing: Skip files that already exist in dest_dir if
-            ``True``; otherwise, replace those files
-
-    Returns:
-        Path to the corresponding file if URL is or contains a
-        single file and it is the only file in the destination directory or
-        the root (dest_dir) directory if multiple configuration files exist
-        or are retrieved.
-    """
-
-    def _fetch_file(url):
-        raw = raw_github_gitlab_url(url)
-        tty.debug(f"Reading config from url {raw}")
-        return web_util.fetch_url_text(raw, dest_dir=dest_dir)
-
-    if not url:
-        raise ConfigFileError("Cannot retrieve configuration without a URL")
-
-    # Return the local path to the cached configuration file OR to the
-    # directory containing the cached configuration files.
-    config_links = collect_urls(url)
-    existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
-
-    paths = []
-    for config_url in config_links:
-        basename = os.path.basename(config_url)
-        if skip_existing and basename in existing_files:
-            tty.warn(
-                f"Will not fetch configuration from {config_url} since a "
-                f"version already exists in {dest_dir}"
-            )
-            path = os.path.join(dest_dir, basename)
-        else:
-            path = _fetch_file(config_url)
-
-        if path:
-            paths.append(path)
-
-    if paths:
-        return dest_dir if len(paths) > 1 else paths[0]
-
-    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")


 def get_mark_from_yaml_data(obj):
@@ -1672,3 +1722,7 @@ def get_path(path, data):

     # give up and return None if nothing worked
     return None
+
+
+class RecursiveIncludeError(spack.error.SpackError):
+    """Too many levels of recursive includes."""
@@ -25,7 +25,7 @@
 }


-def _check_concrete(spec):
+def _check_concrete(spec: "spack.spec.Spec") -> None:
     """If the spec is not concrete, raise a ValueError"""
     if not spec.concrete:
         raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
         spec = _get_spec(prefix)

         if spec:
-            spec.prefix = prefix
+            spec.set_prefix(prefix)
             specs.append(spec)
             continue

@@ -84,7 +84,7 @@ class DirectoryLayout:

     def __init__(
         self,
-        root,
+        root: str,
         *,
         projections: Optional[Dict[str, str]] = None,
         hash_length: Optional[int] = None,
@@ -120,17 +120,17 @@ def __init__(
         self.manifest_file_name = "install_manifest.json"

     @property
-    def hidden_file_regexes(self):
+    def hidden_file_regexes(self) -> Tuple[str]:
         return ("^{0}$".format(re.escape(self.metadata_dir)),)

-    def relative_path_for_spec(self, spec):
+    def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str:
         _check_concrete(spec)

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format_path(projection)
         return str(Path(path))

-    def write_spec(self, spec, path):
+    def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
         """Write a spec out to a file."""
         _check_concrete(spec)
         with open(path, "w", encoding="utf-8") as f:
@@ -138,7 +138,7 @@ def write_spec(self, spec, path):
             # the full provenance, so it's availabe if we want it later
             spec.to_json(f, hash=ht.dag_hash)

-    def write_host_environment(self, spec):
+    def write_host_environment(self, spec: "spack.spec.Spec") -> None:
         """The host environment is a json file with os, kernel, and spack
         versioning. We use it in the case that an analysis later needs to
         easily access this information.
@@ -148,7 +148,7 @@ def write_host_environment(self, spec):
         with open(env_file, "w", encoding="utf-8") as fd:
             sjson.dump(environ, fd)

-    def read_spec(self, path):
+    def read_spec(self, path: str) -> "spack.spec.Spec":
         """Read the contents of a file and parse them as a spec"""
         try:
             with open(path, encoding="utf-8") as f:
@@ -159,26 +159,28 @@ def read_spec(self, path):
                     # Too late for conversion; spec_file_path() already called.
                     spec = spack.spec.Spec.from_yaml(f)
                 else:
-                    raise SpecReadError(
-                        "Did not recognize spec file extension:" " {0}".format(extension)
-                    )
+                    raise SpecReadError(f"Did not recognize spec file extension: {extension}")
         except Exception as e:
             if spack.config.get("config:debug"):
                 raise
-            raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))
+            raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}")

         # Specs read from actual installations are always concrete
         spec._mark_concrete()
         return spec

-    def spec_file_path(self, spec):
+    def spec_file_path(self, spec: "spack.spec.Spec") -> str:
         """Gets full path to spec file"""
         _check_concrete(spec)
         yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
         json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
         return yaml_path if os.path.exists(yaml_path) else json_path

-    def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
+    def deprecated_file_path(
+        self,
+        deprecated_spec: "spack.spec.Spec",
+        deprecator_spec: Optional["spack.spec.Spec"] = None,
+    ) -> str:
         """Gets full path to spec file for deprecated spec

         If the deprecator_spec is provided, use that. Otherwise, assume
@@ -212,16 +214,16 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):

         return yaml_path if os.path.exists(yaml_path) else json_path

-    def metadata_path(self, spec):
+    def metadata_path(self, spec: "spack.spec.Spec") -> str:
         return os.path.join(spec.prefix, self.metadata_dir)

-    def env_metadata_path(self, spec):
+    def env_metadata_path(self, spec: "spack.spec.Spec") -> str:
         return os.path.join(self.metadata_path(spec), "install_environment.json")

-    def build_packages_path(self, spec):
+    def build_packages_path(self, spec: "spack.spec.Spec") -> str:
         return os.path.join(self.metadata_path(spec), self.packages_dir)

-    def create_install_directory(self, spec):
+    def create_install_directory(self, spec: "spack.spec.Spec") -> None:
         _check_concrete(spec)

         # Create install directory with properly configured permissions
@@ -239,7 +241,7 @@ def create_install_directory(self, spec):

         self.write_spec(spec, self.spec_file_path(spec))

-    def ensure_installed(self, spec):
+    def ensure_installed(self, spec: "spack.spec.Spec") -> None:
         """
         Throws InconsistentInstallDirectoryError if:
         1. spec prefix does not exist
@@ -266,7 +268,7 @@ def ensure_installed(self, spec):
                 "Spec file in %s does not match hash!" % spec_file_path
             )

-    def path_for_spec(self, spec):
+    def path_for_spec(self, spec: "spack.spec.Spec") -> str:
         """Return absolute path from the root to a directory for the spec."""
         _check_concrete(spec)

@@ -277,23 +279,13 @@ def path_for_spec(self, spec):
         assert not path.startswith(self.root)
         return os.path.join(self.root, path)

-    def remove_install_directory(self, spec, deprecated=False):
+    def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None:
         """Removes a prefix and any empty parent directories from the root.
         Raised RemoveFailedError if something goes wrong.
         """
         path = self.path_for_spec(spec)
         assert path.startswith(self.root)

-        # Windows readonly files cannot be removed by Python
-        # directly, change permissions before attempting to remove
-        if sys.platform == "win32":
-            kwargs = {
-                "ignore_errors": False,
-                "onerror": fs.readonly_file_handler(ignore_errors=False),
-            }
-        else:
-            kwargs = {}  # the default value for ignore_errors is false
-
         if deprecated:
             if os.path.exists(path):
                 try:
@@ -304,7 +296,16 @@ def remove_install_directory(self, spec, deprecated=False):
                     raise RemoveFailedError(spec, path, e) from e
         elif os.path.exists(path):
             try:
-                shutil.rmtree(path, **kwargs)
+                if sys.platform == "win32":
+                    # Windows readonly files cannot be removed by Python
+                    # directly, change permissions before attempting to remove
+                    shutil.rmtree(
+                        path,
+                        ignore_errors=False,
+                        onerror=fs.readonly_file_handler(ignore_errors=False),
+                    )
+                else:
+                    shutil.rmtree(path)
             except OSError as e:
                 raise RemoveFailedError(spec, path, e) from e

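The Windows branch above relies on `llnl.util.filesystem.readonly_file_handler` to clear read-only bits from the `onerror` callback. A standalone sketch of the same removal logic, with a hypothetical usage shown in a comment:

```python
import shutil
import sys

import llnl.util.filesystem as fs


def force_rmtree(path: str) -> None:
    """Remove a tree; on Windows, clear read-only bits via the onerror callback."""
    if sys.platform == "win32":
        shutil.rmtree(
            path, ignore_errors=False, onerror=fs.readonly_file_handler(ignore_errors=False)
        )
    else:
        shutil.rmtree(path)


# force_rmtree("/tmp/some-spack-prefix")  # hypothetical prefix
```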
@@ -12,3 +12,13 @@ class InstallRecordStatus(enum.Flag):
     DEPRECATED = enum.auto()
     MISSING = enum.auto()
     ANY = INSTALLED | DEPRECATED | MISSING


+class ConfigScopePriority(enum.IntEnum):
+    """Priorities of the different kind of config scopes used by Spack"""
+
+    BUILTIN = 0
+    CONFIG_FILES = 1
+    CUSTOM = 2
+    ENVIRONMENT = 3
+    COMMAND_LINE = 4
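A sketch of how these priorities order scopes regardless of push order; the scope names and values are hypothetical, and the behavior assumed is exactly what the config hunks above describe.

```python
import spack.config
from spack.enums import ConfigScopePriority

cfg = spack.config.Configuration()
cfg.push_scope(
    spack.config.InternalConfigScope("cli", {"config": {"build_jobs": 16}}),
    priority=ConfigScopePriority.COMMAND_LINE,
)
cfg.push_scope(
    spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
    priority=ConfigScopePriority.BUILTIN,
)
# COMMAND_LINE (4) outranks BUILTIN (0) even though it was pushed first.
assert cfg.get("config:build_jobs") == 16
```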
@@ -10,8 +10,6 @@
 import re
 import shutil
 import stat
-import urllib.parse
-import urllib.request
 import warnings
 from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union

@@ -32,7 +30,6 @@
 import spack.paths
 import spack.repo
 import spack.schema.env
-import spack.schema.merged
 import spack.spec
 import spack.spec_list
 import spack.store
@@ -43,7 +40,6 @@
 import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
-import spack.util.url
 from spack import traverse
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
@@ -51,6 +47,8 @@
 from spack.spec_list import SpecList
 from spack.util.path import substitute_path_variables

+from ..enums import ConfigScopePriority
+
 SpecPair = spack.concretize.SpecPair

 #: environment variable used to indicate the active environment
@@ -387,6 +385,7 @@ def create_in_dir(
         # dev paths in this environment to refer to their original
         # locations.
         _rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
+        _rewrite_relative_repos_paths_on_relocation(env, init_file_dir)

     return env

@@ -403,8 +402,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
             dev_path = substitute_path_variables(entry["path"])
             expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)

-            # Skip if the expanded path is the same (e.g. when absolute)
-            if dev_path == expanded_path:
+            # Skip if the substituted and expanded path is the same (e.g. when absolute)
+            if entry["path"] == expanded_path:
                 continue

             tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
@@ -419,6 +418,34 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
         env._re_read()


+def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
+    """When initializing the environment from a manifest file and we plan
+    to store the environment in a different directory, we have to rewrite
+    relative repo paths to absolute ones and expand environment variables."""
+    with env:
+        repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
+        if not repos_specs:
+            return
+        for i, entry in enumerate(repos_specs):
+            repo_path = substitute_path_variables(entry)
+            expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)

+            # Skip if the substituted and expanded path is the same (e.g. when absolute)
+            if entry == expanded_path:
+                continue

+            tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))

+            repos_specs[i] = expanded_path

+        spack.config.set("repos", repos_specs, scope=env.scope_name)

+    env.repos_specs = None
+    # If we changed the environment's spack.yaml scope, that will not be reflected
+    # in the manifest that we read
+    env._re_read()

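The repo-path rewrite above mirrors the substitution and canonicalization already used for `develop` paths. A self-contained sketch with made-up values, using only helpers that appear in the hunk:

```python
import spack.util.path
from spack.util.path import substitute_path_variables

manifest_dir = "/home/user/project"  # hypothetical location of the manifest
entry = "./spack_repo"               # hypothetical relative `repos:` entry

expanded = spack.util.path.canonicalize_path(
    substitute_path_variables(entry), default_wd=manifest_dir
)
# `expanded` is now absolute; only entries that actually change are written
# back to the environment's `repos` section.
print(expanded)
```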
 def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
     """Returns the directory associated with a named environment.

@@ -546,13 +573,6 @@ def _write_yaml(data, str_or_file):
     syaml.dump_config(data, str_or_file, default_flow_style=False)


-def _eval_conditional(string):
-    """Evaluate conditional definitions using restricted variable scope."""
-    valid_variables = spack.spec.get_host_environment()
-    valid_variables.update({"re": re, "env": os.environ})
-    return eval(string, valid_variables)
-
-
 def _is_dev_spec_and_has_changed(spec):
     """Check if the passed spec is a dev build and whether it has changed since the
     last installation"""
@@ -985,7 +1005,7 @@ def _process_definition(self, entry):
         """Process a single spec definition item."""
         when_string = entry.get("when")
         if when_string is not None:
-            when = _eval_conditional(when_string)
+            when = spack.spec.eval_conditional(when_string)
             assert len([x for x in entry if x != "when"]) == 1
         else:
             when = True
@@ -1530,9 +1550,6 @@ def _get_specs_to_concretize(
         return new_user_specs, kept_user_specs, specs_to_concretize

     def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
-        # Avoid cyclic dependency
-        import spack.solver.asp
-
         # Exit early if the set of concretized specs is the set of user specs
         new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
         if not new_user_specs:
@@ -2392,6 +2409,8 @@ def invalidate_repository_cache(self):

     def __enter__(self):
         self._previous_active = _active_environment
+        if self._previous_active:
+            deactivate()
         activate(self)
         return self

@@ -2641,20 +2660,23 @@ def _ensure_env_dir():
         # error handling for bad manifests is handled on other code paths
         return

+    # TODO: make this recursive
     includes = manifest[TOP_LEVEL_KEY].get("include", [])
     for include in includes:
-        if os.path.isabs(include):
+        included_path = spack.config.included_path(include)
+        path = included_path.path
+        if os.path.isabs(path):
             continue

-        abspath = pathlib.Path(os.path.normpath(environment_dir / include))
+        abspath = pathlib.Path(os.path.normpath(environment_dir / path))
         common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
         if common_path != environment_dir:
-            tty.debug(f"Will not copy relative include from outside environment: {include}")
+            tty.debug(f"Will not copy relative include file from outside environment: {path}")
             continue

-        orig_abspath = os.path.normpath(envfile.parent / include)
+        orig_abspath = os.path.normpath(envfile.parent / path)
         if not os.path.exists(orig_abspath):
-            tty.warn(f"Included file does not exist; will not copy: '{include}'")
+            tty.warn(f"Included file does not exist; will not copy: '{path}'")
             continue

         fs.touchp(abspath)
@@ -2877,7 +2899,7 @@ def extract_name(_item):
             continue

         condition_str = item.get("when", "True")
-        if not _eval_conditional(condition_str):
+        if not spack.spec.eval_conditional(condition_str):
             continue

         yield idx, item
@@ -2938,127 +2960,20 @@ def __iter__(self):
     def __str__(self):
         return str(self.manifest_file)

-    @property
-    def included_config_scopes(self) -> List[spack.config.ConfigScope]:
-        """List of included configuration scopes from the manifest.
-
-        Scopes are listed in the YAML file in order from highest to
-        lowest precedence, so configuration from earlier scope will take
-        precedence over later ones.
-
-        This routine returns them in the order they should be pushed onto
-        the internal scope stack (so, in reverse, from lowest to highest).
-
-        Returns: Configuration scopes associated with the environment manifest
-
-        Raises:
-            SpackEnvironmentError: if the manifest includes a remote file but
-                no configuration stage directory has been identified
-        """
-        scopes: List[spack.config.ConfigScope] = []
-
-        # load config scopes added via 'include:', in reverse so that
-        # highest-precedence scopes are last.
-        includes = self[TOP_LEVEL_KEY].get("include", [])
-        missing = []
-        for i, config_path in enumerate(reversed(includes)):
-            # allow paths to contain spack config/environment variables, etc.
-            config_path = substitute_path_variables(config_path)
-            include_url = urllib.parse.urlparse(config_path)
-
-            # If scheme is not valid, config_path is not a url
-            # of a type Spack is generally aware
-            if spack.util.url.validate_scheme(include_url.scheme):
-                # Transform file:// URLs to direct includes.
-                if include_url.scheme == "file":
-                    config_path = urllib.request.url2pathname(include_url.path)
-
-                # Any other URL should be fetched.
-                elif include_url.scheme in ("http", "https", "ftp"):
-                    # Stage any remote configuration file(s)
-                    staged_configs = (
-                        os.listdir(self.config_stage_dir)
-                        if os.path.exists(self.config_stage_dir)
-                        else []
-                    )
-                    remote_path = urllib.request.url2pathname(include_url.path)
-                    basename = os.path.basename(remote_path)
-                    if basename in staged_configs:
-                        # Do NOT re-stage configuration files over existing
-                        # ones with the same name since there is a risk of
-                        # losing changes (e.g., from 'spack config update').
-                        tty.warn(
-                            "Will not re-stage configuration from {0} to avoid "
-                            "losing changes to the already staged file of the "
-                            "same name.".format(remote_path)
-                        )
-
-                        # Recognize the configuration stage directory
-                        # is flattened to ensure a single copy of each
-                        # configuration file.
-                        config_path = self.config_stage_dir
-                        if basename.endswith(".yaml"):
-                            config_path = os.path.join(config_path, basename)
-                    else:
-                        staged_path = spack.config.fetch_remote_configs(
-                            config_path, str(self.config_stage_dir), skip_existing=True
-                        )
-                        if not staged_path:
-                            raise SpackEnvironmentError(
-                                "Unable to fetch remote configuration {0}".format(config_path)
-                            )
-                        config_path = staged_path
-
-                elif include_url.scheme:
-                    raise ValueError(
-                        f"Unsupported URL scheme ({include_url.scheme}) for "
-                        f"environment include: {config_path}"
-                    )
-
-            # treat relative paths as relative to the environment
-            if not os.path.isabs(config_path):
-                config_path = os.path.join(self.manifest_dir, config_path)
-                config_path = os.path.normpath(os.path.realpath(config_path))
-
-            if os.path.isdir(config_path):
-                # directories are treated as regular ConfigScopes
-                config_name = f"env:{self.name}:{os.path.basename(config_path)}"
-                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
-                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
-            elif os.path.exists(config_path):
-                # files are assumed to be SingleFileScopes
-                config_name = f"env:{self.name}:{config_path}"
-                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
-                scopes.append(
-                    spack.config.SingleFileScope(
-                        config_name, config_path, spack.schema.merged.schema
-                    )
-                )
-            else:
-                missing.append(config_path)
-                continue
-
-        if missing:
-            msg = "Detected {0} missing include path(s):".format(len(missing))
-            msg += "\n   {0}".format("\n   ".join(missing))
-            raise spack.config.ConfigFileError(msg)
-
-        return scopes
-
     @property
     def env_config_scopes(self) -> List[spack.config.ConfigScope]:
         """A list of all configuration scopes for the environment manifest. On the first call this
         instantiates all the scopes, on subsequent calls it returns the cached list."""
         if self._config_scopes is not None:
             return self._config_scopes

         scopes: List[spack.config.ConfigScope] = [
-            *self.included_config_scopes,
             spack.config.SingleFileScope(
                 self.scope_name,
                 str(self.manifest_file),
                 spack.schema.env.schema,
                 yaml_path=[TOP_LEVEL_KEY],
-            ),
+            )
         ]
         ensure_no_disallowed_env_config_mods(scopes)
         self._config_scopes = scopes
@@ -3067,14 +2982,12 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
     def prepare_config_scope(self) -> None:
         """Add the manifest's scopes to the global configuration search path."""
         for scope in self.env_config_scopes:
-            spack.config.CONFIG.push_scope(scope)
-        spack.config.CONFIG.ensure_scope_ordering()
+            spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)

     def deactivate_config_scope(self) -> None:
         """Remove any of the manifest's scopes from the global config path."""
         for scope in self.env_config_scopes:
             spack.config.CONFIG.remove_scope(scope.name)
-        spack.config.CONFIG.ensure_scope_ordering()

     @contextlib.contextmanager
     def use_config(self):
@@ -10,7 +10,7 @@
 import stat
 import sys
 import tempfile
-from typing import Callable, Dict, Optional
+from typing import Callable, Dict, List, Optional

 from typing_extensions import Literal

@@ -78,7 +78,7 @@ def view_copy(

     # Order of this dict is somewhat irrelevant
     prefix_to_projection = {
-        s.prefix: view.get_projection_for_spec(s)
+        str(s.prefix): view.get_projection_for_spec(s)
         for s in spec.traverse(root=True, order="breadth")
         if not s.external
     }
@@ -185,7 +185,7 @@ def __init__(
     def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
         self._link(src, dst, self, spec)

-    def add_specs(self, *specs, **kwargs):
+    def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
         """
         Add given specs to view.

@@ -200,19 +200,19 @@ def add_specs(self, *specs, **kwargs):
         """
         raise NotImplementedError

-    def add_standalone(self, spec):
+    def add_standalone(self, spec: spack.spec.Spec) -> bool:
         """
         Add (link) a standalone package into this view.
         """
         raise NotImplementedError

-    def check_added(self, spec):
+    def check_added(self, spec: spack.spec.Spec) -> bool:
         """
         Check if the given concrete spec is active in this view.
         """
         raise NotImplementedError

-    def remove_specs(self, *specs, **kwargs):
+    def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
         """
         Removes given specs from view.

@@ -231,25 +231,25 @@ def remove_specs(self, *specs, **kwargs):
         """
         raise NotImplementedError

-    def remove_standalone(self, spec):
+    def remove_standalone(self, spec: spack.spec.Spec) -> None:
         """
         Remove (unlink) a standalone package from this view.
         """
         raise NotImplementedError

-    def get_projection_for_spec(self, spec):
+    def get_projection_for_spec(self, spec: spack.spec.Spec) -> str:
         """
         Get the projection in this view for a spec.
         """
         raise NotImplementedError

-    def get_all_specs(self):
+    def get_all_specs(self) -> List[spack.spec.Spec]:
         """
         Get all specs currently active in this view.
         """
         raise NotImplementedError

-    def get_spec(self, spec):
+    def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
         """
         Return the actual spec linked in this view (i.e. do not look it up
         in the database by name).
@@ -263,7 +263,7 @@ def get_spec(self, spec):
         """
         raise NotImplementedError

-    def print_status(self, *specs, **kwargs):
+    def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None:
         """
         Print a short summary about the given specs, detailing whether..
         * ..they are active in the view.
@@ -694,7 +694,7 @@ def _sanity_check_view_projection(self, specs):
                 raise ConflictingSpecsError(current_spec, conflicting_spec)
             seen[metadata_dir] = current_spec

-    def add_specs(self, *specs: spack.spec.Spec) -> None:
+    def add_specs(self, *specs, **kwargs) -> None:
         """Link a root-to-leaf topologically ordered list of specs into the view."""
         assert all((s.concrete for s in specs))
         if len(specs) == 0:
@@ -831,7 +831,7 @@ def get_projection_for_spec(self, spec):
 #####################
 # utility functions #
 #####################
-def get_spec_from_file(filename):
+def get_spec_from_file(filename) -> Optional[spack.spec.Spec]:
     try:
         with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
|
@@ -2,200 +2,14 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import fnmatch
|
|
||||||
import io
|
import io
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from typing import Dict, List, Union
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
|
from llnl.util.filesystem import visit_directory_tree
|
||||||
from llnl.util.lang import stable_partition
|
|
||||||
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.util.elf as elf
|
import spack.verify_libraries
|
||||||
|
|
||||||
#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
|
|
||||||
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
|
|
||||||
#: default search paths of the dynamic linker.
|
|
||||||
ALLOW_UNRESOLVED = [
|
|
||||||
# kernel
|
|
||||||
"linux-vdso.so.*",
|
|
||||||
"libselinux.so.*",
|
|
||||||
# musl libc
|
|
||||||
"ld-musl-*.so.*",
|
|
||||||
# glibc
|
|
||||||
"ld-linux*.so.*",
|
|
||||||
"ld64.so.*",
|
|
||||||
"libanl.so.*",
|
|
||||||
"libc.so.*",
|
|
||||||
"libdl.so.*",
|
|
||||||
"libm.so.*",
|
|
||||||
"libmemusage.so.*",
|
|
||||||
"libmvec.so.*",
|
|
||||||
"libnsl.so.*",
|
|
||||||
"libnss_compat.so.*",
|
|
||||||
"libnss_db.so.*",
|
|
||||||
"libnss_dns.so.*",
|
|
||||||
"libnss_files.so.*",
|
|
||||||
"libnss_hesiod.so.*",
|
|
||||||
"libpcprofile.so.*",
|
|
||||||
"libpthread.so.*",
|
|
||||||
"libresolv.so.*",
|
|
||||||
"librt.so.*",
|
|
||||||
"libSegFault.so.*",
|
|
||||||
"libthread_db.so.*",
|
|
||||||
"libutil.so.*",
|
|
||||||
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
|
|
||||||
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
|
|
||||||
"libasan.so.*",
|
|
||||||
"libatomic.so.*",
|
|
||||||
"libcc1.so.*",
|
|
||||||
"libgcc_s.so.*",
|
|
||||||
"libgfortran.so.*",
|
|
||||||
"libgomp.so.*",
|
|
||||||
"libitm.so.*",
|
|
||||||
"liblsan.so.*",
|
|
||||||
"libquadmath.so.*",
|
|
||||||
"libssp.so.*",
|
|
||||||
"libstdc++.so.*",
|
|
||||||
"libtsan.so.*",
|
|
||||||
"libubsan.so.*",
|
|
||||||
# systemd
|
|
||||||
"libudev.so.*",
|
|
||||||
# cuda driver
|
|
||||||
"libcuda.so.*",
|
|
||||||
# intel-oneapi-runtime
|
|
||||||
"libur_loader.so.*",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
|
|
||||||
return (
|
|
||||||
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
|
|
||||||
and parent.is_little_endian == child.is_little_endian
|
|
||||||
and parent.is_64_bit == child.is_64_bit
|
|
||||||
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
|
|
||||||
try:
|
|
||||||
with open(candidate_path, "rb") as g:
|
|
||||||
            return is_compatible(current_elf, elf.parse_elf(g))
    except (OSError, elf.ElfParsingError):
        return False


class Problem:
    def __init__(
        self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
    ) -> None:
        self.resolved = resolved
        self.unresolved = unresolved
        self.relative_rpaths = relative_rpaths


class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
    def __init__(self, allow_unresolved_patterns: List[str]) -> None:
        self.problems: Dict[str, Problem] = {}
        self._allow_unresolved_regex = re.compile(
            "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
        )

    def allow_unresolved(self, needed: bytes) -> bool:
        try:
            name = needed.decode("utf-8")
        except UnicodeDecodeError:
            return False
        return bool(self._allow_unresolved_regex.match(name))

    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
        # We work with byte strings for paths.
        path = os.path.join(root, rel_path).encode("utf-8")

        # For $ORIGIN interpolation: should not have a trailing dir separator.
        origin = os.path.dirname(path)

        # Retrieve the needed libs + rpaths.
        try:
            with open(path, "rb") as f:
                parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
        except (OSError, elf.ElfParsingError):
            # Not dealing with a valid ELF file.
            return

        # If there are no needed libs, all is good.
        if not parsed_elf.has_needed:
            return

        # Get the needed libs and rpaths (notice: byte strings).
        # Don't force an encoding, because paths are just a bag of bytes.
        needed_libs = parsed_elf.dt_needed_strs

        rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []

        # We only interpolate $ORIGIN, not $LIB and $PLATFORM, since they're not really
        # supported in general. Also remove empty paths.
        rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]

        # Do not allow relative rpaths (they are relative to the current working directory).
        rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)

        # If there's a / in the needed lib, it is opened directly; otherwise it requires
        # a search.
        direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)

        # Do not allow relative paths in direct libs (they are relative to the current working
        # directory).
        direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)

        resolved: Dict[bytes, bytes] = {}

        for lib in search_libs:
            if self.allow_unresolved(lib):
                continue
            for rpath in rpaths:
                candidate = os.path.join(rpath, lib)
                if candidate_matches(parsed_elf, candidate):
                    resolved[lib] = candidate
                    break
            else:
                unresolved.append(lib)

        # Check whether directly opened libs are compatible.
        for lib in direct_libs:
            if candidate_matches(parsed_elf, lib):
                resolved[lib] = lib
            else:
                unresolved.append(lib)

        if unresolved or relative_rpaths:
            self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)

    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
        pass

    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
        # There can be binaries in .spack/test which shouldn't be checked.
        if rel_path == ".spack":
            return False
        return True

    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
        return False


class CannotLocateSharedLibraries(spack.error.SpackError):
    pass


def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
    try:
        return byte_str.decode("utf-8")
    except UnicodeDecodeError:
        return byte_str

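As a usage sketch of the visitor defined above (the install prefix path and the allow-list pattern are hypothetical, and the example assumes it runs in the context of this module, with its existing imports):

from llnl.util.filesystem import visit_directory_tree

# Hypothetical: walk one installed prefix and report unresolved shared library deps.
visitor = ResolveSharedElfLibDepsVisitor(allow_unresolved_patterns=["libcuda.so*"])
visit_directory_tree("/opt/spack/linux-x86_64/zlib-1.3", visitor)
for rel_path, problem in visitor.problems.items():
    missing = ", ".join(str(maybe_decode(lib)) for lib in problem.unresolved)
    print(f"{rel_path}: cannot resolve {missing}")
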
@@ -206,36 +20,23 @@ def post_install(spec, explicit):
     if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
         return
 
-    visitor = ResolveSharedElfLibDepsVisitor(
-        [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
+    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
+        [*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
     )
     visit_directory_tree(spec.prefix, visitor)
 
-    # All good?
     if not visitor.problems:
         return
 
-    # For now just list the issues (print it in ldd style, except we don't recurse)
-    output = io.StringIO()
-    output.write("not all executables and libraries can resolve their dependencies:\n")
-    for path, problem in visitor.problems.items():
-        output.write(path)
-        output.write("\n")
-        for needed, full_path in problem.resolved.items():
-            output.write(" ")
-            if needed == full_path:
-                output.write(maybe_decode(needed))
-            else:
-                output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
-            output.write("\n")
-        for not_found in problem.unresolved:
-            output.write(f" {maybe_decode(not_found)} => not found\n")
-        for relative_rpath in problem.relative_rpaths:
-            output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
+    output = io.StringIO("not all executables and libraries can resolve their dependencies:\n")
+    visitor.write(output)
 
     message = output.getvalue().strip()
 
     if policy == "error":
         raise CannotLocateSharedLibraries(message)
 
     tty.warn(message)
+
+
+class CannotLocateSharedLibraries(spack.error.SpackError):
+    pass
@@ -21,7 +21,6 @@
|
|||||||
from llnl.util.lang import nullcontext
|
from llnl.util.lang import nullcontext
|
||||||
from llnl.util.tty.color import colorize
|
from llnl.util.tty.color import colorize
|
||||||
|
|
||||||
import spack.build_environment
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
@@ -398,7 +397,7 @@ def stand_alone_tests(self, kwargs):
|
|||||||
Args:
|
Args:
|
||||||
kwargs (dict): arguments to be used by the test process
|
kwargs (dict): arguments to be used by the test process
|
||||||
"""
|
"""
|
||||||
import spack.build_environment
|
import spack.build_environment # avoid circular dependency
|
||||||
|
|
||||||
spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
|
spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
|
||||||
|
|
||||||
@@ -463,6 +462,8 @@ def write_tested_status(self):
|
|||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
|
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
|
||||||
|
import spack.build_environment # avoid circular dependency
|
||||||
|
|
||||||
wdir = "." if work_dir is None else work_dir
|
wdir = "." if work_dir is None else work_dir
|
||||||
tester = pkg.tester
|
tester = pkg.tester
|
||||||
assert test_name and test_name.startswith(
|
assert test_name and test_name.startswith(
|
||||||
|
@@ -47,6 +47,8 @@
|
|||||||
import spack.util.environment
|
import spack.util.environment
|
||||||
import spack.util.lock
|
import spack.util.lock
|
||||||
|
|
||||||
|
from .enums import ConfigScopePriority
|
||||||
|
|
||||||
#: names of profile statistics
|
#: names of profile statistics
|
||||||
stat_names = pstats.Stats.sort_arg_dict_default
|
stat_names = pstats.Stats.sort_arg_dict_default
|
||||||
|
|
||||||
@@ -872,14 +874,19 @@ def add_command_line_scopes(
|
|||||||
scopes = ev.environment_path_scopes(name, path)
|
scopes = ev.environment_path_scopes(name, path)
|
||||||
if scopes is None:
|
if scopes is None:
|
||||||
if os.path.isdir(path): # directory with config files
|
if os.path.isdir(path): # directory with config files
|
||||||
cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
|
cfg.push_scope(
|
||||||
spack.config._add_platform_scope(cfg, name, path, writable=False)
|
spack.config.DirectoryConfigScope(name, path, writable=False),
|
||||||
|
priority=ConfigScopePriority.CUSTOM,
|
||||||
|
)
|
||||||
|
spack.config._add_platform_scope(
|
||||||
|
cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
|
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
|
||||||
|
|
||||||
for scope in scopes:
|
for scope in scopes:
|
||||||
cfg.push_scope(scope)
|
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
|
||||||
|
|
||||||
|
|
||||||
def _main(argv=None):
|
def _main(argv=None):
|
||||||
@@ -952,7 +959,9 @@ def _main(argv=None):
|
|||||||
# Push scopes from the command line last
|
# Push scopes from the command line last
|
||||||
if args.config_scopes:
|
if args.config_scopes:
|
||||||
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
||||||
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
|
spack.config.CONFIG.push_scope(
|
||||||
|
spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
|
||||||
|
)
|
||||||
setup_main_options(args)
|
setup_main_options(args)
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
# ------------------------------------------------------------------------
|
||||||
@@ -998,6 +1007,7 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
|
|||||||
args, unknown = parser.parse_known_args(main_args.command)
|
args, unknown = parser.parse_known_args(main_args.command)
|
||||||
# we need to inherit verbose since the install command checks for it
|
# we need to inherit verbose since the install command checks for it
|
||||||
args.verbose = main_args.verbose
|
args.verbose = main_args.verbose
|
||||||
|
args.lines = main_args.lines
|
||||||
|
|
||||||
# Now that we know what command this is and what its args are, determine
|
# Now that we know what command this is and what its args are, determine
|
||||||
# whether we can continue with a bad environment and raise if not.
|
# whether we can continue with a bad environment and raise if not.
|
||||||
|
@@ -125,9 +125,10 @@ def windows_establish_runtime_linkage(self):
|
|||||||
# Spack should in general not modify things it has not installed
|
# Spack should in general not modify things it has not installed
|
||||||
# we can reasonably expect externals to have their link interface properly established
|
# we can reasonably expect externals to have their link interface properly established
|
||||||
if sys.platform == "win32" and not self.spec.external:
|
if sys.platform == "win32" and not self.spec.external:
|
||||||
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
|
win_rpath = fsys.WindowsSimulatedRPath(self)
|
||||||
self.win_rpath.add_rpath(*self.win_add_rpath())
|
win_rpath.add_library_dependent(*self.win_add_library_dependent())
|
||||||
self.win_rpath.establish_link()
|
win_rpath.add_rpath(*self.win_add_rpath())
|
||||||
|
win_rpath.establish_link()
|
||||||
|
|
||||||
|
|
||||||
#: Registers which are the detectable packages, by repo and package name
|
#: Registers which are the detectable packages, by repo and package name
|
||||||
@@ -742,7 +743,6 @@ def __init__(self, spec):
|
|||||||
# Set up timing variables
|
# Set up timing variables
|
||||||
self._fetch_time = 0.0
|
self._fetch_time = 0.0
|
||||||
|
|
||||||
self.win_rpath = fsys.WindowsSimulatedRPath(self)
|
|
||||||
super().__init__()
|
super().__init__()
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> "PackageBase":
|
def __getitem__(self, key: str) -> "PackageBase":
|
||||||
|
@@ -83,6 +83,7 @@ def __init__(
|
|||||||
level: int,
|
level: int,
|
||||||
working_dir: str,
|
working_dir: str,
|
||||||
reverse: bool = False,
|
reverse: bool = False,
|
||||||
|
ordering_key: Optional[Tuple[str, int]] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize a new Patch instance.
|
"""Initialize a new Patch instance.
|
||||||
|
|
||||||
@@ -92,6 +93,7 @@ def __init__(
|
|||||||
level: patch level
|
level: patch level
|
||||||
working_dir: relative path *within* the stage to change to
|
working_dir: relative path *within* the stage to change to
|
||||||
reverse: reverse the patch
|
reverse: reverse the patch
|
||||||
|
ordering_key: key used to ensure patches are applied in a consistent order
|
||||||
"""
|
"""
|
||||||
# validate level (must be an integer >= 0)
|
# validate level (must be an integer >= 0)
|
||||||
if not isinstance(level, int) or not level >= 0:
|
if not isinstance(level, int) or not level >= 0:
|
||||||
@@ -105,6 +107,13 @@ def __init__(
|
|||||||
self.working_dir = working_dir
|
self.working_dir = working_dir
|
||||||
self.reverse = reverse
|
self.reverse = reverse
|
||||||
|
|
||||||
|
# The ordering key is passed when executing package.py directives, and is only relevant
|
||||||
|
# after a solve to build concrete specs with consistently ordered patches. For concrete
|
||||||
|
# specs read from a file, we add patches in the order of its patches variants and the
|
||||||
|
# ordering_key is irrelevant. In that case, use a default value so we don't need to branch
|
||||||
|
# on whether ordering_key is None where it's used, just to make static analysis happy.
|
||||||
|
self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
|
||||||
|
|
||||||
def apply(self, stage: "spack.stage.Stage") -> None:
|
def apply(self, stage: "spack.stage.Stage") -> None:
|
||||||
"""Apply a patch to source in a stage.
|
"""Apply a patch to source in a stage.
|
||||||
|
|
||||||
@@ -202,9 +211,8 @@ def __init__(
|
|||||||
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
|
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
|
|
||||||
super().__init__(pkg, abs_path, level, working_dir, reverse)
|
super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key)
|
||||||
self.path = abs_path
|
self.path = abs_path
|
||||||
self.ordering_key = ordering_key
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def sha256(self) -> str:
|
def sha256(self) -> str:
|
||||||
@@ -266,13 +274,11 @@ def __init__(
|
|||||||
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
|
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
|
||||||
(only required for compressed URL patches)
|
(only required for compressed URL patches)
|
||||||
"""
|
"""
|
||||||
super().__init__(pkg, url, level, working_dir, reverse)
|
super().__init__(pkg, url, level, working_dir, reverse, ordering_key)
|
||||||
|
|
||||||
self.url = url
|
self.url = url
|
||||||
self._stage: Optional["spack.stage.Stage"] = None
|
self._stage: Optional["spack.stage.Stage"] = None
|
||||||
|
|
||||||
self.ordering_key = ordering_key
|
|
||||||
|
|
||||||
if allowed_archive(self.url) and not archive_sha256:
|
if allowed_archive(self.url) and not archive_sha256:
|
||||||
raise spack.error.PatchDirectiveError(
|
raise spack.error.PatchDirectiveError(
|
||||||
"Compressed patches require 'archive_sha256' "
|
"Compressed patches require 'archive_sha256' "
|
||||||
|
@@ -108,6 +108,8 @@ def _get_user_cache_path():
|
|||||||
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
|
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
|
||||||
default_misc_cache_path = os.path.join(user_cache_path, "cache")
|
default_misc_cache_path = os.path.join(user_cache_path, "cache")
|
||||||
|
|
||||||
|
#: concretization cache for Spack concretizations
|
||||||
|
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")
|
||||||
|
|
||||||
# Below paths pull configuration from the host environment.
|
# Below paths pull configuration from the host environment.
|
||||||
#
|
#
|
||||||
|
@@ -32,6 +32,7 @@
|
|||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import working_dir
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
|
import spack
|
||||||
import spack.caches
|
import spack.caches
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
@@ -49,6 +50,8 @@
|
|||||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||||
|
|
||||||
|
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
|
||||||
|
|
||||||
|
|
||||||
def python_package_for_repo(namespace):
|
def python_package_for_repo(namespace):
|
||||||
"""Returns the full namespace of a repository, given its relative one
|
"""Returns the full namespace of a repository, given its relative one
|
||||||
@@ -909,19 +912,52 @@ def __reduce__(self):
|
|||||||
return RepoPath.unmarshal, self.marshal()
|
return RepoPath.unmarshal, self.marshal()
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_package_api_version(
    config: Dict[str, Any],
    min_api: Tuple[int, int] = spack.min_package_api_version,
    max_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[int, int]:
    api = config.get("api")
    if api is None:
        package_api = (1, 0)
    else:
        if not isinstance(api, str):
            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
        api_match = _API_REGEX.match(api)
        if api_match is None:
            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
        package_api = (int(api_match.group(1)), int(api_match.group(2)))

    if min_api <= package_api <= max_api:
        return package_api

    min_str = ".".join(str(i) for i in min_api)
    max_str = ".".join(str(i) for i in max_api)
    curr_str = ".".join(str(i) for i in package_api)
    raise BadRepoError(
        f"Package API v{curr_str} is not supported by this version of Spack ("
        f"must be between v{min_str} and v{max_str})"
    )

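A quick illustration of these parsing rules (the dict mirrors what a hypothetical repo.yaml would parse to; whether "v2.0" is accepted depends on the bounds in spack.min_package_api_version and spack.package_api_version):

# Hypothetical repo.yaml contents after YAML parsing; values are made up.
config = {"namespace": "myrepo", "subdirectory": "packages", "api": "v2.0"}
_parse_package_api_version(config)        # -> (2, 0), if within the supported range
_parse_package_api_version({})            # -> (1, 0), the default when "api" is absent
_parse_package_api_version({"api": "2"})  # raises BadRepoError: must be of the form vX.Y
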
class Repo:
|
class Repo:
|
||||||
"""Class representing a package repository in the filesystem.
|
"""Class representing a package repository in the filesystem.
|
||||||
|
|
||||||
Each package repository must have a top-level configuration file
|
Each package repository must have a top-level configuration file called `repo.yaml`.
|
||||||
called `repo.yaml`.
|
|
||||||
|
|
||||||
Currently, `repo.yaml` must define:
|
It contains the following keys:
|
||||||
|
|
||||||
`namespace`:
|
`namespace`:
|
||||||
A Python namespace where the repository's packages should live.
|
A Python namespace where the repository's packages should live.
|
||||||
|
|
||||||
`subdirectory`:
|
`subdirectory`:
|
||||||
An optional subdirectory name where packages are placed
|
An optional subdirectory name where packages are placed
|
||||||
|
|
||||||
|
`api`:
|
||||||
|
A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
|
||||||
|
For the repo to be compatible with the current version of Spack, the version must be
|
||||||
|
greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
|
||||||
|
:py:data:`spack.package_api_version`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -958,7 +994,7 @@ def check(condition, msg):
|
|||||||
f"{os.path.join(root, repo_config_name)} must define a namespace.",
|
f"{os.path.join(root, repo_config_name)} must define a namespace.",
|
||||||
)
|
)
|
||||||
|
|
||||||
self.namespace = config["namespace"]
|
self.namespace: str = config["namespace"]
|
||||||
check(
|
check(
|
||||||
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
|
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
|
||||||
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
|
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
|
||||||
@@ -971,12 +1007,14 @@ def check(condition, msg):
|
|||||||
# Keep name components around for checking prefixes.
|
# Keep name components around for checking prefixes.
|
||||||
self._names = self.full_namespace.split(".")
|
self._names = self.full_namespace.split(".")
|
||||||
|
|
||||||
packages_dir = config.get("subdirectory", packages_dir_name)
|
packages_dir: str = config.get("subdirectory", packages_dir_name)
|
||||||
self.packages_path = os.path.join(self.root, packages_dir)
|
self.packages_path = os.path.join(self.root, packages_dir)
|
||||||
check(
|
check(
|
||||||
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self.package_api = _parse_package_api_version(config)
|
||||||
|
|
||||||
# Class attribute overrides by package name
|
# Class attribute overrides by package name
|
||||||
self.overrides = overrides or {}
|
self.overrides = overrides or {}
|
||||||
|
|
||||||
@@ -1026,7 +1064,7 @@ def is_prefix(self, fullname: str) -> bool:
|
|||||||
parts = fullname.split(".")
|
parts = fullname.split(".")
|
||||||
return self._names[: len(parts)] == parts
|
return self._names[: len(parts)] == parts
|
||||||
|
|
||||||
def _read_config(self) -> Dict[str, str]:
|
def _read_config(self) -> Dict[str, Any]:
|
||||||
"""Check for a YAML config file in this db's root directory."""
|
"""Check for a YAML config file in this db's root directory."""
|
||||||
try:
|
try:
|
||||||
with open(self.config_file, encoding="utf-8") as reponame_file:
|
with open(self.config_file, encoding="utf-8") as reponame_file:
|
||||||
@@ -1179,9 +1217,8 @@ def all_package_paths(self) -> Generator[str, None, None]:
|
|||||||
yield self.package_path(name)
|
yield self.package_path(name)
|
||||||
|
|
||||||
def packages_with_tags(self, *tags: str) -> Set[str]:
|
def packages_with_tags(self, *tags: str) -> Set[str]:
|
||||||
v = set(self.tag_index[tags[0].lower()])
|
v = set(self.all_package_names())
|
||||||
for tag in tags[1:]:
|
v.intersection_update(*(self.tag_index[tag.lower()] for tag in tags))
|
||||||
v.intersection_update(self.tag_index[tag.lower()])
|
|
||||||
return v
|
return v
|
||||||
|
|
||||||
def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:
|
def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:
|
||||||
@@ -1369,6 +1406,8 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
|
|||||||
config.write(f" namespace: '{namespace}'\n")
|
config.write(f" namespace: '{namespace}'\n")
|
||||||
if subdir != packages_dir_name:
|
if subdir != packages_dir_name:
|
||||||
config.write(f" subdirectory: '{subdir}'\n")
|
config.write(f" subdirectory: '{subdir}'\n")
|
||||||
|
x, y = spack.package_api_version
|
||||||
|
config.write(f" api: v{x}.{y}\n")
|
||||||
|
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
# try to clean up.
|
# try to clean up.
|
||||||
|
@@ -7,8 +7,7 @@
|
|||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
import jsonschema.validators
|
||||||
import llnl.util.lang
|
|
||||||
|
|
||||||
from spack.error import SpecSyntaxError
|
from spack.error import SpecSyntaxError
|
||||||
|
|
||||||
@@ -18,23 +17,25 @@ class DeprecationMessage(typing.NamedTuple):
|
|||||||
error: bool
|
error: bool
|
||||||
|
|
||||||
|
|
||||||
# jsonschema is imported lazily as it is heavy to import
|
def _validate_spec(validator, is_spec, instance, schema):
|
||||||
# and increases the start-up time
|
"""Check if all additional keys are valid specs."""
|
||||||
def _make_validator():
|
|
||||||
def _validate_spec(validator, is_spec, instance, schema):
|
|
||||||
"""Check if the attributes on instance are valid specs."""
|
|
||||||
import spack.spec_parser
|
import spack.spec_parser
|
||||||
|
|
||||||
if not validator.is_type(instance, "object"):
|
if not validator.is_type(instance, "object"):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
properties = schema.get("properties") or {}
|
||||||
|
|
||||||
for spec_str in instance:
|
for spec_str in instance:
|
||||||
|
if spec_str in properties:
|
||||||
|
continue
|
||||||
try:
|
try:
|
||||||
spack.spec_parser.parse(spec_str)
|
spack.spec_parser.parse(spec_str)
|
||||||
except SpecSyntaxError:
|
except SpecSyntaxError:
|
||||||
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
|
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
|
||||||
|
|
||||||
def _deprecated_properties(validator, deprecated, instance, schema):
|
|
||||||
|
def _deprecated_properties(validator, deprecated, instance, schema):
|
||||||
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -64,13 +65,11 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
|||||||
if errors:
|
if errors:
|
||||||
yield jsonschema.ValidationError("\n".join(errors))
|
yield jsonschema.ValidationError("\n".join(errors))
|
||||||
|
|
||||||
return jsonschema.validators.extend(
|
|
||||||
|
Validator = jsonschema.validators.extend(
|
||||||
jsonschema.Draft7Validator,
|
jsonschema.Draft7Validator,
|
||||||
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
{"additionalKeysAreSpecs": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
Validator = llnl.util.lang.Singleton(_make_validator)
|
|
||||||
|
|
||||||
|
|
||||||
def _append(string: str) -> bool:
|
def _append(string: str) -> bool:
|
||||||
|
@@ -58,6 +58,15 @@
|
|||||||
{"type": "string"}, # deprecated
|
{"type": "string"}, # deprecated
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"concretization_cache": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"enable": {"type": "boolean"},
|
||||||
|
"url": {"type": "string"},
|
||||||
|
"entry_limit": {"type": "integer", "minimum": 0},
|
||||||
|
"size_limit": {"type": "integer", "minimum": 0},
|
||||||
|
},
|
||||||
|
},
|
||||||
"install_hash_length": {"type": "integer", "minimum": 1},
|
"install_hash_length": {"type": "integer", "minimum": 1},
|
||||||
"install_path_scheme": {"type": "string"}, # deprecated
|
"install_path_scheme": {"type": "string"}, # deprecated
|
||||||
"build_stage": {
|
"build_stage": {
|
||||||
|
@@ -29,11 +29,7 @@
|
|||||||
# merged configuration scope schemas
|
# merged configuration scope schemas
|
||||||
spack.schema.merged.properties,
|
spack.schema.merged.properties,
|
||||||
# extra environment schema properties
|
# extra environment schema properties
|
||||||
{
|
{"specs": spec_list_schema, "include_concrete": include_concrete},
|
||||||
"include": {"type": "array", "default": [], "items": {"type": "string"}},
|
|
||||||
"specs": spec_list_schema,
|
|
||||||
"include_concrete": include_concrete,
|
|
||||||
},
|
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
lib/spack/spack/schema/include.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for include.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/include.py
   :lines: 12-
"""
from typing import Any, Dict

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
    "include": {
        "type": "array",
        "default": [],
        "additionalProperties": False,
        "items": {
            "anyOf": [
                {
                    "type": "object",
                    "properties": {
                        "when": {"type": "string"},
                        "path": {"type": "string"},
                        "sha256": {"type": "string"},
                        "optional": {"type": "boolean"},
                    },
                    "required": ["path"],
                    "additionalProperties": False,
                },
                {"type": "string"},
            ]
        },
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack include configuration file schema",
    "properties": properties,
}
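For illustration, data accepted by this schema looks like the following (the path, URL, digest, and condition below are made up):

# Hypothetical document that the include.yaml schema above is meant to accept.
example = {
    "include": [
        "/etc/spack/extra-config",  # bare string entry
        {
            "path": "https://example.com/spack/config.yaml",  # "path" is the only required key
            "sha256": "<hex digest of the remote file>",
            "optional": True,
            "when": "platform=linux",  # only included when the condition holds
        },
    ]
}
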
@@ -21,6 +21,7 @@
|
|||||||
import spack.schema.definitions
|
import spack.schema.definitions
|
||||||
import spack.schema.develop
|
import spack.schema.develop
|
||||||
import spack.schema.env_vars
|
import spack.schema.env_vars
|
||||||
|
import spack.schema.include
|
||||||
import spack.schema.mirrors
|
import spack.schema.mirrors
|
||||||
import spack.schema.modules
|
import spack.schema.modules
|
||||||
import spack.schema.packages
|
import spack.schema.packages
|
||||||
@@ -40,6 +41,7 @@
|
|||||||
spack.schema.definitions.properties,
|
spack.schema.definitions.properties,
|
||||||
spack.schema.develop.properties,
|
spack.schema.develop.properties,
|
||||||
spack.schema.env_vars.properties,
|
spack.schema.env_vars.properties,
|
||||||
|
spack.schema.include.properties,
|
||||||
spack.schema.mirrors.properties,
|
spack.schema.mirrors.properties,
|
||||||
spack.schema.modules.properties,
|
spack.schema.modules.properties,
|
||||||
spack.schema.packages.properties,
|
spack.schema.packages.properties,
|
||||||
@@ -48,7 +50,6 @@
|
|||||||
spack.schema.view.properties,
|
spack.schema.view.properties,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#: Full schema with metadata
|
#: Full schema with metadata
|
||||||
schema = {
|
schema = {
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||||
|
@@ -39,7 +39,7 @@
|
|||||||
"load": array_of_strings,
|
"load": array_of_strings,
|
||||||
"suffixes": {
|
"suffixes": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"validate_spec": True,
|
"additionalKeysAreSpecs": True,
|
||||||
"additionalProperties": {"type": "string"}, # key
|
"additionalProperties": {"type": "string"}, # key
|
||||||
},
|
},
|
||||||
"environment": spack.schema.environment.definition,
|
"environment": spack.schema.environment.definition,
|
||||||
@@ -48,11 +48,7 @@
|
|||||||
|
|
||||||
projections_scheme = spack.schema.projections.properties["projections"]
|
projections_scheme = spack.schema.projections.properties["projections"]
|
||||||
|
|
||||||
module_type_configuration: Dict = {
|
common_props = {
|
||||||
"type": "object",
|
|
||||||
"default": {},
|
|
||||||
"validate_spec": True,
|
|
||||||
"properties": {
|
|
||||||
"verbose": {"type": "boolean", "default": False},
|
"verbose": {"type": "boolean", "default": False},
|
||||||
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
||||||
"include": array_of_strings,
|
"include": array_of_strings,
|
||||||
@@ -63,25 +59,33 @@
|
|||||||
"naming_scheme": {"type": "string"},
|
"naming_scheme": {"type": "string"},
|
||||||
"projections": projections_scheme,
|
"projections": projections_scheme,
|
||||||
"all": module_file_configuration,
|
"all": module_file_configuration,
|
||||||
},
|
}
|
||||||
|
|
||||||
|
tcl_configuration = {
|
||||||
|
"type": "object",
|
||||||
|
"default": {},
|
||||||
|
"additionalKeysAreSpecs": True,
|
||||||
|
"properties": {**common_props},
|
||||||
"additionalProperties": module_file_configuration,
|
"additionalProperties": module_file_configuration,
|
||||||
}
|
}
|
||||||
|
|
||||||
tcl_configuration = module_type_configuration.copy()
|
lmod_configuration = {
|
||||||
|
"type": "object",
|
||||||
lmod_configuration = module_type_configuration.copy()
|
"default": {},
|
||||||
lmod_configuration["properties"].update(
|
"additionalKeysAreSpecs": True,
|
||||||
{
|
"properties": {
|
||||||
|
**common_props,
|
||||||
"core_compilers": array_of_strings,
|
"core_compilers": array_of_strings,
|
||||||
"hierarchy": array_of_strings,
|
"hierarchy": array_of_strings,
|
||||||
"core_specs": array_of_strings,
|
"core_specs": array_of_strings,
|
||||||
"filter_hierarchy_specs": {
|
"filter_hierarchy_specs": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"validate_spec": True,
|
"additionalKeysAreSpecs": True,
|
||||||
"additionalProperties": array_of_strings,
|
"additionalProperties": array_of_strings,
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
)
|
"additionalProperties": module_file_configuration,
|
||||||
|
}
|
||||||
|
|
||||||
module_config_properties = {
|
module_config_properties = {
|
||||||
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
||||||
|
@@ -5,9 +5,12 @@
|
|||||||
import collections.abc
|
import collections.abc
|
||||||
import copy
|
import copy
|
||||||
import enum
|
import enum
|
||||||
|
import errno
|
||||||
import functools
|
import functools
|
||||||
|
import hashlib
|
||||||
import io
|
import io
|
||||||
import itertools
|
import itertools
|
||||||
|
import json
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import pprint
|
import pprint
|
||||||
@@ -17,12 +20,25 @@
|
|||||||
import typing
|
import typing
|
||||||
import warnings
|
import warnings
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
|
from typing import (
|
||||||
|
IO,
|
||||||
|
Callable,
|
||||||
|
Dict,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
NamedTuple,
|
||||||
|
Optional,
|
||||||
|
Set,
|
||||||
|
Tuple,
|
||||||
|
Type,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
from llnl.util.filesystem import current_file_position
|
||||||
from llnl.util.lang import elide_list
|
from llnl.util.lang import elide_list
|
||||||
|
|
||||||
import spack
|
import spack
|
||||||
@@ -34,21 +50,27 @@
|
|||||||
import spack.deptypes as dt
|
import spack.deptypes as dt
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.error
|
import spack.error
|
||||||
|
import spack.hash_types as ht
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.package_prefs
|
import spack.package_prefs
|
||||||
|
import spack.patch
|
||||||
|
import spack.paths
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.solver.splicing
|
import spack.solver.splicing
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
import spack.util.crypto
|
import spack.util.crypto
|
||||||
|
import spack.util.hash
|
||||||
import spack.util.libc
|
import spack.util.libc
|
||||||
|
import spack.util.module_cmd as md
|
||||||
import spack.util.path
|
import spack.util.path
|
||||||
import spack.util.timer
|
import spack.util.timer
|
||||||
import spack.variant as vt
|
import spack.variant as vt
|
||||||
import spack.version as vn
|
import spack.version as vn
|
||||||
import spack.version.git_ref_lookup
|
import spack.version.git_ref_lookup
|
||||||
from spack import traverse
|
from spack import traverse
|
||||||
|
from spack.util.file_cache import FileCache
|
||||||
|
|
||||||
from .core import (
|
from .core import (
|
||||||
AspFunction,
|
AspFunction,
|
||||||
@@ -536,6 +558,364 @@ def format_unsolved(unsolved_specs):
|
|||||||
msg += "\n\t(No candidate specs from solver)"
|
msg += "\n\t(No candidate specs from solver)"
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
|
def to_dict(self, test: bool = False) -> dict:
|
||||||
|
"""Produces dict representation of Result object
|
||||||
|
|
||||||
|
Does not include anything related to unsatisfiability as we
|
||||||
|
are only interested in storing satisfiable results
|
||||||
|
"""
|
||||||
|
serial_node_arg = (
|
||||||
|
lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
|
||||||
|
)
|
||||||
|
spec_hash_type = ht.process_hash if test else ht.dag_hash
|
||||||
|
ret = dict()
|
||||||
|
ret["asp"] = self.asp
|
||||||
|
ret["criteria"] = self.criteria
|
||||||
|
ret["optimal"] = self.optimal
|
||||||
|
ret["warnings"] = self.warnings
|
||||||
|
ret["nmodels"] = self.nmodels
|
||||||
|
ret["abstract_specs"] = [str(x) for x in self.abstract_specs]
|
||||||
|
ret["satisfiable"] = self.satisfiable
|
||||||
|
serial_answers = []
|
||||||
|
for answer in self.answers:
|
||||||
|
serial_answer = answer[:2]
|
||||||
|
serial_answer_dict = {}
|
||||||
|
for node, spec in answer[2].items():
|
||||||
|
serial_answer_dict[serial_node_arg(node)] = spec.to_dict(hash=spec_hash_type)
|
||||||
|
serial_answer = serial_answer + (serial_answer_dict,)
|
||||||
|
serial_answers.append(serial_answer)
|
||||||
|
ret["answers"] = serial_answers
|
||||||
|
ret["specs_by_input"] = {}
|
||||||
|
input_specs = {} if not self.specs_by_input else self.specs_by_input
|
||||||
|
for input, spec in input_specs.items():
|
||||||
|
ret["specs_by_input"][str(input)] = spec.to_dict(hash=spec_hash_type)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_dict(obj: dict):
|
||||||
|
"""Returns Result object from compatible dictionary"""
|
||||||
|
|
||||||
|
def _dict_to_node_argument(dict):
|
||||||
|
id = dict["id"]
|
||||||
|
pkg = dict["pkg"]
|
||||||
|
return NodeArgument(id=id, pkg=pkg)
|
||||||
|
|
||||||
|
def _str_to_spec(spec_str):
|
||||||
|
return spack.spec.Spec(spec_str)
|
||||||
|
|
||||||
|
def _dict_to_spec(spec_dict):
|
||||||
|
loaded_spec = spack.spec.Spec.from_dict(spec_dict)
|
||||||
|
_ensure_external_path_if_external(loaded_spec)
|
||||||
|
spack.spec.Spec.ensure_no_deprecated(loaded_spec)
|
||||||
|
return loaded_spec
|
||||||
|
|
||||||
|
asp = obj.get("asp")
|
||||||
|
spec_list = obj.get("abstract_specs")
|
||||||
|
if not spec_list:
|
||||||
|
raise RuntimeError("Invalid json for concretization Result object")
|
||||||
|
if spec_list:
|
||||||
|
spec_list = [_str_to_spec(x) for x in spec_list]
|
||||||
|
result = Result(spec_list, asp)
|
||||||
|
result.criteria = obj.get("criteria")
|
||||||
|
result.optimal = obj.get("optimal")
|
||||||
|
result.warnings = obj.get("warnings")
|
||||||
|
result.nmodels = obj.get("nmodels")
|
||||||
|
result.satisfiable = obj.get("satisfiable")
|
||||||
|
result._unsolved_specs = []
|
||||||
|
answers = []
|
||||||
|
for answer in obj.get("answers", []):
|
||||||
|
loaded_answer = answer[:2]
|
||||||
|
answer_node_dict = {}
|
||||||
|
for node, spec in answer[2].items():
|
||||||
|
answer_node_dict[_dict_to_node_argument(json.loads(node))] = _dict_to_spec(spec)
|
||||||
|
loaded_answer.append(answer_node_dict)
|
||||||
|
answers.append(tuple(loaded_answer))
|
||||||
|
result.answers = answers
|
||||||
|
result._concrete_specs_by_input = {}
|
||||||
|
result._concrete_specs = []
|
||||||
|
for input, spec in obj.get("specs_by_input", {}).items():
|
||||||
|
result._concrete_specs_by_input[_str_to_spec(input)] = _dict_to_spec(spec)
|
||||||
|
result._concrete_specs.append(_dict_to_spec(spec))
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class ConcretizationCache:
|
||||||
|
"""Store for Spack concretization results and statistics
|
||||||
|
|
||||||
|
Serializes solver result objects and statistics to json and stores
|
||||||
|
at a given endpoint in a cache associated by the sha256 of the
|
||||||
|
asp problem and the involved control files.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, root: Union[str, None] = None):
|
||||||
|
root = root or spack.config.get(
|
||||||
|
"config:concretization_cache:url", spack.paths.default_conc_cache_path
|
||||||
|
)
|
||||||
|
self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
|
||||||
|
self._fc = FileCache(self.root)
|
||||||
|
self._cache_manifest = ".cache_manifest"
|
||||||
|
self._manifest_queue: List[Tuple[pathlib.Path, int]] = []
|
||||||
|
|
||||||
|
def cleanup(self):
|
||||||
|
"""Prunes the concretization cache according to configured size and entry
|
||||||
|
count limits. Cleanup is done in FIFO ordering."""
|
||||||
|
# TODO: determine a better default
|
||||||
|
entry_limit = spack.config.get("config:concretization_cache:entry_limit", 1000)
|
||||||
|
bytes_limit = spack.config.get("config:concretization_cache:size_limit", 3e8)
|
||||||
|
# lock the entire buildcache as we're removing a lot of data from the
|
||||||
|
# manifest and cache itself
|
||||||
|
with self._fc.read_transaction(self._cache_manifest) as f:
|
||||||
|
count, cache_bytes = self._extract_cache_metadata(f)
|
||||||
|
if not count or not cache_bytes:
|
||||||
|
return
|
||||||
|
entry_count = int(count)
|
||||||
|
manifest_bytes = int(cache_bytes)
|
||||||
|
# move beyond the metadata entry
|
||||||
|
f.readline()
|
||||||
|
if entry_count > entry_limit and entry_limit > 0:
|
||||||
|
with self._fc.write_transaction(self._cache_manifest) as (old, new):
|
||||||
|
# prune the oldest 10% or until we have removed 10% of
|
||||||
|
# total bytes starting from oldest entry
|
||||||
|
# TODO: make this configurable?
|
||||||
|
prune_count = entry_limit // 10
|
||||||
|
lines_to_prune = f.readlines(prune_count)
|
||||||
|
for i, line in enumerate(lines_to_prune):
|
||||||
|
sha, cache_entry_bytes = self._parse_manifest_entry(line)
|
||||||
|
if sha and cache_entry_bytes:
|
||||||
|
cache_path = self._cache_path_from_hash(sha)
|
||||||
|
if self._fc.remove(cache_path):
|
||||||
|
entry_count -= 1
|
||||||
|
manifest_bytes -= int(cache_entry_bytes)
|
||||||
|
else:
|
||||||
|
tty.warn(
|
||||||
|
f"Invalid concretization cache entry: '{line}' on line: {i+1}"
|
||||||
|
)
|
||||||
|
self._write_manifest(f, entry_count, manifest_bytes)
|
||||||
|
|
||||||
|
elif manifest_bytes > bytes_limit and bytes_limit > 0:
|
||||||
|
with self._fc.write_transaction(self._cache_manifest) as (old, new):
|
||||||
|
# take 10% of current size off
|
||||||
|
prune_amount = bytes_limit // 10
|
||||||
|
total_pruned = 0
|
||||||
|
i = 0
|
||||||
|
while total_pruned < prune_amount:
|
||||||
|
sha, manifest_cache_bytes = self._parse_manifest_entry(f.readline())
|
||||||
|
if sha and manifest_cache_bytes:
|
||||||
|
entry_bytes = int(manifest_cache_bytes)
|
||||||
|
cache_path = self.root / sha[:2] / sha
|
||||||
|
if self._safe_remove(cache_path):
|
||||||
|
entry_count -= 1
|
||||||
|
entry_bytes -= entry_bytes
|
||||||
|
total_pruned += entry_bytes
|
||||||
|
else:
|
||||||
|
tty.warn(
|
||||||
|
"Invalid concretization cache entry "
|
||||||
|
f"'{sha} {manifest_cache_bytes}' on line: {i}"
|
||||||
|
)
|
||||||
|
i += 1
|
||||||
|
self._write_manifest(f, entry_count, manifest_bytes)
|
||||||
|
for cache_dir in self.root.iterdir():
|
||||||
|
if cache_dir.is_dir() and not any(cache_dir.iterdir()):
|
||||||
|
self._safe_remove(cache_dir)
|
||||||
|
|
||||||
|
def cache_entries(self):
|
||||||
|
"""Generator producing cache entries"""
|
||||||
|
for cache_dir in self.root.iterdir():
|
||||||
|
# ensure component is cache entry directory
|
||||||
|
# not metadata file
|
||||||
|
if cache_dir.is_dir():
|
||||||
|
for cache_entry in cache_dir.iterdir():
|
||||||
|
if not cache_entry.is_dir():
|
||||||
|
yield cache_entry
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Improperly formed concretization cache. "
|
||||||
|
f"Directory {cache_entry.name} is improperly located "
|
||||||
|
"within the concretization cache."
|
||||||
|
)
|
||||||
|
|
||||||
|
def _parse_manifest_entry(self, line):
|
||||||
|
"""Returns parsed manifest entry lines
|
||||||
|
with handling for invalid reads."""
|
||||||
|
if line:
|
||||||
|
cache_values = line.strip("\n").split(" ")
|
||||||
|
if len(cache_values) < 2:
|
||||||
|
tty.warn(f"Invalid cache entry at {line}")
|
||||||
|
return None, None
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
def _write_manifest(self, manifest_file, entry_count, entry_bytes):
|
||||||
|
"""Writes new concretization cache manifest file.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
manifest_file: IO stream opened for reading
|
||||||
|
and writing wrapping the manifest file
|
||||||
|
with cursor at calltime set to location
|
||||||
|
where manifest should be truncated
|
||||||
|
entry_count: new total entry count
|
||||||
|
entry_bytes: new total entry bytes count
|
||||||
|
|
||||||
|
"""
|
||||||
|
persisted_entries = manifest_file.readlines()
|
||||||
|
manifest_file.truncate(0)
|
||||||
|
manifest_file.write(f"{entry_count} {entry_bytes}\n")
|
||||||
|
manifest_file.writelines(persisted_entries)
|
||||||
|
|
||||||
|
def _results_from_cache(self, cache_entry_buffer: IO[str]) -> Union[Result, None]:
|
||||||
|
"""Returns a Results object from the concretizer cache
|
||||||
|
|
||||||
|
Reads the cache hit and uses `Result`'s own deserializer
|
||||||
|
to produce a new Result object
|
||||||
|
"""
|
||||||
|
|
||||||
|
with current_file_position(cache_entry_buffer, 0):
|
||||||
|
cache_str = cache_entry_buffer.read()
|
||||||
|
# TODO: Should this be an error if None?
|
||||||
|
# Same for _stats_from_cache
|
||||||
|
if cache_str:
|
||||||
|
cache_entry = json.loads(cache_str)
|
||||||
|
result_json = cache_entry["results"]
|
||||||
|
return Result.from_dict(result_json)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _stats_from_cache(self, cache_entry_buffer: IO[str]) -> Union[List, None]:
|
||||||
|
"""Returns concretization statistic from the
|
||||||
|
concretization associated with the cache.
|
||||||
|
|
||||||
|
Deserializes the JSON representation of the
|
||||||
|
statistics covering the cached concretization run
|
||||||
|
and returns the Python data structures
|
||||||
|
"""
|
||||||
|
with current_file_position(cache_entry_buffer, 0):
|
||||||
|
cache_str = cache_entry_buffer.read()
|
||||||
|
if cache_str:
|
||||||
|
return json.loads(cache_str)["statistics"]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _extract_cache_metadata(self, cache_stream: IO[str]):
|
||||||
|
"""Extracts and returns cache entry count and bytes count from head of manifest
|
||||||
|
file"""
|
||||||
|
# make sure we're always reading from the beginning of the stream
|
||||||
|
# concretization cache manifest data lives at the top of the file
|
||||||
|
with current_file_position(cache_stream, 0):
|
||||||
|
return self._parse_manifest_entry(cache_stream.readline())
|
||||||
|
|
||||||
|
def _prefix_digest(self, problem: str) -> Tuple[str, str]:
|
||||||
|
"""Return the first two characters of, and the full, sha256 of the given asp problem"""
|
||||||
|
prob_digest = hashlib.sha256(problem.encode()).hexdigest()
|
||||||
|
prefix = prob_digest[:2]
|
||||||
|
return prefix, prob_digest
|
||||||
|
|
||||||
|
def _cache_path_from_problem(self, problem: str) -> pathlib.Path:
|
||||||
|
"""Returns a Path object representing the path to the cache
|
||||||
|
entry for the given problem"""
|
||||||
|
prefix, digest = self._prefix_digest(problem)
|
||||||
|
return pathlib.Path(prefix) / digest
|
||||||
|
|
||||||
|
def _cache_path_from_hash(self, hash: str) -> pathlib.Path:
|
||||||
|
"""Returns a Path object representing the cache entry
|
||||||
|
corresponding to the given sha256 hash"""
|
||||||
|
return pathlib.Path(hash[:2]) / hash
|
||||||
|
|
||||||
|
def _lock_prefix_from_cache_path(self, cache_path: str):
|
||||||
|
"""Returns the bit location corresponding to a given cache entry path
|
||||||
|
for file locking"""
|
||||||
|
return spack.util.hash.base32_prefix_bits(
|
||||||
|
spack.util.hash.b32_hash(cache_path), spack.util.crypto.bit_length(sys.maxsize)
|
||||||
|
)
|
||||||
|
|
||||||
|
def flush_manifest(self):
|
||||||
|
"""Updates the concretization cache manifest file after a cache write operation
|
||||||
|
Updates the current byte count and entry counts and writes to the head of the
|
||||||
|
manifest file"""
|
||||||
|
manifest_file = self.root / self._cache_manifest
|
||||||
|
manifest_file.touch(exist_ok=True)
|
||||||
|
with open(manifest_file, "r+", encoding="utf-8") as f:
|
||||||
|
# check if manifest is empty
|
||||||
|
count, cache_bytes = self._extract_cache_metadata(f)
|
||||||
|
if not count or not cache_bytes:
|
||||||
|
# cache is uninitialized
|
||||||
|
count = 0
|
||||||
|
cache_bytes = 0
|
||||||
|
f.seek(0, io.SEEK_END)
|
||||||
|
for manifest_update in self._manifest_queue:
|
||||||
|
entry_path, entry_bytes = manifest_update
|
||||||
|
count += 1
|
||||||
|
cache_bytes += entry_bytes
|
||||||
|
f.write(f"{entry_path.name} {entry_bytes}")
|
||||||
|
f.seek(0, io.SEEK_SET)
|
||||||
|
new_stats = f"{int(count)+1} {int(cache_bytes)}\n"
|
||||||
|
f.write(new_stats)
|
||||||
|
|
||||||
|
def _register_cache_update(self, cache_path: pathlib.Path, bytes_written: int):
|
||||||
|
"""Adds manifest entry to update queue for later updates to the manifest"""
|
||||||
|
self._manifest_queue.append((cache_path, bytes_written))
|
||||||
|
|
||||||
|
def _safe_remove(self, cache_dir: pathlib.Path):
|
||||||
|
"""Removes cache entries with handling for the case where the entry has been
|
||||||
|
removed already or there are multiple cache entries in a directory"""
|
||||||
|
try:
|
||||||
|
if cache_dir.is_dir():
|
||||||
|
cache_dir.rmdir()
|
||||||
|
else:
|
||||||
|
cache_dir.unlink()
|
||||||
|
return True
|
||||||
|
except FileNotFoundError:
|
||||||
|
# This is acceptable, removal is idempotent
|
||||||
|
pass
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.ENOTEMPTY:
|
||||||
|
# there exists another cache entry in this directory, don't clean yet
|
||||||
|
pass
|
||||||
|
return False
|
||||||
|
|
||||||
|
def store(self, problem: str, result: Result, statistics: List, test: bool = False):
|
||||||
|
"""Creates entry in concretization cache for problem if none exists,
|
||||||
|
storing the concretization Result object and statistics in the cache
|
||||||
|
as serialized json joined as a single file.
|
||||||
|
|
||||||
|
Hash membership is computed based on the sha256 of the provided asp
|
||||||
|
problem.
|
||||||
|
"""
|
||||||
|
cache_path = self._cache_path_from_problem(problem)
|
||||||
|
if self._fc.init_entry(cache_path):
|
||||||
|
# if an entry for this conc hash exists already, we don't want
|
||||||
|
# to overwrite, just exit
|
||||||
|
tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
|
||||||
|
return
|
||||||
|
with self._fc.write_transaction(cache_path) as (old, new):
|
||||||
|
if old:
|
||||||
|
# Entry for this conc hash exists already, do not overwrite
|
||||||
|
tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
|
||||||
|
return
|
||||||
|
cache_dict = {"results": result.to_dict(test=test), "statistics": statistics}
|
||||||
|
bytes_written = new.write(json.dumps(cache_dict))
|
||||||
|
self._register_cache_update(cache_path, bytes_written)
|
||||||
|
|
||||||
|
def fetch(self, problem: str) -> Union[Tuple[Result, List], Tuple[None, None]]:
|
||||||
|
"""Returns the concretization cache result for a lookup based on the given problem.
|
||||||
|
|
||||||
|
Checks the concretization cache for the given problem, and either returns the
|
||||||
|
Python objects cached on disk representing the concretization results and statistics
|
||||||
|
or returns none if no cache entry was found.
|
||||||
|
"""
|
||||||
|
cache_path = self._cache_path_from_problem(problem)
|
||||||
|
result, statistics = None, None
|
||||||
|
with self._fc.read_transaction(cache_path) as f:
|
||||||
|
if f:
|
||||||
|
result = self._results_from_cache(f)
|
||||||
|
statistics = self._stats_from_cache(f)
|
||||||
|
if result and statistics:
|
||||||
|
tty.debug(f"Concretization cache hit at {str(cache_path)}")
|
||||||
|
return result, statistics
|
||||||
|
tty.debug(f"Concretization cache miss at {str(cache_path)}")
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
|
CONC_CACHE: ConcretizationCache = llnl.util.lang.Singleton(
|
||||||
|
lambda: ConcretizationCache()
|
||||||
|
) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
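A rough round-trip sketch of the cache API defined above (the ASP problem text and the solve step are placeholders; run_solver is hypothetical):

# Hypothetical round trip through the concretization cache.
asp_problem = "..."  # placeholder: the generated ASP program plus control file contents
cache = ConcretizationCache()  # root defaults to config:concretization_cache:url
result, stats = cache.fetch(asp_problem)      # returns (None, None) on a miss
if result is None:
    result, stats = run_solver(asp_problem)   # placeholder for the real clingo solve
    cache.store(asp_problem, result, stats)   # skipped internally if an entry already exists
    cache.flush_manifest()                    # persist updated entry/byte totals
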
def _normalize_packages_yaml(packages_yaml):
|
def _normalize_packages_yaml(packages_yaml):
|
||||||
normalized_yaml = copy.copy(packages_yaml)
|
normalized_yaml = copy.copy(packages_yaml)
|
||||||
@@ -804,6 +1184,15 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
|||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
|
tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
|
||||||
spack.bootstrap.core.ensure_winsdk_external_or_raise()
|
spack.bootstrap.core.ensure_winsdk_external_or_raise()
|
||||||
|
control_files = ["concretize.lp", "heuristic.lp", "display.lp"]
|
||||||
|
if not setup.concretize_everything:
|
||||||
|
control_files.append("when_possible.lp")
|
||||||
|
if using_libc_compatibility():
|
||||||
|
control_files.append("libc_compatibility.lp")
|
||||||
|
else:
|
||||||
|
control_files.append("os_compatibility.lp")
|
||||||
|
if setup.enable_splicing:
|
||||||
|
control_files.append("splices.lp")
|
||||||
|
|
||||||
timer.start("setup")
|
timer.start("setup")
|
||||||
asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
|
asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
|
||||||
@@ -813,25 +1202,30 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
|||||||
return Result(specs), None, None
|
return Result(specs), None, None
|
||||||
timer.stop("setup")
|
timer.stop("setup")
|
||||||
|
|
||||||
|
timer.start("cache-check")
|
||||||
|
timer.start("ordering")
|
||||||
|
# ensure deterministic output
|
||||||
|
problem_repr = "\n".join(sorted(asp_problem.split("\n")))
|
||||||
|
timer.stop("ordering")
|
||||||
|
parent_dir = os.path.dirname(__file__)
|
||||||
|
full_path = lambda x: os.path.join(parent_dir, x)
|
||||||
|
abs_control_files = [full_path(x) for x in control_files]
|
||||||
|
for ctrl_file in abs_control_files:
|
||||||
|
with open(ctrl_file, "r+", encoding="utf-8") as f:
|
||||||
|
problem_repr += "\n" + f.read()
|
||||||
|
|
||||||
|
result = None
|
||||||
|
conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
|
||||||
|
if conc_cache_enabled:
|
||||||
|
result, concretization_stats = CONC_CACHE.fetch(problem_repr)
|
||||||
|
|
||||||
|
timer.stop("cache-check")
|
||||||
|
if not result:
|
||||||
timer.start("load")
|
timer.start("load")
|
||||||
# Add the problem instance
|
# Add the problem instance
|
||||||
self.control.add("base", [], asp_problem)
|
self.control.add("base", [], asp_problem)
|
||||||
# Load the file itself
|
# Load the files
|
||||||
parent_dir = os.path.dirname(__file__)
|
[self.control.load(lp) for lp in abs_control_files]
|
||||||
self.control.load(os.path.join(parent_dir, "concretize.lp"))
|
|
||||||
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
|
|
||||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
|
||||||
if not setup.concretize_everything:
|
|
||||||
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
|
|
||||||
|
|
||||||
# Binary compatibility is based on libc on Linux, and on the os tag elsewhere
|
|
||||||
if using_libc_compatibility():
|
|
||||||
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
|
|
||||||
else:
|
|
||||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
|
||||||
if setup.enable_splicing:
|
|
||||||
self.control.load(os.path.join(parent_dir, "splices.lp"))
|
|
||||||
|
|
||||||
timer.stop("load")
|
timer.stop("load")
|
||||||
|
|
||||||
# Grounding is the first step in the solve -- it turns our facts
|
# Grounding is the first step in the solve -- it turns our facts
|
||||||
@@ -866,7 +1260,9 @@ def on_model(model):
 finished = handle.wait(time_limit)
 if not finished:
 specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
-header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
+header = (
+f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
+)
 if error_on_timeout:
 raise UnsatisfiableSpecError(f"{header}, stopping concretization")
 warnings.warn(f"{header}, using the best configuration found so far")

@@ -890,7 +1286,9 @@ def on_model(model):
 error_handler.raise_if_errors()

 # build specs from spec attributes in the model
-spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
+spec_attrs = [
+(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")
+]
 answers = builder.build_specs(spec_attrs)

 # add best spec to the results

@@ -911,14 +1309,6 @@ def on_model(model):
 result.control = self.control
 result.cores.extend(cores)

-if output.timers:
-timer.write_tty()
-print()
-
-if output.stats:
-print("Statistics:")
-pprint.pprint(self.control.statistics)
-
 result.raise_if_unsat()

 if result.satisfiable and result.unsolved_specs and setup.concretize_everything:

@@ -928,8 +1318,17 @@ def on_model(model):
 " that do not satisfy the request. Please report a bug at "
 f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
 )
+if conc_cache_enabled:
+CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
+concretization_stats = self.control.statistics
+if output.timers:
+timer.write_tty()
+print()

-return result, timer, self.control.statistics
+if output.stats:
+print("Statistics:")
+pprint.pprint(concretization_stats)
+return result, timer, concretization_stats


 class ConcreteSpecsByHash(collections.abc.Mapping):
@@ -1371,7 +1770,7 @@ def effect_rules(self):
 return

 self.gen.h2("Imposed requirements")
-for name in self._effect_cache:
+for name in sorted(self._effect_cache):
 cache = self._effect_cache[name]
 for (spec_str, _), (effect_id, requirements) in cache.items():
 self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))

@@ -1424,8 +1823,8 @@ def define_variant(

 elif isinstance(values, vt.DisjointSetsOfValues):
 union = set()
-for sid, s in enumerate(values.sets):
-for value in s:
+for sid, s in enumerate(sorted(values.sets)):
+for value in sorted(s):
 pkg_fact(fn.variant_value_from_disjoint_sets(vid, value, sid))
 union.update(s)
 values = union

@@ -1606,7 +2005,7 @@ def package_provider_rules(self, pkg):
 self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(vpkg_name)))

 for when, provided in pkg.provided.items():
-for vpkg in provided:
+for vpkg in sorted(provided):
 if vpkg.name not in self.possible_virtuals:
 continue

@@ -1621,8 +2020,8 @@ def package_provider_rules(self, pkg):
 condition_id = self.condition(
 when, required_name=pkg.name, msg="Virtuals are provided together"
 )
-for set_id, virtuals_together in enumerate(sets_of_virtuals):
-for name in virtuals_together:
+for set_id, virtuals_together in enumerate(sorted(sets_of_virtuals)):
+for name in sorted(virtuals_together):
 self.gen.fact(
 fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
 )
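Note: the repeated switch from bare set/dict iteration to sorted() in the hunks above appears aimed at making the emitted ASP facts deterministic from one run to the next. A minimal illustration of the underlying issue, with hypothetical names that are not part of this diff:

    # Set iteration order is not stable across interpreter runs (hash randomization),
    # so facts generated from a raw set can come out in a different order each time.
    names = {"zlib", "mpich", "cmake"}
    print(list(names))    # order is not guaranteed to be reproducible
    print(sorted(names))  # ['cmake', 'mpich', 'zlib'] -- always the same order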
@@ -1656,13 +2055,16 @@ def track_dependencies(input_spec, requirements):
 return requirements + [fn.attr("track_dependencies", input_spec.name)]

 def dependency_holds(input_spec, requirements):
-return remove_node(input_spec, requirements) + [
+result = remove_node(input_spec, requirements) + [
 fn.attr(
 "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
 )
 for t in dt.ALL_FLAGS
 if t & depflag
 ]
+if input_spec.name not in pkg.extendees:
+return result
+return result + [fn.attr("extends", pkg.name, input_spec.name)]

 context = ConditionContext()
 context.source = ConstraintOrigin.append_type_suffix(

@@ -1729,7 +2131,7 @@ def package_splice_rules(self, pkg):
 for map in pkg.variants.values():
 for k in map:
 filt_match_variants.add(k)
-filt_match_variants = list(filt_match_variants)
+filt_match_variants = sorted(filt_match_variants)
 variant_constraints = self._gen_match_variant_splice_constraints(
 pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
 )

@@ -2259,7 +2661,7 @@ def define_package_versions_and_validate_preferences(
 ):
 """Declare any versions in specs not declared in packages."""
 packages_yaml = spack.config.get("packages")
-for pkg_name in possible_pkgs:
+for pkg_name in sorted(possible_pkgs):
 pkg_cls = self.pkg_class(pkg_name)

 # All the versions from the corresponding package.py file. Since concepts

@@ -2587,7 +2989,7 @@ def define_variant_values(self):
 """
 # Tell the concretizer about possible values from specs seen in spec_clauses().
 # We might want to order these facts by pkg and name if we are debugging.
-for pkg_name, variant_def_id, value in self.variant_values_from_specs:
+for pkg_name, variant_def_id, value in sorted(self.variant_values_from_specs):
 try:
 vid = self.variant_ids_by_def_id[variant_def_id]
 except KeyError:

@@ -2625,6 +3027,8 @@ def concrete_specs(self):
 # Declare as possible parts of specs that are not in package.py
 # - Add versions to possible versions
 # - Add OS to possible OS's

+# is traverse deterministic?
 for dep in spec.traverse():
 self.possible_versions[dep.name].add(dep.version)
 if isinstance(dep.version, vn.GitVersion):
@@ -2862,7 +3266,7 @@ def define_runtime_constraints(self):
 recorder.consume_facts()

 def literal_specs(self, specs):
-for spec in specs:
+for spec in sorted(specs):
 self.gen.h2("Spec: %s" % str(spec))
 condition_id = next(self._id_counter)
 trigger_id = next(self._id_counter)

@@ -3363,7 +3767,7 @@ def consume_facts(self):
 # on the available compilers)
 self._setup.pkg_version_rules(runtime_pkg)

-for imposed_spec, when_spec in self.runtime_conditions:
+for imposed_spec, when_spec in sorted(self.runtime_conditions):
 msg = f"{when_spec} requires {imposed_spec} at runtime"
 _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)

@@ -3702,11 +4106,11 @@ def build_specs(self, function_tuples):
 roots = [spec.root for spec in self._specs.values()]
 roots = dict((id(r), r) for r in roots)
 for root in roots.values():
-spack.spec.Spec.inject_patches_variant(root)
+_inject_patches_variant(root)

 # Add external paths to specs with just external modules
 for s in self._specs.values():
-spack.spec.Spec.ensure_external_path_if_external(s)
+_ensure_external_path_if_external(s)

 for s in self._specs.values():
 _develop_specs_from_env(s, ev.active_environment())
@@ -3778,6 +4182,92 @@ def execute_explicit_splices(self):
 return specs


+def _inject_patches_variant(root: spack.spec.Spec) -> None:
+# This dictionary will store object IDs rather than Specs as keys
+# since the Spec __hash__ will change as patches are added to them
+spec_to_patches: Dict[int, Set[spack.patch.Patch]] = {}
+for s in root.traverse():
+# After concretizing, assign namespaces to anything left.
+# Note that this doesn't count as a "change". The repository
+# configuration is constant throughout a spack run, and
+# normalize and concretize evaluate Packages using Repo.get(),
+# which respects precedence. So, a namespace assignment isn't
+# changing how a package name would have been interpreted and
+# we can do it as late as possible to allow as much
+# compatibility across repositories as possible.
+if s.namespace is None:
+s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
+
+if s.concrete:
+continue
+
+# Add any patches from the package to the spec.
+node_patches = {
+patch
+for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items()
+if s.satisfies(cond)
+for patch in patch_list
+}
+if node_patches:
+spec_to_patches[id(s)] = node_patches
+
+# Also record all patches required on dependencies by depends_on(..., patch=...)
+for dspec in root.traverse_edges(deptype=dt.ALL, cover="edges", root=False):
+if dspec.spec.concrete:
+continue
+
+pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
+
+edge_patches: List[spack.patch.Patch] = []
+for cond, deps_by_name in pkg_deps.items():
+if not dspec.parent.satisfies(cond):
+continue
+
+dependency = deps_by_name.get(dspec.spec.name)
+if not dependency:
+continue
+
+for pcond, patch_list in dependency.patches.items():
+if dspec.spec.satisfies(pcond):
+edge_patches.extend(patch_list)
+
+if edge_patches:
+spec_to_patches.setdefault(id(dspec.spec), set()).update(edge_patches)
+
+for spec in root.traverse():
+if id(spec) not in spec_to_patches:
+continue
+
+patches = list(spec_to_patches[id(spec)])
+variant: vt.MultiValuedVariant = spec.variants.setdefault(
+"patches", vt.MultiValuedVariant("patches", ())
+)
+variant.value = tuple(p.sha256 for p in patches)
+# FIXME: Monkey patches variant to store patches order
+ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
+ordered_hashes.sort()
+tty.debug(
+f"Ordered hashes [{spec.name}]: "
++ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
+)
+setattr(
+variant, "_patches_in_order_of_appearance", [sha256 for _, _, sha256 in ordered_hashes]
+)
+
+
+def _ensure_external_path_if_external(spec: spack.spec.Spec) -> None:
+if not spec.external_modules or spec.external_path:
+return
+
+# Get the path from the module the package can override the default
+# (this is mostly needed for Cray)
+pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
+package = pkg_cls(spec)
+spec.external_path = getattr(package, "external_prefix", None) or md.path_from_modules(
+spec.external_modules
+)


 def _develop_specs_from_env(spec, env):
 dev_info = env.dev_specs.get(spec.name, {}) if env else {}
 if not dev_info:
|
|||||||
reusable_specs.extend(self.selector.reusable_specs(specs))
|
reusable_specs.extend(self.selector.reusable_specs(specs))
|
||||||
setup = SpackSolverSetup(tests=tests)
|
setup = SpackSolverSetup(tests=tests)
|
||||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
||||||
|
|
||||||
|
CONC_CACHE.flush_manifest()
|
||||||
|
CONC_CACHE.cleanup()
|
||||||
return self.driver.solve(
|
return self.driver.solve(
|
||||||
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
||||||
)
|
)
|
||||||
@@ -4203,6 +4696,9 @@ def solve_in_rounds(
|
|||||||
for spec in result.specs:
|
for spec in result.specs:
|
||||||
reusable_specs.extend(spec.traverse())
|
reusable_specs.extend(spec.traverse())
|
||||||
|
|
||||||
|
CONC_CACHE.flush_manifest()
|
||||||
|
CONC_CACHE.cleanup()
|
||||||
|
|
||||||
|
|
||||||
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
|
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
|
||||||
"""There was an issue with the spec that was requested (i.e. a user error)."""
|
"""There was an issue with the spec that was requested (i.e. a user error)."""
|
||||||
|
@@ -524,6 +524,16 @@ error(10, "'{0}' is not a valid dependency for any package in the DAG", Package)
 :- attr("node", node(ID, Package)),
 not needed(node(ID, Package)).


+% Extensions depending on each other must all extend the same node (e.g. all Python packages
+% depending on each other must depend on the same Python interpreter)
+error(100, "{0} and {1} must depend on the same {2}", ExtensionParent, ExtensionChild, ExtendeePackage)
+:- depends_on(ExtensionParent, ExtensionChild),
+attr("extends", ExtensionParent, ExtendeePackage),
+depends_on(ExtensionParent, node(X, ExtendeePackage)),
+depends_on(ExtensionChild, node(Y, ExtendeePackage)),
+X != Y.

 #defined dependency_type/2.

 %-----------------------------------------------------------------------------
@@ -99,7 +99,6 @@
 import spack.traverse
 import spack.util.executable
 import spack.util.hash
-import spack.util.module_cmd as md
 import spack.util.prefix
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml

@@ -799,7 +798,7 @@ def update_deptypes(self, depflag: dt.DepFlag) -> bool:
 self.depflag = new
 return True

-def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
+def update_virtuals(self, virtuals: Iterable[str]) -> bool:
 """Update the list of provided virtuals"""
 old = self.virtuals
 self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
@@ -2119,20 +2118,20 @@ def cshort_spec(self):
 return self.cformat(spec_format)

 @property
-def prefix(self):
+def prefix(self) -> spack.util.prefix.Prefix:
 if not self._concrete:
-raise spack.error.SpecError("Spec is not concrete: " + str(self))
+raise spack.error.SpecError(f"Spec is not concrete: {self}")

 if self._prefix is None:
-upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
+_, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
 if record and record.path:
-self.prefix = record.path
+self.set_prefix(record.path)
 else:
-self.prefix = spack.store.STORE.layout.path_for_spec(self)
+self.set_prefix(spack.store.STORE.layout.path_for_spec(self))
+assert self._prefix is not None
 return self._prefix

-@prefix.setter
-def prefix(self, value):
+def set_prefix(self, value: str) -> None:
 self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))

 def spec_hash(self, hash):
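Note: the property setter above is replaced by an explicit set_prefix() method, so call sites that used to assign spec.prefix now call the method instead, as the test changes later in this diff show. A minimal sketch of the new usage, assuming an existing Spec object named spec and a hypothetical path:

    # before: spec.prefix = "/opt/mypkg"
    # after:
    spec.set_prefix("/opt/mypkg")  # wraps the value in spack.util.prefix.Prefix internally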
@@ -2738,7 +2737,7 @@ def spec_and_dependency_types(
 return spec_builder(spec_dict)

 @staticmethod
-def from_dict(data):
+def from_dict(data) -> "Spec":
 """Construct a spec from JSON/YAML.

 Args:

@@ -2761,7 +2760,7 @@ def from_dict(data):
 return spec

 @staticmethod
-def from_yaml(stream):
+def from_yaml(stream) -> "Spec":
 """Construct a spec from YAML.

 Args:

@@ -2771,7 +2770,7 @@ def from_yaml(stream):
 return Spec.from_dict(data)

 @staticmethod
-def from_json(stream):
+def from_json(stream) -> "Spec":
 """Construct a spec from JSON.

 Args:

@@ -2781,7 +2780,7 @@ def from_json(stream):
 data = sjson.load(stream)
 return Spec.from_dict(data)
 except Exception as e:
-raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
+raise sjson.SpackJSONError("error parsing JSON spec:", e) from e

 @staticmethod
 def extract_json_from_clearsig(data):
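Note: the static constructors above only gain "Spec" return annotations; their behavior is unchanged. A small round-trip sketch under that assumption, with a hypothetical file name and assuming spec is an existing concrete Spec:

    import spack.spec

    # to_json() produces the JSON text that from_json() later reads back from a stream
    with open("mpileaks.json", "w", encoding="utf-8") as f:
        f.write(spec.to_json())
    with open("mpileaks.json", "r", encoding="utf-8") as f:
        restored = spack.spec.Spec.from_json(f)  # returns a Spec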
@@ -2845,94 +2844,6 @@ def _patches_assigned(self):

 return True

-@staticmethod
-def inject_patches_variant(root):
-# This dictionary will store object IDs rather than Specs as keys
-# since the Spec __hash__ will change as patches are added to them
-spec_to_patches = {}
-for s in root.traverse():
-# After concretizing, assign namespaces to anything left.
-# Note that this doesn't count as a "change". The repository
-# configuration is constant throughout a spack run, and
-# normalize and concretize evaluate Packages using Repo.get(),
-# which respects precedence. So, a namespace assignment isn't
-# changing how a package name would have been interpreted and
-# we can do it as late as possible to allow as much
-# compatibility across repositories as possible.
-if s.namespace is None:
-s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
-
-if s.concrete:
-continue
-
-# Add any patches from the package to the spec.
-patches = set()
-for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
-if s.satisfies(cond):
-for patch in patch_list:
-patches.add(patch)
-if patches:
-spec_to_patches[id(s)] = patches
-
-# Also record all patches required on dependencies by
-# depends_on(..., patch=...)
-for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
-if dspec.spec.concrete:
-continue
-
-pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
-
-patches = []
-for cond, deps_by_name in pkg_deps.items():
-if not dspec.parent.satisfies(cond):
-continue
-
-dependency = deps_by_name.get(dspec.spec.name)
-if not dependency:
-continue
-
-for pcond, patch_list in dependency.patches.items():
-if dspec.spec.satisfies(pcond):
-patches.extend(patch_list)
-
-if patches:
-all_patches = spec_to_patches.setdefault(id(dspec.spec), set())
-for patch in patches:
-all_patches.add(patch)
-
-for spec in root.traverse():
-if id(spec) not in spec_to_patches:
-continue
-
-patches = list(lang.dedupe(spec_to_patches[id(spec)]))
-mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
-mvar.value = tuple(p.sha256 for p in patches)
-# FIXME: Monkey patches mvar to store patches order
-full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
-ordered_hashes = sorted(full_order_keys)
-tty.debug(
-"Ordered hashes [{0}]: ".format(spec.name)
-+ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
-)
-mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
-
-@staticmethod
-def ensure_external_path_if_external(external_spec):
-if external_spec.external_modules and not external_spec.external_path:
-compiler = spack.compilers.compiler_for_spec(
-external_spec.compiler, external_spec.architecture
-)
-for mod in compiler.modules:
-md.load_module(mod)
-
-# Get the path from the module the package can override the default
-# (this is mostly needed for Cray)
-pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
-package = pkg_cls(external_spec)
-external_spec.external_path = getattr(
-package, "external_prefix", md.path_from_modules(external_spec.external_modules)
-)
-
 @staticmethod
 def ensure_no_deprecated(root):
 """Raise if a deprecated spec is in the dag.
@@ -4704,17 +4615,6 @@ def constrain(self, other: "VariantMap") -> bool:

 return changed

-@property
-def concrete(self):
-"""Returns True if the spec is concrete in terms of variants.
-
-Returns:
-bool: True or False
-"""
-return self.spec._concrete or all(
-v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
-)

 def copy(self) -> "VariantMap":
 clone = VariantMap(self.spec)
 for name, variant in self.items():
@@ -4841,33 +4741,51 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
 return merged_spec


-def reconstruct_virtuals_on_edges(spec):
-"""Reconstruct virtuals on edges. Used to read from old DB and reindex.
-
-Args:
-spec: spec on which we want to reconstruct virtuals
-"""
-# Collect all possible virtuals
-possible_virtuals = set()
-for node in spec.traverse():
-try:
-possible_virtuals.update(
-{x for x in node.package.dependencies if spack.repo.PATH.is_virtual(x)}
-)
-except Exception as e:
-warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
-continue
-
-# Assume all incoming edges to provider are marked with virtuals=
-for vspec in possible_virtuals:
-try:
-provider = spec[vspec]
-except KeyError:
-# Virtual not in the DAG
-continue
-
-for edge in provider.edges_from_dependents():
-edge.update_virtuals([vspec])
+def reconstruct_virtuals_on_edges(spec: Spec) -> None:
+"""Reconstruct virtuals on edges. Used to read from old DB and reindex."""
+virtuals_needed: Dict[str, Set[str]] = {}
+virtuals_provided: Dict[str, Set[str]] = {}
+for edge in spec.traverse_edges(cover="edges", root=False):
+parent_key = edge.parent.dag_hash()
+if parent_key not in virtuals_needed:
+# Construct which virtuals are needed by parent
+virtuals_needed[parent_key] = set()
+try:
+parent_pkg = edge.parent.package
+except Exception as e:
+warnings.warn(
+f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
+)
+continue
+
+virtuals_needed[parent_key].update(
+name
+for name, when_deps in parent_pkg.dependencies_by_name(when=True).items()
+if spack.repo.PATH.is_virtual(name)
+and any(edge.parent.satisfies(x) for x in when_deps)
+)
+
+if not virtuals_needed[parent_key]:
+continue
+
+child_key = edge.spec.dag_hash()
+if child_key not in virtuals_provided:
+virtuals_provided[child_key] = set()
+try:
+child_pkg = edge.spec.package
+except Exception as e:
+warnings.warn(
+f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
+)
+continue
+virtuals_provided[child_key].update(x.name for x in child_pkg.virtuals_provided)
+
+if not virtuals_provided[child_key]:
+continue
+
+virtuals_to_add = virtuals_needed[parent_key] & virtuals_provided[child_key]
+if virtuals_to_add:
+edge.update_virtuals(virtuals_to_add)


 class SpecfileReaderBase:
@@ -5212,6 +5130,13 @@ def get_host_environment() -> Dict[str, Any]:
 }


+def eval_conditional(string):
+"""Evaluate conditional definitions using restricted variable scope."""
+valid_variables = get_host_environment()
+valid_variables.update({"re": re, "env": os.environ})
+return eval(string, valid_variables)


 class SpecParseError(spack.error.SpecError):
 """Wrapper for ParseError for when we're parsing specs."""

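Note: eval_conditional() above evaluates a Python expression string against the host-environment variables plus the re module and os.environ (exposed as env). A minimal usage sketch; the expression itself is a made-up example:

    # True when the SPACK_STACK environment variable equals "prod"
    eval_conditional("env.get('SPACK_STACK', '') == 'prod'")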
@@ -200,7 +200,11 @@ def dummy_prefix(tmpdir):
 @pytest.mark.requires_executables(*required_executables)
 @pytest.mark.maybeslow
 @pytest.mark.usefixtures(
-"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
+"default_config",
+"cache_directory",
+"install_dir_default_layout",
+"temporary_mirror",
+"mutable_mock_env_path",
 )
 def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
 """

@@ -272,7 +276,11 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
 @pytest.mark.maybeslow
 @pytest.mark.nomockstage
 @pytest.mark.usefixtures(
-"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
+"default_config",
+"cache_directory",
+"install_dir_default_layout",
+"temporary_mirror",
+"mutable_mock_env_path",
 )
 def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
 """

@@ -569,7 +577,6 @@ def test_FetchCacheError_only_accepts_lists_of_errors():
 def test_FetchCacheError_pretty_printing_multiple():
 e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
 str_e = str(e)
-print("'" + str_e + "'")
 assert "Multiple errors" in str_e
 assert "Error 1: RuntimeError: Oops!" in str_e
 assert "Error 2: TypeError: Trouble!" in str_e
@@ -388,7 +388,7 @@ def test_wrapper_variables(
 root = spack.concretize.concretize_one("dt-diamond")

 for s in root.traverse():
-s.prefix = "/{0}-prefix/".format(s.name)
+s.set_prefix(f"/{s.name}-prefix/")

 dep_pkg = root["dt-diamond-left"].package
 dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]

@@ -396,7 +396,7 @@ def test_wrapper_variables(
 dep_libs = LibraryList(dep_lib_paths)

 dep2_pkg = root["dt-diamond-right"].package
-dep2_pkg.spec.prefix = str(installation_dir_with_headers)
+dep2_pkg.spec.set_prefix(str(installation_dir_with_headers))

 setattr(dep_pkg, "libs", dep_libs)
 try:
@@ -542,7 +542,7 @@ def test_build_jobs_sequential_is_sequential():
 spack.config.determine_number_of_jobs(
 parallel=False,
 max_cpus=8,
-config=spack.config.Configuration(
+config=spack.config.create_from(
 spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
 spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
 ),

@@ -556,7 +556,7 @@ def test_build_jobs_command_line_overrides():
 spack.config.determine_number_of_jobs(
 parallel=True,
 max_cpus=1,
-config=spack.config.Configuration(
+config=spack.config.create_from(
 spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
 spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
 ),

@@ -567,7 +567,7 @@ def test_build_jobs_command_line_overrides():
 spack.config.determine_number_of_jobs(
 parallel=True,
 max_cpus=100,
-config=spack.config.Configuration(
+config=spack.config.create_from(
 spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
 spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
 ),

@@ -581,7 +581,7 @@ def test_build_jobs_defaults():
 spack.config.determine_number_of_jobs(
 parallel=True,
 max_cpus=10,
-config=spack.config.Configuration(
+config=spack.config.create_from(
 spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
 ),
 )

@@ -591,7 +591,7 @@ def test_build_jobs_defaults():
 spack.config.determine_number_of_jobs(
 parallel=True,
 max_cpus=10,
-config=spack.config.Configuration(
+config=spack.config.create_from(
 spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
 ),
 )
|
|||||||
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
|
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
|
||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.prefix = str(tmpdir.join("fst"))
|
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
||||||
build_dep_two.prefix = str(tmpdir.join("snd"))
|
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
||||||
"-I",
|
"-I",
|
||||||
aclocal_fst,
|
aclocal_fst,
|
||||||
@@ -422,8 +422,8 @@ def test_autoreconf_search_path_args_skip_automake(default_mock_concretization,
|
|||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.name = "automake"
|
build_dep_one.name = "automake"
|
||||||
build_dep_one.prefix = str(tmpdir.join("fst"))
|
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
||||||
build_dep_two.prefix = str(tmpdir.join("snd"))
|
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
|
||||||
|
|
||||||
|
|
||||||
@@ -434,7 +434,7 @@ def test_autoreconf_search_path_args_external_order(default_mock_concretization,
|
|||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.external_path = str(tmpdir.join("fst"))
|
build_dep_one.external_path = str(tmpdir.join("fst"))
|
||||||
build_dep_two.prefix = str(tmpdir.join("snd"))
|
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
||||||
"-I",
|
"-I",
|
||||||
aclocal_snd,
|
aclocal_snd,
|
||||||
@@ -447,8 +447,8 @@ def test_autoreconf_search_path_skip_nonexisting(default_mock_concretization, tm
|
|||||||
"""Skip -I flags for non-existing directories"""
|
"""Skip -I flags for non-existing directories"""
|
||||||
spec = default_mock_concretization("dttop")
|
spec = default_mock_concretization("dttop")
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.prefix = str(tmpdir.join("fst"))
|
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
||||||
build_dep_two.prefix = str(tmpdir.join("snd"))
|
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []
|
||||||
|
|
||||||
|
|
||||||
|
@@ -210,7 +210,6 @@ def check_args_contents(cc, args, must_contain, must_not_contain):
 """
 with set_env(SPACK_TEST_COMMAND="dump-args"):
 cc_modified_args = cc(*args, output=str).strip().split("\n")
-print(cc_modified_args)
 for a in must_contain:
 assert a in cc_modified_args
 for a in must_not_contain:

@@ -347,7 +347,6 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
 for key, val in expectations.items():
 affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
 affected_pkg_names = set([s.name for s in affected_specs])
-print(f"{key}: {affected_pkg_names}")
 assert affected_pkg_names == val


@@ -214,9 +214,7 @@ def verify_mirror_contents():
 if in_env_pkg in p:
 found_pkg = True

-if not found_pkg:
-print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
-assert False
+assert found_pkg, f"Expected to find {in_env_pkg} in {dest_mirror_dir}"

 # Install a package and put it in the buildcache
 s = spack.concretize.concretize_one(out_env_pkg)
@@ -5,6 +5,7 @@
 import filecmp
 import os
 import shutil
+import textwrap

 import pytest

|
|||||||
def test_updated_completion_scripts(shell, tmpdir):
|
def test_updated_completion_scripts(shell, tmpdir):
|
||||||
"""Make sure our shell tab completion scripts remain up-to-date."""
|
"""Make sure our shell tab completion scripts remain up-to-date."""
|
||||||
|
|
||||||
msg = (
|
width = 72
|
||||||
|
lines = textwrap.wrap(
|
||||||
"It looks like Spack's command-line interface has been modified. "
|
"It looks like Spack's command-line interface has been modified. "
|
||||||
"Please update Spack's shell tab completion scripts by running:\n\n"
|
"If differences are more than your global 'include:' scopes, please "
|
||||||
" spack commands --update-completion\n\n"
|
"update Spack's shell tab completion scripts by running:",
|
||||||
"and adding the changed files to your pull request."
|
width,
|
||||||
)
|
)
|
||||||
|
lines.append("\n spack commands --update-completion\n")
|
||||||
|
lines.extend(
|
||||||
|
textwrap.wrap(
|
||||||
|
"and adding the changed files (minus your global 'include:' scopes) "
|
||||||
|
"to your pull request.",
|
||||||
|
width,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
msg = "\n".join(lines)
|
||||||
|
|
||||||
header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
|
header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
|
||||||
script = "spack-completion.{0}".format(shell)
|
script = f"spack-completion.{shell}"
|
||||||
old_script = os.path.join(spack.paths.share_path, script)
|
old_script = os.path.join(spack.paths.share_path, script)
|
||||||
new_script = str(tmpdir.join(script))
|
new_script = str(tmpdir.join(script))
|
||||||
|
|
||||||
|
@@ -213,7 +213,7 @@ def test_config_add_update_dict(mutable_empty_config):

 def test_config_with_c_argument(mutable_empty_config):
 # I don't know how to add a spack argument to a Spack Command, so we test this way
-config_file = "config:install_root:root:/path/to/config.yaml"
+config_file = "config:install_tree:root:/path/to/config.yaml"
 parser = spack.main.make_argument_parser()
 args = parser.parse_args(["-c", config_file])
 assert config_file in args.config_vars

@@ -221,7 +221,7 @@ def test_config_with_c_argument(mutable_empty_config):
 # Add the path to the config
 config("add", args.config_vars[0], scope="command_line")
 output = config("get", "config")
-assert "config:\n install_root:\n root: /path/to/config.yaml" in output
+assert "config:\n install_tree:\n root: /path/to/config.yaml" in output


 def test_config_add_ordered_dict(mutable_empty_config):
@@ -15,6 +15,9 @@
 deprecate = SpackCommand("deprecate")
 find = SpackCommand("find")

+# Unit tests should not be affected by the user's managed environments
+pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")


 def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
 install("--fake", "libelf@0.8.13")
@@ -1067,13 +1067,17 @@ def test_init_from_yaml_relative_includes(tmp_path):
 assert os.path.exists(os.path.join(e2.path, f))


+# TODO: Should we be supporting relative path rewrites when creating new env from existing?
+# TODO: If so, then this should confirm that the absolute include paths in the new env exist.
 def test_init_from_yaml_relative_includes_outside_env(tmp_path):
-files = ["../outside_env_not_copied/repos.yaml"]
+"""Ensure relative includes to files outside the environment fail."""
+files = ["../outside_env/repos.yaml"]

 manifest = f"""
 spack:
 specs: []
-include: {files}
+include:
+- path: {files[0]}
 """

 # subdir to ensure parent of environment dir is not shared

@@ -1086,7 +1090,7 @@ def test_init_from_yaml_relative_includes_outside_env(tmp_path):
 for f in files:
 fs.touchp(e1_path / f)

-with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
+with pytest.raises(ValueError, match="does not exist"):
 _ = _env_create("test2", init_file=e1_manifest)

@@ -1186,14 +1190,14 @@ def test_env_with_config(environment_from_manifest):


 def test_with_config_bad_include_create(environment_from_manifest):
-"""Confirm missing include paths raise expected exception and error."""
-with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
+"""Confirm missing required include raises expected exception."""
+err = "does not exist"
+with pytest.raises(ValueError, match=err):
 environment_from_manifest(
 """
 spack:
 include:
 - /no/such/directory
-- no/such/file.yaml
 """
 )

@@ -1203,34 +1207,25 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
 include1 = env_root / "include1.yaml"
 include1.touch()

-abs_include_path = os.path.abspath(tmpdir.join("subdir").ensure("include2.yaml"))

 spack_yaml = env_root / ev.manifest_name
 spack_yaml.write_text(
-f"""
+"""
 spack:
 include:
 - ./include1.yaml
-- {abs_include_path}
 """
 )

 with ev.Environment(env_root) as e:
 e.concretize()

-# we've created an environment with some included config files (which do
-# in fact exist): now we remove them and check that we get a sensible
-# error message
+# We've created an environment with included config file (which does
+# exist). Now we remove it and check that we get a sensible error.

-os.remove(abs_include_path)
 os.remove(include1)
-with pytest.raises(spack.config.ConfigFileError) as exc:
+with pytest.raises(ValueError, match="does not exist"):
 ev.activate(ev.Environment(env_root))

-err = exc.value.message
-assert "missing include" in err
-assert abs_include_path in err
-assert "include1.yaml" in err
 assert ev.active_environment() is None

@@ -1338,8 +1333,10 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
 included file scope.
 """

+env_path = tmp_path / "test_config"
+fs.mkdirp(env_path)
 included_file = "included-packages.yaml"
-included_path = tmp_path / included_file
+included_path = env_path / included_file
 with open(included_path, "w", encoding="utf-8") as f:
 f.write(
 """\

@@ -1355,7 +1352,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
 """
 )

-spack_yaml = tmp_path / ev.manifest_name
+spack_yaml = env_path / ev.manifest_name
 spack_yaml.write_text(
 f"""\
 spack:

@@ -1369,7 +1366,8 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
 """
 )

-e = ev.Environment(tmp_path)
+mutable_config.set("config:misc_cache", str(tmp_path / "cache"))
+e = ev.Environment(env_path)
 with e:
 # List of requirements, flip a variant
 config("change", "packages:mpich:require:~debug")

@@ -1459,19 +1457,6 @@ def test_env_with_included_config_file_url(tmpdir, mutable_empty_config, package
 assert cfg["mpileaks"]["version"] == ["2.2"]


-def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
-"""Test inclusion of a missing configuration file raises FetchError
-noting missing file."""
-
-spack_yaml = tmpdir.join("spack.yaml")
-missing_file = tmpdir.join("packages.yaml")
-with spack_yaml.open("w") as f:
-f.write("spack:\n include:\n - {0}\n".format(missing_file.strpath))
-
-with pytest.raises(spack.error.ConfigError, match="missing include path"):
-ev.Environment(tmpdir.strpath)


 def test_env_with_included_config_scope(mutable_mock_env_path, packages_file):
 """Test inclusion of a package file from the environment's configuration
 stage directory. This test is intended to represent a case where a remote

@@ -1566,7 +1551,7 @@ def test_env_with_included_config_precedence(tmp_path):


 def test_env_with_included_configs_precedence(tmp_path):
-"""Test precendence of multiple included configuration files."""
+"""Test precedence of multiple included configuration files."""
 file1 = "high-config.yaml"
 file2 = "low-config.yaml"

@@ -4277,21 +4262,31 @@ def test_unify_when_possible_works_around_conflicts():
 assert len([x for x in e.all_specs() if x.satisfies("mpich")]) == 1


+# Using mock_include_cache to ensure the "remote" file is cached in a temporary
+# location and not polluting the user cache.
 def test_env_include_packages_url(
-tmpdir, mutable_empty_config, mock_spider_configs, mock_curl_configs
+tmpdir, mutable_empty_config, mock_fetch_url_text, mock_curl_configs, mock_include_cache
 ):
 """Test inclusion of a (GitHub) URL."""
 develop_url = "https://github.com/fake/fake/blob/develop/"
 default_packages = develop_url + "etc/fake/defaults/packages.yaml"
+sha256 = "a422e35b3a18869d0611a4137b37314131749ecdc070a7cd7183f488da81201a"
 spack_yaml = tmpdir.join("spack.yaml")
 with spack_yaml.open("w") as f:
-f.write("spack:\n include:\n - {0}\n".format(default_packages))
-assert os.path.isfile(spack_yaml.strpath)
+f.write(
+f"""\
+spack:
+include:
+- path: {default_packages}
+sha256: {sha256}
+"""
+)

 with spack.config.override("config:url_fetch_method", "curl"):
 env = ev.Environment(tmpdir.strpath)
 ev.activate(env)

+# Make sure a setting from test/data/config/packages.yaml is present
 cfg = spack.config.get("packages")
 assert "mpich" in cfg["all"]["providers"]["mpi"]


@@ -4360,7 +4355,7 @@ def test_env_view_disabled(tmp_path, mutable_mock_env_path):


 @pytest.mark.parametrize("first", ["false", "true", "custom"])
-def test_env_include_mixed_views(tmp_path, mutable_mock_env_path, mutable_config, first):
+def test_env_include_mixed_views(tmp_path, mutable_config, mutable_mock_env_path, first):
 """Ensure including path and boolean views in different combinations result
 in the creation of only the first view if it is not disabled."""
 false_yaml = tmp_path / "false-view.yaml"
@@ -718,10 +718,11 @@ def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
|
|||||||
assert os.path.exists(root.prefix)
|
assert os.path.exists(root.prefix)
|
||||||
|
|
||||||
|
|
||||||
|
# Unit tests should not be affected by the user's managed environments
|
||||||
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
|
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
|
||||||
@pytest.mark.regression("12002")
|
@pytest.mark.regression("12002")
|
||||||
def test_install_only_dependencies_in_env(
|
def test_install_only_dependencies_in_env(
|
||||||
tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
|
tmpdir, mutable_mock_env_path, mock_fetch, install_mockery
|
||||||
):
|
):
|
||||||
env("create", "test")
|
env("create", "test")
|
||||||
|
|
||||||
@@ -735,9 +736,10 @@ def test_install_only_dependencies_in_env(
|
|||||||
assert not os.path.exists(root.prefix)
|
assert not os.path.exists(root.prefix)
|
||||||
|
|
||||||
|
|
||||||
|
# Unit tests should not be affected by the user's managed environments
|
||||||
@pytest.mark.regression("12002")
|
@pytest.mark.regression("12002")
|
||||||
def test_install_only_dependencies_of_all_in_env(
|
def test_install_only_dependencies_of_all_in_env(
|
||||||
tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
|
tmpdir, mutable_mock_env_path, mock_fetch, install_mockery
|
||||||
):
|
):
|
||||||
env("create", "--without-view", "test")
|
env("create", "--without-view", "test")
|
||||||
|
|
||||||
@@ -757,7 +759,8 @@ def test_install_only_dependencies_of_all_in_env(
assert os.path.exists(dep.prefix)


-def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path):
+# Unit tests should not be affected by the user's managed environments
+def test_install_no_add_in_env(tmpdir, mutable_mock_env_path, mock_fetch, install_mockery):
# To test behavior of --add option, we create the following environment:
#
# mpileaks
@@ -898,7 +901,6 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
specfile = "./spec.json"
with open(specfile, "w", encoding="utf-8") as f:
f.write(spec.to_json())
-print(spec.to_json())
install("--log-file=cdash_reports", "--log-format=cdash", specfile)
# Verify Configure.xml exists with expected contents.
report_dir = tmpdir.join("cdash_reports")
@@ -933,9 +935,10 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
assert "using the `spack.yaml` in this directory" in output


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_env_with_tests_all(
-tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env("create", "test")
with ev.read("test"):
@@ -945,9 +948,10 @@ def test_install_env_with_tests_all(
assert os.path.exists(test_dep.prefix)


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_env_with_tests_root(
-tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env("create", "test")
with ev.read("test"):
@@ -957,9 +961,10 @@ def test_install_env_with_tests_root(
assert not os.path.exists(test_dep.prefix)


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_empty_env(
-tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env_name = "empty"
env("create", env_name)
@@ -995,9 +1000,17 @@ def test_installation_fail_tests(install_mockery, mock_fetch, name, method):
assert "See test log for details" in output


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Buildcache not supported on windows")
def test_install_use_buildcache(
-capsys, mock_packages, mock_fetch, mock_archive, mock_binary_index, tmpdir, install_mockery
+capsys,
+mutable_mock_env_path,
+mock_packages,
+mock_fetch,
+mock_archive,
+mock_binary_index,
+tmpdir,
+install_mockery,
):
"""
Make sure installing with use-buildcache behaves correctly.
@@ -12,6 +12,9 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")

+# Unit tests should not be affected by the user's managed environments
+pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")


@pytest.mark.db
def test_mark_mode_required(mutable_database):
@@ -38,8 +38,9 @@ def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
assert "as it is an external spec" in output


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.regression("12345")
-def test_mirror_from_env(tmp_path, mock_packages, mock_fetch, mutable_mock_env_path):
+def test_mirror_from_env(mutable_mock_env_path, tmp_path, mock_packages, mock_fetch):
mirror_dir = str(tmp_path / "mirror")
env_name = "test"
@@ -342,8 +343,16 @@ def test_mirror_name_collision(mutable_config):
mirror("add", "first", "1")


+# Unit tests should not be affected by the user's managed environments
def test_mirror_destroy(
-install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch, tmpdir
+mutable_mock_env_path,
+install_mockery,
+mock_packages,
+mock_fetch,
+mock_archive,
+mutable_config,
+monkeypatch,
+tmpdir,
):
# Create a temp mirror directory for buildcache usage
mirror_dir = tmpdir.join("mirror_dir")
@@ -42,7 +42,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):
repo_dir = root_dir / "builtin.mock"
shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))

-repo_cache = spack.util.file_cache.FileCache(str(root_dir / "cache"))
+repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")
mock_repo = spack.repo.RepoPath(str(repo_dir), cache=repo_cache)
mock_repo_packages = mock_repo.repos[0].packages_path
@@ -5,9 +5,13 @@

import pytest

+import spack.config
+import spack.environment as ev
import spack.main
+from spack.main import SpackCommand

repo = spack.main.SpackCommand("repo")
+env = SpackCommand("env")


def test_help_option():
@@ -33,3 +37,33 @@ def test_create_add_list_remove(mutable_config, tmpdir):
repo("remove", "--scope=site", str(tmpdir))
output = repo("list", "--scope=site", output=str)
assert "mockrepo" not in output
+
+
+def test_env_repo_path_vars_substitution(
+tmpdir, install_mockery, mutable_mock_env_path, monkeypatch
+):
+"""Test Spack correctly substitues repo paths with environment variables when creating an
+environment from a manifest file."""
+
+monkeypatch.setenv("CUSTOM_REPO_PATH", ".")
+
+# setup environment from spack.yaml
+envdir = tmpdir.mkdir("env")
+with envdir.as_cwd():
+with open("spack.yaml", "w", encoding="utf-8") as f:
+f.write(
+"""\
+spack:
+specs: []
+
+repos:
+- $CUSTOM_REPO_PATH
+"""
+)
+# creating env from manifest file
+env("create", "test", "./spack.yaml")
+# check that repo path was correctly substituted with the environment variable
+current_dir = os.getcwd()
+with ev.read("test") as newenv:
+repos_specs = spack.config.get("repos", default={}, scope=newenv.scope_name)
+assert current_dir in repos_specs
@@ -13,7 +13,10 @@
import spack.store
from spack.main import SpackCommand, SpackCommandError

-pytestmark = pytest.mark.usefixtures("mutable_config", "mutable_mock_repo")
+# Unit tests should not be affected by the user's managed environments
+pytestmark = pytest.mark.usefixtures(
+"mutable_mock_env_path", "mutable_config", "mutable_mock_repo"
+)

spec = SpackCommand("spec")
@@ -16,6 +16,9 @@
uninstall = SpackCommand("uninstall")
install = SpackCommand("install")

+# Unit tests should not be affected by the user's managed environments
+pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")


class MockArgs:
def __init__(self, packages, all=False, force=False, dependents=False):
@@ -220,9 +223,7 @@ class TestUninstallFromEnv:
find = SpackCommand("find")

@pytest.fixture(scope="function")
-def environment_setup(
-self, mutable_mock_env_path, mock_packages, mutable_database, install_mockery
-):
+def environment_setup(self, mock_packages, mutable_database, install_mockery):
TestUninstallFromEnv.env("create", "e1")
e1 = spack.environment.read("e1")
with e1:
@@ -50,7 +50,7 @@ def test_list_long(capsys):
def test_list_long_with_pytest_arg(capsys):
with capsys.disabled():
output = spack_test("--list-long", cmd_test_py)
-print(output)
assert "unit_test.py::\n" in output
assert "test_list" in output
assert "test_list_with_pytest_arg" in output
@@ -4,19 +4,31 @@

"""Tests for the `spack verify` command"""
import os
+import platform
+
+import pytest

import llnl.util.filesystem as fs

+import spack.cmd.verify
import spack.concretize
+import spack.installer
import spack.store
+import spack.util.executable
import spack.util.spack_json as sjson
import spack.verify
-from spack.main import SpackCommand
+from spack.main import SpackCommand, SpackCommandError

verify = SpackCommand("verify")
install = SpackCommand("install")


+def skip_unless_linux(f):
+return pytest.mark.skipif(
+str(platform.system()) != "Linux", reason="only tested on linux for now"
+)(f)
+
+
def test_single_file_verify_cmd(tmpdir):
# Test the verify command interface to verifying a single file.
filedir = os.path.join(str(tmpdir), "a", "b", "c", "d")
@@ -36,15 +48,14 @@ def test_single_file_verify_cmd(tmpdir):
with open(manifest_file, "w", encoding="utf-8") as f:
sjson.dump({filepath: data}, f)

-results = verify("-f", filepath, fail_on_error=False)
-print(results)
+results = verify("manifest", "-f", filepath, fail_on_error=False)
assert not results

os.utime(filepath, (0, 0))
with open(filepath, "w", encoding="utf-8") as f:
f.write("I changed.")

-results = verify("-f", filepath, fail_on_error=False)
+results = verify("manifest", "-f", filepath, fail_on_error=False)

expected = ["hash"]
mtime = os.stat(filepath).st_mtime
@@ -55,7 +66,7 @@ def test_single_file_verify_cmd(tmpdir):
assert filepath in results
assert all(x in results for x in expected)

-results = verify("-fj", filepath, fail_on_error=False)
+results = verify("manifest", "-fj", filepath, fail_on_error=False)
res = sjson.load(results)
assert len(res) == 1
errors = res.pop(filepath)
@@ -69,18 +80,68 @@ def test_single_spec_verify_cmd(tmpdir, mock_packages, mock_archive, mock_fetch,
prefix = s.prefix
hash = s.dag_hash()

-results = verify("/%s" % hash, fail_on_error=False)
+results = verify("manifest", "/%s" % hash, fail_on_error=False)
assert not results

new_file = os.path.join(prefix, "new_file_for_verify_test")
with open(new_file, "w", encoding="utf-8") as f:
f.write("New file")

-results = verify("/%s" % hash, fail_on_error=False)
+results = verify("manifest", "/%s" % hash, fail_on_error=False)
assert new_file in results
assert "added" in results

-results = verify("-j", "/%s" % hash, fail_on_error=False)
+results = verify("manifest", "-j", "/%s" % hash, fail_on_error=False)
res = sjson.load(results)
assert len(res) == 1
assert res[new_file] == ["added"]
+
+
+@pytest.mark.requires_executables("gcc")
+@skip_unless_linux
+def test_libraries(tmp_path, install_mockery, mock_fetch):
+gcc = spack.util.executable.which("gcc", required=True)
+s = spack.concretize.concretize_one("libelf")
+spack.installer.PackageInstaller([s.package]).install()
+os.mkdir(s.prefix.bin)
+
+# There are no ELF files so the verification should pass
+verify("libraries", f"/{s.dag_hash()}")
+
+# Now put main_with_rpath linking to libf.so inside the prefix and verify again. This should
+# work because libf.so can be located in the rpath.
+(tmp_path / "f.c").write_text("void f(void){return;}")
+(tmp_path / "main.c").write_text("void f(void); int main(void){f();return 0;}")
+
+gcc("-shared", "-fPIC", "-o", str(tmp_path / "libf.so"), str(tmp_path / "f.c"))
+gcc(
+"-o",
+str(s.prefix.bin.main_with_rpath),
+str(tmp_path / "main.c"),
+"-L",
+str(tmp_path),
+f"-Wl,-rpath,{tmp_path}",
+"-lf",
+)
+verify("libraries", f"/{s.dag_hash()}")
+
+# Now put main_without_rpath linking to libf.so inside the prefix and verify again. This should
+# fail because libf.so cannot be located in the rpath.
+gcc(
+"-o",
+str(s.prefix.bin.main_without_rpath),
+str(tmp_path / "main.c"),
+"-L",
+str(tmp_path),
+"-lf",
+)
+
+with pytest.raises(SpackCommandError):
+verify("libraries", f"/{s.dag_hash()}")
+
+# Check the error message
+msg = spack.cmd.verify._verify_libraries(s, [])
+assert msg is not None and "libf.so => not found" in msg
+
+# And check that we can make it pass by ignoring it.
+assert spack.cmd.verify._verify_libraries(s, ["libf.so"]) is None
@@ -2018,7 +2018,6 @@ def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
s = Spec("develop-branch-version@git.%s=develop" % git_ref)
c = spack.concretize.concretize_one(s)
assert git_ref in str(c)
-print(str(c))
assert s.satisfies("@develop")
assert s.satisfies("@0.1:")
@@ -2888,6 +2887,23 @@ def test_specifying_different_versions_build_deps(self):
assert any(x.satisfies(hdf5_str) for x in result.specs)
assert any(x.satisfies(pinned_str) for x in result.specs)

+@pytest.mark.regression("44289")
+def test_all_extensions_depend_on_same_extendee(self):
+"""Tests that we don't reuse dependencies that bring in a different extendee"""
+setuptools = spack.concretize.concretize_one("py-setuptools ^python@3.10")
+
+solver = spack.solver.asp.Solver()
+setup = spack.solver.asp.SpackSolverSetup()
+result, _, _ = solver.driver.solve(
+setup, [Spec("py-floating ^python@3.11")], reuse=list(setuptools.traverse())
+)
+assert len(result.specs) == 1
+
+floating = result.specs[0]
+assert all(setuptools.dag_hash() != x.dag_hash() for x in floating.traverse())
+pythons = [x for x in floating.traverse() if x.name == "python"]
+assert len(pythons) == 1 and pythons[0].satisfies("@3.11")
+
+
@pytest.mark.parametrize(
"v_str,v_opts,checksummed",
@@ -3238,3 +3254,54 @@ def test_spec_unification(unify, mutable_config, mock_packages):
maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
_ = spack.cmd.parse_specs([a_restricted, b], concretize=True)
+
+
+def test_concretization_cache_roundtrip(use_concretization_cache, monkeypatch, mutable_config):
+"""Tests whether we can write the results of a clingo solve to the cache
+and load the same spec request from the cache to produce identical specs"""
+# Force determinism:
+# Solver setup is normally non-deterministic due to non-determinism in
+# asp solver setup logic generation. The only other inputs to the cache keys are
+# the .lp files, which are invariant over the course of this test.
+# This method forces the same setup to be produced for the same specs
+# which gives us a guarantee of cache hits, as it removes the only
+# element of non deterministic solver setup for the same spec
+# Basically just a quick and dirty memoization
+solver_setup = spack.solver.asp.SpackSolverSetup.setup
+
+def _setup(self, specs, *, reuse=None, allow_deprecated=False):
+if not getattr(_setup, "cache_setup", None):
+cache_setup = solver_setup(self, specs, reuse=reuse, allow_deprecated=allow_deprecated)
+setattr(_setup, "cache_setup", cache_setup)
+return getattr(_setup, "cache_setup")
+
+# monkeypatch our forced determinism setup method into solver setup
+monkeypatch.setattr(spack.solver.asp.SpackSolverSetup, "setup", _setup)
+
+assert spack.config.get("config:concretization_cache:enable")
+
+# run one standard concretization to populate the cache and the setup method
+# memoization
+h = spack.concretize.concretize_one("hdf5")
+
+# due to our forced determinism above, we should not be observing
+# cache misses, assert that we're not storing any new cache entries
+def _ensure_no_store(self, problem: str, result, statistics, test=False):
+# always throw, we never want to reach this code path
+assert False, "Concretization cache hit expected"
+
+# Assert that we're actually hitting the cache
+cache_fetch = spack.solver.asp.ConcretizationCache.fetch
+
+def _ensure_cache_hits(self, problem: str):
+result, statistics = cache_fetch(self, problem)
+assert result, "Expected successful concretization cache hit"
+assert statistics, "Expected statistics to be non null on cache hit"
+return result, statistics
+
+monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "store", _ensure_no_store)
+monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "fetch", _ensure_cache_hits)
+# ensure subsequent concretizations of the same spec produce the same spec
+# object
+for _ in range(5):
+assert h == spack.concretize.concretize_one("hdf5")
@@ -11,8 +11,7 @@

import pytest

-import llnl.util.tty as tty
-from llnl.util.filesystem import join_path, touch, touchp
+from llnl.util.filesystem import join_path, touch

import spack
import spack.config
@@ -26,6 +25,7 @@
import spack.schema.compilers
import spack.schema.config
import spack.schema.env
+import spack.schema.include
import spack.schema.mirrors
import spack.schema.repos
import spack.spec
@@ -33,6 +33,8 @@
import spack.util.path as spack_path
import spack.util.spack_yaml as syaml

+from ..enums import ConfigScopePriority
+
# sample config data
config_low = {
"config": {
@@ -49,22 +51,9 @@

config_override_list = {"config": {"build_stage:": ["pathd", "pathe"]}}

-config_merge_dict = {"config": {"info": {"a": 3, "b": 4}}}
+config_merge_dict = {"config": {"aliases": {"ls": "find", "dev": "develop"}}}

-config_override_dict = {"config": {"info:": {"a": 7, "c": 9}}}
+config_override_dict = {"config": {"aliases:": {"be": "build-env", "deps": "dependencies"}}}


-@pytest.fixture()
-def write_config_file(tmpdir):
-"""Returns a function that writes a config file."""
-
-def _write(config, data, scope):
-config_yaml = tmpdir.join(scope, config + ".yaml")
-config_yaml.ensure()
-with config_yaml.open("w") as f:
-syaml.dump_config(data, f)
-
-return _write


@pytest.fixture()
@@ -710,10 +699,7 @@ def assert_marked(obj):


def test_internal_config_from_data():
-config = spack.config.Configuration()
-
-# add an internal config initialized from an inline dict
-config.push_scope(
+config = spack.config.create_from(
spack.config.InternalConfigScope(
"_builtin", {"config": {"verify_ssl": False, "build_jobs": 6}}
)
@@ -1038,6 +1024,16 @@ def test_bad_config_yaml(tmpdir):
)


+def test_bad_include_yaml(tmpdir):
+with pytest.raises(spack.config.ConfigFormatError, match="is not of type"):
+check_schema(
+spack.schema.include.schema,
+"""\
+include: $HOME/include.yaml
+""",
+)
+
+
def test_bad_mirrors_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
@@ -1102,9 +1098,9 @@ def test_internal_config_section_override(mock_low_high_config, write_config_file):

def test_internal_config_dict_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_dict, "low")
-wanted_dict = config_override_dict["config"]["info:"]
+wanted_dict = config_override_dict["config"]["aliases:"]
mock_low_high_config.push_scope(spack.config.InternalConfigScope("high", config_override_dict))
-assert mock_low_high_config.get("config:info") == wanted_dict
+assert mock_low_high_config.get("config:aliases") == wanted_dict


def test_internal_config_list_override(mock_low_high_config, write_config_file):
@@ -1136,10 +1132,10 @@ def test_set_list_override(mock_low_high_config, write_config_file):

def test_set_dict_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_dict, "low")
-wanted_dict = config_override_dict["config"]["info:"]
+wanted_dict = config_override_dict["config"]["aliases:"]
-with spack.config.override("config:info:", wanted_dict):
+with spack.config.override("config:aliases:", wanted_dict):
-assert wanted_dict == mock_low_high_config.get("config:info")
+assert wanted_dict == mock_low_high_config.get("config:aliases")
-assert config_merge_dict["config"]["info"] == mock_low_high_config.get("config:info")
+assert config_merge_dict["config"]["aliases"] == mock_low_high_config.get("config:aliases")


def test_set_bad_path(config):
@@ -1265,134 +1261,6 @@ def test_user_cache_path_is_default_when_env_var_is_empty(working_env):
assert os.path.expanduser("~%s.spack" % os.sep) == spack.paths._get_user_cache_path()


-github_url = "https://github.com/fake/fake/{0}/develop"
-gitlab_url = "https://gitlab.fake.io/user/repo/-/blob/config/defaults"
-
-
-@pytest.mark.parametrize(
-"url,isfile",
-[
-(github_url.format("tree"), False),
-("{0}/README.md".format(github_url.format("blob")), True),
-("{0}/etc/fake/defaults/packages.yaml".format(github_url.format("blob")), True),
-(gitlab_url, False),
-(None, False),
-],
-)
-def test_config_collect_urls(mutable_empty_config, mock_spider_configs, url, isfile):
-with spack.config.override("config:url_fetch_method", "curl"):
-urls = spack.config.collect_urls(url)
-if url:
-if isfile:
-expected = 1 if url.endswith(".yaml") else 0
-assert len(urls) == expected
-else:
-# Expect multiple configuration files for a "directory"
-assert len(urls) > 1
-else:
-assert not urls
-
-
-@pytest.mark.parametrize(
-"url,isfile,fail",
-[
-(github_url.format("tree"), False, False),
-(gitlab_url, False, False),
-("{0}/README.md".format(github_url.format("blob")), True, True),
-("{0}/compilers.yaml".format(gitlab_url), True, False),
-(None, False, True),
-],
-)
-def test_config_fetch_remote_configs(
-tmpdir, mutable_empty_config, mock_collect_urls, mock_curl_configs, url, isfile, fail
-):
-def _has_content(filename):
-# The first element of all configuration files for this test happen to
-# be the basename of the file so this check leverages that feature. If
-# that changes, then this check will need to change accordingly.
-element = "{0}:".format(os.path.splitext(os.path.basename(filename))[0])
-with open(filename, "r", encoding="utf-8") as fd:
-for line in fd:
-if element in line:
-return True
-tty.debug("Expected {0} in '{1}'".format(element, filename))
-return False
-
-dest_dir = join_path(tmpdir.strpath, "defaults")
-if fail:
-msg = "Cannot retrieve configuration"
-with spack.config.override("config:url_fetch_method", "curl"):
-with pytest.raises(spack.config.ConfigFileError, match=msg):
-spack.config.fetch_remote_configs(url, dest_dir)
-else:
-with spack.config.override("config:url_fetch_method", "curl"):
-path = spack.config.fetch_remote_configs(url, dest_dir)
-assert os.path.exists(path)
-if isfile:
-# Ensure correct file is "fetched"
-assert os.path.basename(path) == os.path.basename(url)
-# Ensure contents of the file has expected config element
-assert _has_content(path)
-else:
-for filename in os.listdir(path):
-assert _has_content(join_path(path, filename))
-
-
-@pytest.fixture(scope="function")
-def mock_collect_urls(mock_config_data, monkeypatch):
-"""Mock the collection of URLs to avoid mocking spider."""
-
-_, config_files = mock_config_data
-
-def _collect(base_url):
-if not base_url:
-return []
-
-ext = os.path.splitext(base_url)[1]
-if ext:
-return [base_url] if ext == ".yaml" else []
-
-return [join_path(base_url, f) for f in config_files]
-
-monkeypatch.setattr(spack.config, "collect_urls", _collect)
-
-yield
-
-
-@pytest.mark.parametrize(
-"url,skip",
-[(github_url.format("tree"), True), ("{0}/compilers.yaml".format(gitlab_url), True)],
-)
-def test_config_fetch_remote_configs_skip(
-tmpdir, mutable_empty_config, mock_collect_urls, mock_curl_configs, url, skip
-):
-"""Ensure skip fetching remote config file if it already exists when
-required and not skipping if replacing it."""
-
-def check_contents(filename, expected):
-with open(filename, "r", encoding="utf-8") as fd:
-lines = fd.readlines()
-if expected:
-assert lines[0] == "compilers:"
-else:
-assert not lines
-
-dest_dir = join_path(tmpdir.strpath, "defaults")
-filename = "compilers.yaml"
-
-# Create a stage directory with an empty configuration file
-path = join_path(dest_dir, filename)
-touchp(path)
-
-# Do NOT replace the existing cached configuration file if skipping
-expected = None if skip else "compilers:"
-
-with spack.config.override("config:url_fetch_method", "curl"):
-path = spack.config.fetch_remote_configs(url, dest_dir, skip)
-result_filename = path if path.endswith(".yaml") else join_path(path, filename)
-check_contents(result_filename, expected)


def test_config_file_dir_failure(tmpdir, mutable_empty_config):
with pytest.raises(spack.config.ConfigFileError, match="not a file"):
spack.config.read_config_file(tmpdir.strpath)
@@ -1445,7 +1313,7 @@ def test_config_path_dsl(path, it_should_work, expected_parsed):


@pytest.mark.regression("48254")
-def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
+def test_env_activation_preserves_command_line_scope(mutable_mock_env_path):
"""Check that the "command_line" scope remains the highest priority scope, when we activate,
or deactivate, environments.
"""
@@ -1469,3 +1337,51 @@ def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
assert spack.config.CONFIG.highest() == expected_cl_scope

assert spack.config.CONFIG.highest() == expected_cl_scope
+
+
+@pytest.mark.regression("48414")
+@pytest.mark.regression("49188")
+def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
+"""Check that the priority of scopes is respected when merging configuration files."""
+custom_scope = spack.config.InternalConfigScope("custom_scope")
+spack.config.CONFIG.push_scope(custom_scope, priority=ConfigScopePriority.CUSTOM)
+expected_scopes_without_env = ["custom_scope", "command_line"]
+expected_scopes_with_first_env = ["custom_scope", "env:test", "command_line"]
+expected_scopes_with_second_env = ["custom_scope", "env:test-2", "command_line"]
+
+def highest_priority_scopes(config, *, nscopes):
+return list(config.scopes)[-nscopes:]
+
+assert highest_priority_scopes(spack.config.CONFIG, nscopes=2) == expected_scopes_without_env
+# Creating an environment pushes a new scope
+ev.create("test")
+with ev.read("test"):
+assert (
+highest_priority_scopes(spack.config.CONFIG, nscopes=3)
+== expected_scopes_with_first_env
+)
+
+# No active environment pops the scope
+with ev.no_active_environment():
+assert (
+highest_priority_scopes(spack.config.CONFIG, nscopes=2)
+== expected_scopes_without_env
+)
+assert (
+highest_priority_scopes(spack.config.CONFIG, nscopes=3)
+== expected_scopes_with_first_env
+)
+
+# Switch the environment to another one
+ev.create("test-2")
+with ev.read("test-2"):
+assert (
+highest_priority_scopes(spack.config.CONFIG, nscopes=3)
+== expected_scopes_with_second_env
+)
+assert (
+highest_priority_scopes(spack.config.CONFIG, nscopes=3)
+== expected_scopes_with_first_env
+)
+
+assert highest_priority_scopes(spack.config.CONFIG, nscopes=2) == expected_scopes_without_env
@@ -30,7 +30,15 @@
import llnl.util.lang
import llnl.util.lock
import llnl.util.tty as tty
-from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, touchp, working_dir
+from llnl.util.filesystem import (
+copy,
+copy_tree,
+join_path,
+mkdirp,
+remove_linked_tree,
+touchp,
+working_dir,
+)

import spack.binary_distribution
import spack.bootstrap.core
@@ -65,6 +73,9 @@
from spack.installer import PackageInstaller
from spack.main import SpackCommand
from spack.util.pattern import Bunch
+from spack.util.remote_file_cache import raw_github_gitlab_url
+
+from ..enums import ConfigScopePriority

mirror_cmd = SpackCommand("mirror")
@@ -339,6 +350,16 @@ def pytest_collection_modifyitems(config, items):
item.add_marker(skip_as_slow)


+@pytest.fixture(scope="function")
+def use_concretization_cache(mutable_config, tmpdir):
+"""Enables the use of the concretization cache"""
+spack.config.set("config:concretization_cache:enable", True)
+# ensure we have an isolated concretization cache
+new_conc_cache_loc = str(tmpdir.mkdir("concretization"))
+spack.config.set("config:concretization_cache:path", new_conc_cache_loc)
+yield
+
+
#
# These fixtures are applied to all tests
#
@@ -723,11 +744,23 @@ def configuration_dir(tmpdir_factory, linux_os):
def _create_mock_configuration_scopes(configuration_dir):
"""Create the configuration scopes used in `config` and `mutable_config`."""
return [
+(
+ConfigScopePriority.BUILTIN,
spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS),
+),
+(
+ConfigScopePriority.CONFIG_FILES,
spack.config.DirectoryConfigScope("site", str(configuration_dir.join("site"))),
+),
+(
+ConfigScopePriority.CONFIG_FILES,
spack.config.DirectoryConfigScope("system", str(configuration_dir.join("system"))),
+),
+(
+ConfigScopePriority.CONFIG_FILES,
spack.config.DirectoryConfigScope("user", str(configuration_dir.join("user"))),
-spack.config.InternalConfigScope("command_line"),
+),
+(ConfigScopePriority.COMMAND_LINE, spack.config.InternalConfigScope("command_line")),
]
@@ -794,13 +827,11 @@ def mock_wsdk_externals(monkeypatch_session):
def concretize_scope(mutable_config, tmpdir):
"""Adds a scope for concretization preferences"""
tmpdir.ensure_dir("concretize")
-mutable_config.push_scope(
+with spack.config.override(
spack.config.DirectoryConfigScope("concretize", str(tmpdir.join("concretize")))
-)
+):

yield str(tmpdir.join("concretize"))

-mutable_config.pop_scope()
spack.repo.PATH._provider_index = None
@@ -1884,35 +1915,21 @@ def __call__(self, *args, **kwargs):


@pytest.fixture(scope="function")
-def mock_spider_configs(mock_config_data, monkeypatch):
-"""
-Mock retrieval of configuration file URLs from the web by grabbing
-them from the test data configuration directory.
-"""
-config_data_dir, config_files = mock_config_data
-
-def _spider(*args, **kwargs):
-root_urls = args[0]
-if not root_urls:
-return [], set()
-
-root_urls = [root_urls] if isinstance(root_urls, str) else root_urls
-
-# Any URL with an extension will be treated like a file; otherwise,
-# it is considered a directory/folder and we'll grab all available
-# files.
-urls = []
-for url in root_urls:
-if os.path.splitext(url)[1]:
-urls.append(url)
-else:
-urls.extend([os.path.join(url, f) for f in config_files])
-
-return [], set(urls)
-
-monkeypatch.setattr(spack.util.web, "spider", _spider)
-
-yield
+def mock_fetch_url_text(tmpdir, mock_config_data, monkeypatch):
+"""Mock spack.util.web.fetch_url_text."""
+stage_dir, config_files = mock_config_data
+
+def _fetch_text_file(url, dest_dir):
+raw_url = raw_github_gitlab_url(url)
+mkdirp(dest_dir)
+basename = os.path.basename(raw_url)
+src = join_path(stage_dir, basename)
+dest = join_path(dest_dir, basename)
+copy(src, dest)
+return dest
+
+monkeypatch.setattr(spack.util.web, "fetch_url_text", _fetch_text_file)


@pytest.fixture(scope="function")
@@ -2126,8 +2143,7 @@ def _c_compiler_always_exists():
@pytest.fixture(scope="session")
def mock_test_cache(tmp_path_factory):
cache_dir = tmp_path_factory.mktemp("cache")
-print(cache_dir)
-return spack.util.file_cache.FileCache(str(cache_dir))
+return spack.util.file_cache.FileCache(cache_dir)


class MockHTTPResponse(io.IOBase):
@@ -2176,3 +2192,27 @@ def info(self):
@pytest.fixture()
def mock_runtimes(config, mock_packages):
return mock_packages.packages_with_tags("runtime")
+
+
+@pytest.fixture()
+def write_config_file(tmpdir):
+"""Returns a function that writes a config file."""
+
+def _write(config, data, scope):
+config_yaml = tmpdir.join(scope, config + ".yaml")
+config_yaml.ensure()
+with config_yaml.open("w") as f:
+syaml.dump_config(data, f)
+return config_yaml
+
+return _write
+
+
+def _include_cache_root():
+return join_path(str(tempfile.mkdtemp()), "user_cache", "includes")
+
+
+@pytest.fixture()
+def mock_include_cache(monkeypatch):
+"""Override the include cache directory so tests don't pollute user cache."""
+monkeypatch.setattr(spack.config, "_include_cache_location", _include_cache_root)
@@ -14,3 +14,5 @@ config:
checksum: true
dirty: false
locks: {1}
+concretization_cache:
+enable: false
@@ -161,7 +161,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages, tmp_path
"""
layout = temporary_store.layout

-repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
+repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=repo_cache)

not_in_mock = set.difference(
@@ -12,6 +12,7 @@

import spack.config
import spack.environment as ev
+import spack.platforms
import spack.solver.asp
import spack.spec
from spack.environment.environment import (
@@ -519,7 +520,9 @@ def test_error_message_when_using_too_new_lockfile(tmp_path):
("when_possible", True),
],
)
-def test_environment_concretizer_scheme_used(tmp_path, unify_in_lower_scope, unify_in_spack_yaml):
+def test_environment_concretizer_scheme_used(
+tmp_path, mutable_config, unify_in_lower_scope, unify_in_spack_yaml
+):
"""Tests that "unify" settings in spack.yaml always take precedence over settings in lower
configuration scopes.
"""
@@ -533,9 +536,10 @@ def test_environment_concretizer_scheme_used(tmp_path, unify_in_lower_scope, uni
unify: {str(unify_in_spack_yaml).lower()}
"""
)
-with spack.config.override("concretizer:unify", unify_in_lower_scope):
+mutable_config.set("concretizer:unify", unify_in_lower_scope)
+assert mutable_config.get("concretizer:unify") == unify_in_lower_scope
with ev.Environment(manifest.parent) as e:
+assert mutable_config.get("concretizer:unify") == unify_in_spack_yaml
assert e.unify == unify_in_spack_yaml
@@ -918,3 +922,50 @@ def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_

with pytest.raises(ev.SpackEnvironmentError, match="no such environment"):
_ = ev.environment_from_name_or_dir("fake-env")
+
+
+def test_env_include_configs(mutable_mock_env_path, mock_packages):
+"""check config and package values using new include schema"""
+env_path = mutable_mock_env_path
+env_path.mkdir()
+
+this_os = spack.platforms.host().default_os
+config_root = env_path / this_os
+config_root.mkdir()
+config_path = str(config_root / "config.yaml")
+with open(config_path, "w", encoding="utf-8") as f:
+f.write(
+"""\
+config:
+verify_ssl: False
+"""
+)
+
+packages_path = str(env_path / "packages.yaml")
+with open(packages_path, "w", encoding="utf-8") as f:
+f.write(
+"""\
+packages:
+python:
+require:
+- spec: "@3.11:"
+"""
+)
+
+spack_yaml = env_path / ev.manifest_name
+spack_yaml.write_text(
+f"""\
+spack:
+include:
+- path: {config_path}
+optional: true
+- path: {packages_path}
+"""
+)
+
+e = ev.Environment(env_path)
+with e.manifest.use_config():
+assert not spack.config.get("config:verify_ssl")
+python_reqs = spack.config.get("packages")["python"]["require"]
+req_specs = set(x["spec"] for x in python_reqs)
+assert req_specs == set(["@3.11:"])
@@ -680,13 +680,19 @@ def test_install_spliced_build_spec_installed(install_mockery, capfd, mock_fetch
assert node.build_spec.installed


+# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("lacking windows support for binary installs")
@pytest.mark.parametrize("transitive", [True, False])
@pytest.mark.parametrize(
"root_str", ["splice-t^splice-h~foo", "splice-h~foo", "splice-vt^splice-a"]
)
def test_install_splice_root_from_binary(
-install_mockery, mock_fetch, mutable_temporary_mirror, transitive, root_str
+mutable_mock_env_path,
+install_mockery,
+mock_fetch,
+mutable_temporary_mirror,
+transitive,
+root_str,
):
"""Test installing a spliced spec with the root available in binary cache"""
# Test splicing and rewiring a spec with the same name, different hash.
@@ -977,7 +983,6 @@ class MyBuildException(Exception):


def _install_fail_my_build_exception(installer, task, install_status, **kwargs):
-print(task, task.pkg.name)
if task.pkg.name == "pkg-a":
raise MyBuildException("mock internal package build error for pkg-a")
else:
@@ -364,3 +364,44 @@ def test_fnmatch_multiple():
assert not regex.match("libbar.so.1")
assert not regex.match("libfoo.solibbar.so")
assert not regex.match("libbaz.so")
+
+
+class TestPriorityOrderedMapping:
+@pytest.mark.parametrize(
+"elements,expected",
+[
+# Push out-of-order with explicit, and different, priorities
+([("b", 2), ("a", 1), ("d", 4), ("c", 3)], ["a", "b", "c", "d"]),
+# Push in-order with priority=None
+([("a", None), ("b", None), ("c", None), ("d", None)], ["a", "b", "c", "d"]),
+# Mix explicit and implicit priorities
+([("b", 2), ("c", None), ("a", 1), ("d", None)], ["a", "b", "c", "d"]),
+([("b", 10), ("c", None), ("a", -20), ("d", None)], ["a", "b", "c", "d"]),
+([("b", 10), ("c", None), ("a", 20), ("d", None)], ["b", "c", "a", "d"]),
+# Adding the same key twice with different priorities
+([("b", 10), ("c", None), ("a", 20), ("d", None), ("a", -20)], ["a", "b", "c", "d"]),
+# Adding the same key twice, no priorities
+([("b", None), ("a", None), ("b", None)], ["a", "b"]),
+],
+)
+def test_iteration_order(self, elements, expected):
+"""Tests that the iteration order respects priorities, no matter the insertion order."""
+m = llnl.util.lang.PriorityOrderedMapping()
+for key, priority in elements:
+m.add(key, value=None, priority=priority)
+assert list(m) == expected
+
+def test_reverse_iteration(self):
+"""Tests that we can conveniently use reverse iteration"""
+m = llnl.util.lang.PriorityOrderedMapping()
+for key, value in [("a", 1), ("b", 2), ("c", 3)]:
+m.add(key, value=value)
+
+assert list(m) == ["a", "b", "c"]
+assert list(reversed(m)) == ["c", "b", "a"]
+
+assert list(m.keys()) == ["a", "b", "c"]
+assert list(m.reversed_keys()) == ["c", "b", "a"]
+
+assert list(m.values()) == [1, 2, 3]
+assert list(m.reversed_values()) == [3, 2, 1]
@@ -3,6 +3,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os
+import os.path
+
import pytest

import llnl.util.filesystem as fs

@@ -13,8 +16,10 @@
import spack.error
import spack.main
import spack.paths
+import spack.platforms
import spack.util.executable as exe
import spack.util.git
+import spack.util.spack_yaml as syaml

pytestmark = pytest.mark.not_on_windows(
"Test functionality supported but tests are failing on Win"
@@ -167,3 +172,163 @@ def test_add_command_line_scope_env(tmp_path, mutable_mock_env_path):
assert config.get("config:install_tree:root") == "/tmp/first"

assert ev.active_environment() is None  # shouldn't cause an environment to be activated
+
+
+def test_include_cfg(mock_low_high_config, write_config_file, tmpdir):
+cfg1_path = str(tmpdir.join("include1.yaml"))
+with open(cfg1_path, "w", encoding="utf-8") as f:
+f.write(
+"""\
+config:
+verify_ssl: False
+dirty: True
+packages:
+python:
+require:
+- spec: "@3.11:"
+"""
+)
+
+def python_cfg(_spec):
+return f"""\
+packages:
+python:
+require:
+- spec: {_spec}
+"""
+
+def write_python_cfg(_spec, _cfg_name):
+cfg_path = str(tmpdir.join(_cfg_name))
+with open(cfg_path, "w", encoding="utf-8") as f:
+f.write(python_cfg(_spec))
+return cfg_path
+
+# This config will not be included
+cfg2_path = write_python_cfg("+shared", "include2.yaml")
+
+# The config will point to this using substitutable variables,
+# namely $os; we expect that Spack resolves these variables
+# into the actual path of the config
+this_os = spack.platforms.host().default_os
+cfg3_expanded_path = os.path.join(str(tmpdir), f"{this_os}", "include3.yaml")
+fs.mkdirp(os.path.dirname(cfg3_expanded_path))
+with open(cfg3_expanded_path, "w", encoding="utf-8") as f:
+f.write(python_cfg("+ssl"))
+cfg3_abstract_path = os.path.join(str(tmpdir), "$os", "include3.yaml")
+
+# This will be included unconditionally
+cfg4_path = write_python_cfg("+tk", "include4.yaml")
+
+# This config will not exist, and the config will explicitly
+# allow this
+cfg5_path = os.path.join(str(tmpdir), "non-existent.yaml")
+
+include_entries = [
+{"path": f"{cfg1_path}", "when": f'os == "{this_os}"'},
+{"path": f"{cfg2_path}", "when": "False"},
+{"path": cfg3_abstract_path},
+cfg4_path,
+{"path": cfg5_path, "optional": True},
+]
+include_cfg = {"include": include_entries}
+filename = write_config_file("include", include_cfg, "low")
+
+assert not spack.config.get("config:dirty")
+
+spack.main.add_command_line_scopes(mock_low_high_config, [os.path.dirname(filename)])
+
+assert spack.config.get("config:dirty")
+python_reqs = spack.config.get("packages")["python"]["require"]
+req_specs = set(x["spec"] for x in python_reqs)
+assert req_specs == set(["@3.11:", "+ssl", "+tk"])
+
+
+def test_include_duplicate_source(tmpdir, mutable_config):
+"""Check precedence when include.yaml files have the same path."""
+include_yaml = "debug.yaml"
+include_list = {"include": [f"./{include_yaml}"]}
+
+system_filename = mutable_config.get_config_filename("system", "include")
+site_filename = mutable_config.get_config_filename("site", "include")
+
+def write_configs(include_path, debug_data):
+fs.mkdirp(os.path.dirname(include_path))
+with open(include_path, "w", encoding="utf-8") as f:
+syaml.dump_config(include_list, f)
+
+debug_path = fs.join_path(os.path.dirname(include_path), include_yaml)
+with open(debug_path, "w", encoding="utf-8") as f:
+syaml.dump_config(debug_data, f)
+
+system_config = {"config": {"debug": False}}
+write_configs(system_filename, system_config)
|
||||||
|
spack.main.add_command_line_scopes(mutable_config, [os.path.dirname(system_filename)])
|
||||||
|
|
||||||
|
site_config = {"config": {"debug": True}}
|
||||||
|
write_configs(site_filename, site_config)
|
||||||
|
spack.main.add_command_line_scopes(mutable_config, [os.path.dirname(site_filename)])
|
||||||
|
|
||||||
|
# Ensure takes the last value of the option pushed onto the stack
|
||||||
|
assert mutable_config.get("config:debug") == site_config["config"]["debug"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_include_recurse_limit(tmpdir, mutable_config):
|
||||||
|
"""Ensure hit the recursion limit."""
|
||||||
|
include_yaml = "include.yaml"
|
||||||
|
include_list = {"include": [f"./{include_yaml}"]}
|
||||||
|
|
||||||
|
include_path = str(tmpdir.join(include_yaml))
|
||||||
|
with open(include_path, "w", encoding="utf-8") as f:
|
||||||
|
syaml.dump_config(include_list, f)
|
||||||
|
|
||||||
|
with pytest.raises(spack.config.RecursiveIncludeError, match="recursion exceeded"):
|
||||||
|
spack.main.add_command_line_scopes(mutable_config, [os.path.dirname(include_path)])
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Fix this once recursive includes are processed in the expected order.
|
||||||
|
@pytest.mark.parametrize("child,expected", [("b", True), ("c", False)])
|
||||||
|
def test_include_recurse_diamond(tmpdir, mutable_config, child, expected):
|
||||||
|
"""Demonstrate include parent's value overrides that of child in diamond include.
|
||||||
|
|
||||||
|
Check that the value set by b or c overrides that set by d.
|
||||||
|
"""
|
||||||
|
configs_root = tmpdir.join("configs")
|
||||||
|
configs_root.mkdir()
|
||||||
|
|
||||||
|
def write(path, contents):
|
||||||
|
with open(path, "w", encoding="utf-8") as f:
|
||||||
|
f.write(contents)
|
||||||
|
|
||||||
|
def debug_contents(value):
|
||||||
|
return f"config:\n debug: {value}\n"
|
||||||
|
|
||||||
|
def include_contents(paths):
|
||||||
|
indent = "\n - "
|
||||||
|
values = indent.join([str(p) for p in paths])
|
||||||
|
return f"include:{indent}{values}"
|
||||||
|
|
||||||
|
a_yaml = tmpdir.join("a.yaml")
|
||||||
|
b_yaml = configs_root.join("b.yaml")
|
||||||
|
c_yaml = configs_root.join("c.yaml")
|
||||||
|
d_yaml = configs_root.join("d.yaml")
|
||||||
|
debug_yaml = configs_root.join("enable_debug.yaml")
|
||||||
|
|
||||||
|
write(debug_yaml, debug_contents("true"))
|
||||||
|
|
||||||
|
a_contents = f"""\
|
||||||
|
include:
|
||||||
|
- {b_yaml}
|
||||||
|
- {c_yaml}
|
||||||
|
"""
|
||||||
|
write(a_yaml, a_contents)
|
||||||
|
write(d_yaml, debug_contents("false"))
|
||||||
|
|
||||||
|
write(b_yaml, include_contents([debug_yaml, d_yaml] if child == "b" else [d_yaml]))
|
||||||
|
write(c_yaml, include_contents([debug_yaml, d_yaml] if child == "c" else [d_yaml]))
|
||||||
|
|
||||||
|
spack.main.add_command_line_scopes(mutable_config, [str(tmpdir)])
|
||||||
|
|
||||||
|
try:
|
||||||
|
assert mutable_config.get("config:debug") is expected
|
||||||
|
except AssertionError:
|
||||||
|
pytest.xfail("recursive includes are not processed in the expected order")
|
||||||
|
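For orientation, here is a small sketch (not part of the change itself) of the include configuration shape these tests exercise; the file paths and the `os ==` value are made up for illustration, and `syaml.dump_config` is used exactly as the tests above use it:

import spack.util.spack_yaml as syaml

# Entries may be conditional ("when"), optional, use config variables, or be plain strings.
include_cfg = {
    "include": [
        {"path": "/etc/spack/site.yaml", "when": 'os == "ubuntu22.04"'},  # hypothetical values
        {"path": "$os/include3.yaml"},                # variables are expanded before lookup
        "/etc/spack/always.yaml",                     # plain strings are unconditional
        {"path": "/etc/spack/maybe.yaml", "optional": True},
    ]
}

with open("include.yaml", "w", encoding="utf-8") as f:
    syaml.dump_config(include_cfg, f)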
@@ -34,7 +34,7 @@ def extra_repo(tmp_path_factory, request):
        subdirectory: '{request.param}'
    """
    )
-   repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
+   repo_cache = spack.util.file_cache.FileCache(cache_dir)
    return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param
@@ -194,7 +194,7 @@ def _repo_paths(repos):
    repo_paths, namespaces = _repo_paths(repos)

-   repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
+   repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
    repo_path = spack.repo.RepoPath(*repo_paths, cache=repo_cache)
    assert len(repo_path.repos) == len(namespaces)
    assert [x.namespace for x in repo_path.repos] == namespaces
@@ -319,3 +319,48 @@ def test_get_repo(self, mock_test_cache):
        # foo is not there, raise
        with pytest.raises(spack.repo.UnknownNamespaceError):
            repo.get_repo("foo")


def test_parse_package_api_version():
    """Test that we raise an error if a repository has a version that is not supported."""
    # valid version
    assert spack.repo._parse_package_api_version(
        {"api": "v1.2"}, min_api=(1, 0), max_api=(2, 3)
    ) == (1, 2)
    # too new and too old
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v2.4 is not supported .* \(must be between v1.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({"api": "v2.4"}, min_api=(1, 0), max_api=(2, 3))
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v0.9 is not supported .* \(must be between v1.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({"api": "v0.9"}, min_api=(1, 0), max_api=(2, 3))
    # default to v1.0 if not specified
    assert spack.repo._parse_package_api_version({}, min_api=(1, 0), max_api=(2, 3)) == (1, 0)
    # if v1.0 support is dropped we should also raise
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v1.0 is not supported .* \(must be between v2.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({}, min_api=(2, 0), max_api=(2, 3))
    # finally test invalid input
    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
        spack.repo._parse_package_api_version({"api": "v2"}, min_api=(1, 0), max_api=(3, 3))
    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
        spack.repo._parse_package_api_version({"api": 2.0}, min_api=(1, 0), max_api=(3, 3))


def test_repo_package_api_version(tmp_path: pathlib.Path):
    """Test that we can specify the API version of a repository."""
    (tmp_path / "example" / "packages").mkdir(parents=True)
    (tmp_path / "example" / "repo.yaml").write_text(
        """\
repo:
  namespace: example
"""
    )
    cache = spack.util.file_cache.FileCache(tmp_path / "cache")
    assert spack.repo.Repo(str(tmp_path / "example"), cache=cache).package_api == (1, 0)
@@ -27,9 +27,7 @@ def check_spliced_spec_prefixes(spliced_spec):
        text_file_path = os.path.join(node.prefix, node.name)
        with open(text_file_path, "r", encoding="utf-8") as f:
            text = f.read()
-           print(text)
        for modded_spec in node.traverse(root=True, deptype=dt.ALL & ~dt.BUILD):
-           print(modded_spec)
            assert modded_spec.prefix in text
@@ -17,7 +17,7 @@
def validate_spec_schema():
    return {
        "type": "object",
-       "validate_spec": True,
+       "additionalKeysAreSpecs": True,
        "patternProperties": {r"\w[\w-]*": {"type": "string"}},
    }
@@ -34,7 +34,7 @@ def module_suffixes_schema():
        "type": "object",
        "properties": {
            "suffixes": {
-               "validate_spec": True,
+               "additionalKeysAreSpecs": True,
                "patternProperties": {r"\w[\w-]*": {"type": "string"}},
            }
        },
@@ -84,6 +84,7 @@ def test_module_suffixes(module_suffixes_schema):
    "compilers",
    "config",
    "definitions",
+   "include",
    "env",
    "merged",
    "mirrors",
@@ -427,6 +427,9 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
        openmpi_edges = s2.edges_to_dependencies(name="openmpi")
        assert len(openmpi_edges) == 1

+       # Check that virtuals have been reconstructed
+       assert "mpi" in openmpi_edges[0].virtuals

        # The virtuals attribute must be a tuple, when read from a
        # JSON or YAML file, not a list
        for edge in s2.traverse_edges():
@@ -149,11 +149,8 @@ def test_reverse_environment_modifications(working_env):
    os.environ.clear()
    os.environ.update(start_env)

-   print(os.environ)
    to_reverse.apply_modifications()
-   print(os.environ)
    reversal.apply_modifications()
-   print(os.environ)

    start_env.pop("UNSET")
    assert os.environ == start_env
lib/spack/spack/test/util/remote_file_cache.py (new file, 97 lines)
@@ -0,0 +1,97 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import sys

import pytest

import llnl.util.tty as tty
from llnl.util.filesystem import join_path

import spack.config
import spack.util.remote_file_cache as rfc_util

github_url = "https://github.com/fake/fake/{0}/develop"
gitlab_url = "https://gitlab.fake.io/user/repo/-/blob/config/defaults"


@pytest.mark.parametrize(
    "path,err",
    [
        ("ssh://git@github.com:spack/", "Unsupported URL scheme"),
        ("bad:///this/is/a/file/url/include.yaml", "Invalid URL scheme"),
    ],
)
def test_rfc_local_path_bad_scheme(path, err):
    with pytest.raises(ValueError, match=err):
        _ = rfc_util.local_path(path, "")


@pytest.mark.parametrize(
    "path", ["/a/b/c/d/e/config.py", "file:///this/is/a/file/url/include.yaml"]
)
def test_rfc_local_path_file(path):
    actual = path.split("://")[1] if ":" in path else path
    assert rfc_util.local_path(path, "") == os.path.normpath(actual)


def test_rfc_remote_local_path_no_dest():
    path = f"{gitlab_url}/packages.yaml"
    with pytest.raises(ValueError, match="Requires the destination argument"):
        _ = rfc_util.local_path(path, "")


compilers_sha256 = (
    "381732677538143a8f900406c0654f2730e2919a11740bdeaf35757ab3e1ef3e"
    if sys.platform == "win32"
    else "e91148ed5a0da7844e9f3f9cfce0fa60cce509461886bc3b006ee9eb711f69df"
)


@pytest.mark.parametrize(
    "url,sha256,err,msg",
    [
        (
            f"{join_path(github_url.format('tree'), 'config.yaml')}",
            "",
            ValueError,
            "Requires sha256",
        ),
        (f"{gitlab_url}/compilers.yaml", compilers_sha256, None, ""),
        (f"{gitlab_url}/packages.yaml", "abcdef", ValueError, "does not match"),
        (f"{github_url.format('blob')}/README.md", "", OSError, "No such"),
        (github_url.format("tree"), "", OSError, "No such"),
        ("", "", ValueError, "argument is required"),
    ],
)
def test_rfc_remote_local_path(
    tmpdir, mutable_empty_config, mock_fetch_url_text, url, sha256, err, msg
):
    def _has_content(filename):
        # The first element of all configuration files for this test happens to
        # be the basename of the file, so this check leverages that feature. If
        # that changes, then this check will need to change accordingly.
        element = f"{os.path.splitext(os.path.basename(filename))[0]}:"
        with open(filename, "r", encoding="utf-8") as fd:
            for line in fd:
                if element in line:
                    return True
        tty.debug(f"Expected {element} in '{filename}'")
        return False

    def _dest_dir():
        return join_path(tmpdir.strpath, "cache")

    if err is not None:
        with spack.config.override("config:url_fetch_method", "curl"):
            with pytest.raises(err, match=msg):
                rfc_util.local_path(url, sha256, _dest_dir)
    else:
        with spack.config.override("config:url_fetch_method", "curl"):
            path = rfc_util.local_path(url, sha256, _dest_dir)
            assert os.path.exists(path)
            # Ensure the correct file is "fetched"
            assert os.path.basename(path) == os.path.basename(url)
            # Ensure the contents of the file contain the expected config element
            assert _has_content(path)
@@ -257,7 +257,6 @@ def test_core_lib_files():
        names.append(os.path.join(test_dir, n))

    for filename in names:
-       print("Testing %s" % filename)
        source = read_pyfile(filename)
        check_ast_roundtrip(source)
@@ -685,22 +685,6 @@ def test_str(self) -> None:
        c["shared"] = BoolValuedVariant("shared", True)
        assert str(c) == "+shared feebar=foo foo=bar,baz foobar=fee"

-   def test_concrete(self, mock_packages, config) -> None:
-       spec = Spec("pkg-a")
-       assert not VariantMap(spec).concrete
-
-       # concrete if associated spec is concrete
-       spec = spack.concretize.concretize_one(spec)
-       assert VariantMap(spec).concrete
-
-       # concrete if all variants are present (even if spec not concrete)
-       spec._mark_concrete(False)
-       assert spec.variants.concrete
-
-       # remove a variant to test the condition
-       del spec.variants["foo"]
-       assert not spec.variants.concrete


def test_disjoint_set_initialization_errors():
    # Constructing from non-disjoint sets should raise an exception
@@ -133,7 +133,7 @@ def test_check_prefix_manifest(tmpdir):
    spec = spack.spec.Spec("libelf")
    spec._mark_concrete()
-   spec.prefix = prefix
+   spec.set_prefix(prefix)

    results = spack.verify.check_spec_manifest(spec)
    assert results.has_errors()
@@ -35,8 +35,8 @@ def test_view_with_spec_not_contributing_files(mock_packages, tmpdir):
    a = Spec("pkg-a")
    b = Spec("pkg-b")
-   a.prefix = os.path.join(tmpdir, "a")
-   b.prefix = os.path.join(tmpdir, "b")
+   a.set_prefix(os.path.join(tmpdir, "a"))
+   b.set_prefix(os.path.join(tmpdir, "b"))
    a._mark_concrete()
    b._mark_concrete()
@@ -68,7 +68,9 @@ def project_env_mods(
    *specs: spack.spec.Spec, view, env: environment.EnvironmentModifications
) -> None:
    """Given a list of environment modifications, project path changes to the view."""
-   prefix_to_prefix = {s.prefix: view.get_projection_for_spec(s) for s in specs if not s.external}
+   prefix_to_prefix = {
+       str(s.prefix): view.get_projection_for_spec(s) for s in specs if not s.external
+   }
    # Avoid empty regex if all external
    if not prefix_to_prefix:
        return
@@ -5,16 +5,17 @@
import errno
import math
import os
+import pathlib
import shutil
-from typing import IO, Optional, Tuple
+from typing import IO, Dict, Optional, Tuple, Union

-from llnl.util.filesystem import mkdirp, rename
+from llnl.util.filesystem import rename

from spack.error import SpackError
from spack.util.lock import Lock, ReadTransaction, WriteTransaction


-def _maybe_open(path: str) -> Optional[IO[str]]:
+def _maybe_open(path: Union[str, pathlib.Path]) -> Optional[IO[str]]:
    try:
        return open(path, "r", encoding="utf-8")
    except OSError as e:
@@ -24,7 +25,7 @@ def _maybe_open(path: str) -> Optional[IO[str]]:


class ReadContextManager:
-   def __init__(self, path: str) -> None:
+   def __init__(self, path: Union[str, pathlib.Path]) -> None:
        self.path = path

    def __enter__(self) -> Optional[IO[str]]:
@@ -70,7 +71,7 @@ class FileCache:

    """

-   def __init__(self, root, timeout=120):
+   def __init__(self, root: Union[str, pathlib.Path], timeout=120):
        """Create a file cache object.

        This will create the cache directory if it does not exist yet.
@@ -82,58 +83,60 @@ def __init__(self, root, timeout=120):
            for cache files, this specifies how long Spack should wait
            before assuming that there is a deadlock.
        """
-       self.root = root.rstrip(os.path.sep)
-       if not os.path.exists(self.root):
-           mkdirp(self.root)
+       if isinstance(root, str):
+           root = pathlib.Path(root)
+       self.root = root
+       self.root.mkdir(parents=True, exist_ok=True)

-       self._locks = {}
+       self._locks: Dict[Union[pathlib.Path, str], Lock] = {}
        self.lock_timeout = timeout

    def destroy(self):
        """Remove all files under the cache root."""
-       for f in os.listdir(self.root):
-           path = os.path.join(self.root, f)
-           if os.path.isdir(path):
-               shutil.rmtree(path, True)
+       for f in self.root.iterdir():
+           if f.is_dir():
+               shutil.rmtree(f, True)
            else:
-               os.remove(path)
+               f.unlink()

-   def cache_path(self, key):
+   def cache_path(self, key: Union[str, pathlib.Path]):
        """Path to the file in the cache for a particular key."""
-       return os.path.join(self.root, key)
+       return self.root / key

-   def _lock_path(self, key):
+   def _lock_path(self, key: Union[str, pathlib.Path]):
        """Path to the file in the cache for a particular key."""
        keyfile = os.path.basename(key)
        keydir = os.path.dirname(key)

-       return os.path.join(self.root, keydir, "." + keyfile + ".lock")
+       return self.root / keydir / ("." + keyfile + ".lock")

-   def _get_lock(self, key):
+   def _get_lock(self, key: Union[str, pathlib.Path]):
        """Create a lock for a key, if necessary, and return a lock object."""
        if key not in self._locks:
-           self._locks[key] = Lock(self._lock_path(key), default_timeout=self.lock_timeout)
+           self._locks[key] = Lock(str(self._lock_path(key)), default_timeout=self.lock_timeout)
        return self._locks[key]

-   def init_entry(self, key):
+   def init_entry(self, key: Union[str, pathlib.Path]):
        """Ensure we can access a cache file. Create a lock for it if needed.

        Return whether the cache file exists yet or not.
        """
        cache_path = self.cache_path(key)
+       # Avoid using pathlib here to allow the logic below to
+       # function as is
+       # TODO: Maybe refactor the following logic for pathlib
        exists = os.path.exists(cache_path)
        if exists:
-           if not os.path.isfile(cache_path):
+           if not cache_path.is_file():
                raise CacheError("Cache file is not a file: %s" % cache_path)

            if not os.access(cache_path, os.R_OK):
                raise CacheError("Cannot access cache file: %s" % cache_path)
        else:
            # if the file is hierarchical, make parent directories
-           parent = os.path.dirname(cache_path)
-           if parent.rstrip(os.path.sep) != self.root:
-               mkdirp(parent)
+           parent = cache_path.parent
+           if parent != self.root:
+               parent.mkdir(parents=True, exist_ok=True)

            if not os.access(parent, os.R_OK | os.W_OK):
                raise CacheError("Cannot access cache directory: %s" % parent)
@@ -142,7 +145,7 @@ def init_entry(self, key):
        self._get_lock(key)
        return exists

-   def read_transaction(self, key):
+   def read_transaction(self, key: Union[str, pathlib.Path]):
        """Get a read transaction on a file cache item.

        Returns a ReadTransaction context manager and opens the cache file for
@@ -153,9 +156,11 @@ def read_transaction(self, key):

        """
        path = self.cache_path(key)
-       return ReadTransaction(self._get_lock(key), acquire=lambda: ReadContextManager(path))
+       return ReadTransaction(
+           self._get_lock(key), acquire=lambda: ReadContextManager(path)  # type: ignore
+       )

-   def write_transaction(self, key):
+   def write_transaction(self, key: Union[str, pathlib.Path]):
        """Get a write transaction on a file cache item.

        Returns a WriteTransaction context manager that opens a temporary file
@@ -167,9 +172,11 @@ def write_transaction(self, key):
        if os.path.exists(path) and not os.access(path, os.W_OK):
            raise CacheError(f"Insufficient permissions to write to file cache at {path}")

-       return WriteTransaction(self._get_lock(key), acquire=lambda: WriteContextManager(path))
+       return WriteTransaction(
+           self._get_lock(key), acquire=lambda: WriteContextManager(path)  # type: ignore
+       )

-   def mtime(self, key) -> float:
+   def mtime(self, key: Union[str, pathlib.Path]) -> float:
        """Return modification time of cache file, or -inf if it does not exist.

        Time is in units returned by os.stat in the mtime field, which is
@@ -179,14 +186,14 @@ def mtime(self, key) -> float:
        if not self.init_entry(key):
            return -math.inf
        else:
-           return os.stat(self.cache_path(key)).st_mtime
+           return self.cache_path(key).stat().st_mtime

-   def remove(self, key):
+   def remove(self, key: Union[str, pathlib.Path]):
        file = self.cache_path(key)
        lock = self._get_lock(key)
        try:
            lock.acquire_write()
-           os.unlink(file)
+           file.unlink()
        except OSError as e:
            # File not found is OK, so remove is idempotent.
            if e.errno != errno.ENOENT:
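As a quick illustration of the refactored API, a minimal usage sketch follows. It assumes the `spack.util.file_cache` module path used by the tests above, and that the write transaction still yields an `(old, new)` pair of file handles as it does elsewhere in Spack; the cache root and key are invented for the example.

import pathlib

from spack.util.file_cache import FileCache

# Root may now be a str or a pathlib.Path; the directory is created on demand.
cache = FileCache(pathlib.Path("/tmp/spack-file-cache"))

key = "indexes/providers.json"  # hypothetical hierarchical key
with cache.write_transaction(key) as (old, new):  # old is None on the first write
    new.write("{}")

with cache.read_transaction(key) as f:  # read side returns the open file handle
    data = f.read()

print(cache.mtime(key), data)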
@@ -14,6 +14,7 @@
import sys
import tempfile
from datetime import date
+from typing import Optional

import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -235,7 +236,7 @@ def add_padding(path, length):
    return os.path.join(path, padding)


-def canonicalize_path(path, default_wd=None):
+def canonicalize_path(path: str, default_wd: Optional[str] = None) -> str:
    """Same as substitute_path_variables, but also take absolute path.

    If the string is a yaml object with file annotations, make absolute paths
@@ -243,26 +244,39 @@ def canonicalize_path(path, default_wd=None):
    Otherwise, use ``default_wd`` if specified, otherwise ``os.getcwd()``

    Arguments:
-       path (str): path being converted as needed
+       path: path being converted as needed

-   Returns:
-       (str): An absolute path with path variable substitution
+   Returns: An absolute path or non-file URL with path variable substitution
    """
+   import urllib.parse
+   import urllib.request

    # Get file in which path was written in case we need to make it absolute
    # relative to that path.
    filename = None
    if isinstance(path, syaml.syaml_str):
-       filename = os.path.dirname(path._start_mark.name)
-       assert path._start_mark.name == path._end_mark.name
+       filename = os.path.dirname(path._start_mark.name)  # type: ignore[attr-defined]
+       assert path._start_mark.name == path._end_mark.name  # type: ignore[attr-defined]

    path = substitute_path_variables(path)

+   url = urllib.parse.urlparse(path)
+   url_path = urllib.request.url2pathname(url.path)
+   if url.scheme:
+       if url.scheme != "file":
+           # Have a remote URL so simply return it with substitutions
+           return os.path.normpath(path)

+       # Drop the URL scheme from the local path
+       path = url_path

    if not os.path.isabs(path):
        if filename:
            path = os.path.join(filename, path)
        else:
            base = default_wd or os.getcwd()
            path = os.path.join(base, path)
-           tty.debug("Using working directory %s as base for abspath" % base)
+           tty.debug(f"Using working directory {base} as base for abspath")

    return os.path.normpath(path)
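A hedged illustration of the new URL handling in canonicalize_path; the paths are invented for the example and the expected results assume a POSIX system:

from spack.util.path import canonicalize_path

# Plain relative paths are still resolved against default_wd (or os.getcwd()).
resolved = canonicalize_path("configs/include.yaml", default_wd="/home/user")
assert resolved == "/home/user/configs/include.yaml"

# file:// URLs lose their scheme and come back as ordinary local paths.
assert canonicalize_path("file:///etc/spack/config.yaml") == "/etc/spack/config.yaml"

# Paths with any other scheme (http, https, ...) are returned after variable
# substitution instead of being resolved against the working directory.
print(canonicalize_path("https://example.com/configs/packages.yaml"))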
@@ -347,6 +361,7 @@ def filter_padding():
    This is needed because Spack's debug output gets extremely long when we use a
    long padded installation path.
    """
+   # circular import
    import spack.config

    padding = spack.config.get("config:install_tree:padded_length", None)
lib/spack/spack/util/remote_file_cache.py (new file, 137 lines)
@@ -0,0 +1,137 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import hashlib
import os.path
import shutil
import tempfile
import urllib.parse
import urllib.request
from typing import Callable, Optional

import llnl.util.tty as tty
from llnl.util.filesystem import copy, join_path, mkdirp

import spack.util.crypto
from spack.util.path import canonicalize_path
from spack.util.url import validate_scheme


def raw_github_gitlab_url(url: str) -> str:
    """Transform a github URL to the raw form to avoid undesirable html.

    Args:
        url: url to be converted to raw form

    Returns:
        Raw github/gitlab url or the original url
    """
    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
    # actual URL under https://raw.githubusercontent.com/ with '/blob'
    # (and/or '/blame') removed as needed.
    if "github" in url or "gitlab" in url:
        return url.replace("/blob/", "/raw/")

    return url


def fetch_remote_text_file(url: str, dest_dir: str) -> str:
    """Retrieve the text file from the url into the destination directory.

    Arguments:
        url: URL for the remote text file
        dest_dir: destination directory in which to stage the file locally

    Returns:
        Path to the fetched file

    Raises:
        ValueError: if there are missing required arguments
    """
    from spack.util.web import fetch_url_text  # circular import

    if not url:
        raise ValueError("Cannot retrieve the remote file without the URL")

    raw_url = raw_github_gitlab_url(url)
    tty.debug(f"Fetching file from {raw_url} into {dest_dir}")

    return fetch_url_text(raw_url, dest_dir=dest_dir)


def local_path(raw_path: str, sha256: str, make_dest: Optional[Callable[[], str]] = None) -> str:
    """Determine the actual path and, if remote, stage its contents locally.

    Args:
        raw_path: raw path with possible variables needing substitution
        sha256: the expected sha256 for the file
        make_dest: function to create a stage for remote files, if needed (e.g., `mkdtemp`)

    Returns: resolved, normalized local path or None

    Raises:
        ValueError: missing or mismatched arguments, unsupported URL scheme
    """
    if not raw_path:
        raise ValueError("path argument is required to cache remote files")

    # Allow paths (and URLs) to contain spack config/environment variables,
    # etc.
    path = canonicalize_path(raw_path)
    url = urllib.parse.urlparse(path)

    # Path isn't remote so return absolute, normalized path with substitutions.
    if url.scheme in ["", "file"]:
        return path

    # If the scheme is not valid, the path is not a url
    # of a type Spack is generally aware of
    if validate_scheme(url.scheme):
        # Fetch files from supported URL schemes.
        if url.scheme in ("http", "https", "ftp"):
            if make_dest is None:
                raise ValueError("Requires the destination argument to cache remote files")

            # Stage the remote configuration file
            tmpdir = tempfile.mkdtemp()
            try:
                staged_path = fetch_remote_text_file(path, tmpdir)

                # Ensure the sha256 is expected.
                checksum = spack.util.crypto.checksum(hashlib.sha256, staged_path)
                if sha256 and checksum != sha256:
                    raise ValueError(
                        f"Actual sha256 ('{checksum}') does not match expected ('{sha256}')"
                    )

                # Help the user by reporting the required checksum.
                if not sha256:
                    raise ValueError(f"Requires sha256 ('{checksum}') to cache remote files.")

                # Copy the file to the destination directory
                dest_dir = join_path(make_dest(), checksum)
                if not os.path.exists(dest_dir):
                    mkdirp(dest_dir)

                cache_path = join_path(dest_dir, os.path.basename(staged_path))
                copy(staged_path, cache_path)
                tty.debug(f"Cached {raw_path} in {cache_path}")

                # Stash the associated URL to aid with debugging
                with open(join_path(dest_dir, "source_url.txt"), "w", encoding="utf-8") as f:
                    f.write(f"{raw_path}\n")

                return cache_path

            except ValueError as err:
                tty.warn(f"Unable to cache {raw_path}: {str(err)}")
                raise

            finally:
                shutil.rmtree(tmpdir)

        raise ValueError(f"Unsupported URL scheme ({url.scheme}) in {raw_path}")

    else:
        raise ValueError(f"Invalid URL scheme ({url.scheme}) in {raw_path}")
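A brief usage sketch of the new helper, with a made-up URL and checksum; make_dest only needs to be a callable returning a directory, so tempfile.mkdtemp is used here purely for illustration (a real session would also need Spack's web-fetch configuration in place):

import tempfile

import spack.util.remote_file_cache as rfc_util

# Local paths pass straight through (after variable substitution); no fetching happens.
local = rfc_util.local_path("$spack/etc/spack/defaults/config.yaml", "")

# Remote paths are fetched, checksummed against sha256, and cached under make_dest().
url = "https://example.com/configs/packages.yaml"  # hypothetical URL
sha = "0123abcd"  # hypothetical expected sha256
cached = rfc_util.local_path(url, sha, make_dest=tempfile.mkdtemp)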
lib/spack/spack/verify_libraries.py (new file, 212 lines)
@@ -0,0 +1,212 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import fnmatch
import os
import re
from typing import IO, Dict, List

from llnl.util.filesystem import BaseDirectoryVisitor
from llnl.util.lang import stable_partition

import spack.util.elf as elf

#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
    # kernel
    "linux-vdso.so.*",
    "libselinux.so.*",
    # musl libc
    "ld-musl-*.so.*",
    # glibc
    "ld-linux*.so.*",
    "ld64.so.*",
    "libanl.so.*",
    "libc.so.*",
    "libdl.so.*",
    "libm.so.*",
    "libmemusage.so.*",
    "libmvec.so.*",
    "libnsl.so.*",
    "libnss_compat.so.*",
    "libnss_db.so.*",
    "libnss_dns.so.*",
    "libnss_files.so.*",
    "libnss_hesiod.so.*",
    "libpcprofile.so.*",
    "libpthread.so.*",
    "libresolv.so.*",
    "librt.so.*",
    "libSegFault.so.*",
    "libthread_db.so.*",
    "libutil.so.*",
    # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
    # but the binaries we copy from the compiler don't have an $ORIGIN rpath.
    "libasan.so.*",
    "libatomic.so.*",
    "libcc1.so.*",
    "libgcc_s.so.*",
    "libgfortran.so.*",
    "libgomp.so.*",
    "libitm.so.*",
    "liblsan.so.*",
    "libquadmath.so.*",
    "libssp.so.*",
    "libstdc++.so.*",
    "libtsan.so.*",
    "libubsan.so.*",
    # systemd
    "libudev.so.*",
    # cuda driver
    "libcuda.so.*",
    # intel-oneapi-runtime
    "libur_loader.so.*",
]


def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
    return (
        child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
        and parent.is_little_endian == child.is_little_endian
        and parent.is_64_bit == child.is_64_bit
        and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
    )


def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
    try:
        with open(candidate_path, "rb") as g:
            return is_compatible(current_elf, elf.parse_elf(g))
    except (OSError, elf.ElfParsingError):
        return False


class Problem:
    def __init__(
        self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
    ) -> None:
        self.resolved = resolved
        self.unresolved = unresolved
        self.relative_rpaths = relative_rpaths


class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
    def __init__(self, allow_unresolved_patterns: List[str]) -> None:
        self.problems: Dict[str, Problem] = {}
        self._allow_unresolved_regex = re.compile(
            "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
        )

    def allow_unresolved(self, needed: bytes) -> bool:
        try:
            name = needed.decode("utf-8")
        except UnicodeDecodeError:
            return False
        return bool(self._allow_unresolved_regex.match(name))

    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
        # We work with byte strings for paths.
        path = os.path.join(root, rel_path).encode("utf-8")

        # For $ORIGIN interpolation: should not have a trailing dir separator.
        origin = os.path.dirname(path)

        # Retrieve the needed libs + rpaths.
        try:
            with open(path, "rb") as f:
                parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
        except (OSError, elf.ElfParsingError):
            # Not dealing with a valid ELF file.
            return

        # If there are no needed libs all is good
        if not parsed_elf.has_needed:
            return

        # Get the needed libs and rpaths (notice: byte strings)
        # Don't force an encoding cause paths are just a bag of bytes.
        needed_libs = parsed_elf.dt_needed_strs

        rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []

        # We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
        # supported in general. Also remove empty paths.
        rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]

        # Do not allow relative rpaths (they are relative to the current working directory)
        rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)

        # If there's a / in the needed lib, it's opened directly, otherwise it needs
        # a search.
        direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)

        # Do not allow relative paths in direct libs (they are relative to the current working
        # directory)
        direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)

        resolved: Dict[bytes, bytes] = {}

        for lib in search_libs:
            if self.allow_unresolved(lib):
                continue
            for rpath in rpaths:
                candidate = os.path.join(rpath, lib)
                if candidate_matches(parsed_elf, candidate):
                    resolved[lib] = candidate
                    break
            else:
                unresolved.append(lib)

        # Check if directly opened libs are compatible
        for lib in direct_libs:
            if candidate_matches(parsed_elf, lib):
                resolved[lib] = lib
            else:
                unresolved.append(lib)

        if unresolved or relative_rpaths:
            self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)

    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
        pass

    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
        # There can be binaries in .spack/test which shouldn't be checked.
        if rel_path == ".spack":
            return False
        return True

    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
        return False

    def write(self, output: IO[str], *, indent=0, brief: bool = False) -> None:
        indent_str = " " * indent
        for path, problem in self.problems.items():
            output.write(indent_str)
            output.write(path)
            output.write("\n")
            if not brief:
                for needed, full_path in problem.resolved.items():
                    output.write(indent_str)
                    output.write(" ")
                    if needed == full_path:
                        output.write(_decode_or_raw(needed))
                    else:
                        output.write(f"{_decode_or_raw(needed)} => {_decode_or_raw(full_path)}")
                    output.write("\n")
            for not_found in problem.unresolved:
                output.write(indent_str)
                output.write(f" {_decode_or_raw(not_found)} => not found\n")
            for relative_rpath in problem.relative_rpaths:
                output.write(indent_str)
                output.write(f" {_decode_or_raw(relative_rpath)} => relative rpath\n")


def _decode_or_raw(byte_str: bytes) -> str:
    try:
        return byte_str.decode("utf-8")
    except UnicodeDecodeError:
        return f"{byte_str!r}"
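A short sketch of how this visitor might be driven; the install prefix is hypothetical, and it assumes visit_directory_tree from llnl.util.filesystem (the walker counterpart of the BaseDirectoryVisitor imported above) is available with this signature:

import sys

from llnl.util.filesystem import visit_directory_tree  # assumed walker for BaseDirectoryVisitor

import spack.verify_libraries as vl

# Hypothetical install prefix to scan for unresolved shared-library dependencies.
prefix = "/opt/spack/opt/linux-x86_64/zlib-1.3.1-abcdef"

visitor = vl.ResolveSharedElfLibDepsVisitor(vl.ALLOW_UNRESOLVED)
visit_directory_tree(prefix, visitor)

# Report any binaries whose DT_NEEDED entries could not be resolved via their RPATHs.
if visitor.problems:
    visitor.write(sys.stdout, indent=2, brief=True)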
@@ -426,7 +426,7 @@ developer-tools-aarch64-linux-gnu-build:
    SPACK_CI_STACK_NAME: developer-tools-darwin

developer-tools-darwin-generate:
-  tags: [ "macos-sonoma", "apple-clang-16", "aarch64-macos" ]
+  tags: [ "macos-sequoia", "apple-clang-16", "aarch64-macos" ]
  extends: [ ".developer-tools-darwin", ".generate-base"]

developer-tools-darwin-build:
@@ -686,7 +686,7 @@ ml-linux-aarch64-cuda-build:
    SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps

ml-darwin-aarch64-mps-generate:
-  tags: [ "macos-sonoma", "apple-clang-16", "aarch64-macos" ]
+  tags: [ "macos-sequoia", "apple-clang-16", "aarch64-macos" ]
  extends: [ ".ml-darwin-aarch64-mps", ".generate-base"]

ml-darwin-aarch64-mps-build:
@@ -910,7 +910,7 @@ bootstrap-x86_64-linux-gnu-build:
    SPACK_CI_STACK_NAME: bootstrap-aarch64-darwin

bootstrap-aarch64-darwin-generate:
-  tags: [ "macos-sonoma", "apple-clang-16", "aarch64-macos" ]
+  tags: [ "macos-sequoia", "apple-clang-16", "aarch64-macos" ]
  extends: [.bootstrap-aarch64-darwin, .generate-base]

bootstrap-aarch64-darwin-build:
@@ -20,8 +20,9 @@ ci:
    - k=$CI_GPG_KEY_ROOT/intermediate_ci_signing_key.gpg; [[ -r $k ]] && spack gpg trust $k
    - k=$CI_GPG_KEY_ROOT/spack_public_key.gpg; [[ -r $k ]] && spack gpg trust $k
  script::
-  - - spack config blame mirrors
-  - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+  - - if [ -n "$SPACK_EXTRA_MIRROR" ]; then spack mirror add local "$SPACK_EXTRA_MIRROR"; fi
+  - spack config blame mirrors
+  - - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
  after_script:
  - - cat /proc/loadavg || true
  - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
@@ -1,3 +1,5 @@
config:
+ build_stage:
+ - $spack/tmp/stage
  install_tree:
    root: $spack/opt/spack
@@ -1,6 +1,20 @@
ci:
+ broken-tests-packages:
+ - mpich
+ - openmpi
+ - py-mpi4py
  pipeline-gen:
+ - build-job-remove:
+     tags: [spack]
  - build-job:
-     tags: [ "macos-sonoma", "apple-clang-16", "aarch64-macos" ]
+     tags: [ "macos-sequoia", "apple-clang-16", "aarch64-macos" ]

+     # after_script intended to ensure all stage files are properly cleaned up,
+     # including those that may have been created as read-only by `go mod`
+     # as part of installation of a golang package
+     # see: https://github.com/spack/spack/issues/49147
+     after_script-:
+     - - if [[ -d tmp ]] ; then chmod -R u+w tmp ; else echo tmp not found ; fi
+       - ./bin/spack clean -a
  - build-job-remove:
      image:: macos-run-on-metal
@@ -52,10 +52,12 @@ spack:
  packages:
    acfl:
      require:
-     - '%gcc target=aarch64'
+     - "%gcc"
+     - "target=aarch64"
    gromacs:
      require:
-     - gromacs@2024.3 %gcc ^armpl-gcc ^openmpi
+     - gromacs@2024.3 ^armpl-gcc ^openmpi
+     - "%gcc"
    libfabric:
      buildable: true
      externals:
@@ -67,13 +69,14 @@ spack:
      variants: ~lldb
    mpas-model:
      require:
-     - precision=single %gcc ^parallelio+pnetcdf
+     - precision=single ^parallelio+pnetcdf
+     - "%gcc"
    mpich:
      require:
      - mpich pmi=pmi2 device=ch4 netmod=ofi +slurm
    nvhpc:
      require:
-     - nvhpc %gcc target=aarch64
+     - "target=aarch64"
    openfoam:
      require:
      - openfoam ^scotch@6.0.9
@@ -85,7 +88,7 @@ spack:
    # require:
    # - one_of: ["palace cxxflags=\"-include cstdint\" ^fmt@9.1.0"]
    pmix:
-     require: 'pmix@3:'
+     require: "pmix@3:"
    quantum-espresso:
      require:
      - quantum-espresso@6.6 %gcc ^armpl-gcc
@@ -82,6 +82,12 @@ spack:
      require:
      - lammps_sizes=bigbig +molecule +kspace +rigid +asphere +opt +openmp +openmp-package fft=mkl ^intel-oneapi-mkl
      - one_of: [+intel target=x86_64_v4, target=x86_64_v3]
+   bison:
+     require:
+     - "%gcc"
+   boost:
+     require:
+     - "%gcc"
    libfabric:
      buildable: true
      externals:
@@ -25,8 +25,6 @@ spack:

ci:
  pipeline-gen:
- - build-job-remove:
-     tags: [spack, public]
  - build-job:
      variables:
        CI_GPG_KEY_ROOT: /etc/protected-runner
@@ -2,9 +2,10 @@ spack:
  view: false
  packages:
    all:
-     require: target=x86_64_v3
- definitions:
- - default_specs:
+     require:
+     - target=x86_64_v3
+ specs:
  - 'uncrustify build_system=autotools'
  - 'uncrustify build_system=cmake'
  - lz4  # MakefilePackage
@@ -14,13 +15,6 @@ spack:
  - r-rcpp  # RPackage
  - ruby-rake  # RubyPackage
  - perl-data-dumper  # PerlPackage
- - arch:
-   - '%gcc'
-
- specs:
- - matrix:
-   - - $default_specs
-   - - $arch

  cdash:
    build-group: Build Systems
@@ -2,12 +2,16 @@ spack:
  view: false
  packages:
    all:
-     require: target=aarch64
+     require:
+     - target=aarch64
+     prefer:
+     - '%gcc'

  concretizer:
    unify: true
    reuse: false
- definitions:
- - default_specs:
+ specs:
    # editors
    - neovim~no_luajit
    - py-pynvim
@@ -68,14 +72,6 @@ spack:
    - doxygen
    - meson

- - arch:
-   - '%gcc target=aarch64'
-
- specs:
- - matrix:
-   - - $default_specs
-   - - $arch

  ci:
    pipeline-gen:
    - build-job:
@@ -9,7 +9,7 @@ spack:
    reuse: false
  specs:
    # editors
-   - neovim~no_luajit
+   #- neovim~no_luajit # build fails: https://github.com/spack/spack/pull/48453#issuecomment-2624788262
    - py-pynvim
    - emacs+json~native+treesitter # TODO native not supported until gcc builds on darwin
    # - tree-sitter is a dep, should also have cli but no package
@@ -64,8 +64,6 @@ spack:

  ci:
    pipeline-gen:
-   - build-job-remove:
-       tags: [ spack, public ]
    - build-job:
        variables:
          CI_GPG_KEY_ROOT: /etc/protected-runner
@@ -1,19 +1,20 @@
 spack:
   view: false
 
   packages:
     all:
       require:
       - target=x86_64_v3
       - ~cuda
       - ~rocm
+      prefer:
+      - "%gcc"
   concretizer:
     unify: true
     reuse: false
     static_analysis: true
 
-  definitions:
-  - default_specs:
+  specs:
   # editors
   - neovim~no_luajit
   - py-pynvim
@@ -74,14 +75,6 @@ spack:
   - doxygen
   - meson
 
-  - arch:
-    - '%gcc target=x86_64_v3'
-
-  specs:
-  - matrix:
-    - - $default_specs
-    - - $arch
-
   ci:
     pipeline-gen:
     - build-job:

@@ -5,6 +5,7 @@ spack:
   view: false
 
   concretizer:
+    static_analysis: true
     reuse: false
     unify: false
 
@@ -14,8 +15,9 @@ spack:
 
   packages:
     all:
-      require: "%cce@18.0.0 target=x86_64_v3"
-      compiler: [cce]
+      require:
+      - target=x86_64_v3
+      - "%cce"
       providers:
         blas: [cray-libsci]
         lapack: [cray-libsci]
@@ -23,6 +25,21 @@ spack:
         tbb: [intel-tbb]
         scalapack: [netlib-scalapack]
       variants: +mpi
+
+    # Virtuals
+    blas:
+      require:
+      - cray-libsci
+    lapack:
+      require:
+      - cray-libsci
+    mpi:
+      require:
+      - cray-mpich
+    scalapack:
+      require:
+      - netlib-scalapack
+
     ncurses:
       require: +termlib ldflags=-Wl,--undefined-version
     tbb:
@@ -33,21 +50,28 @@ spack:
       variants: +python +filesystem +iostreams +system
     elfutils:
       variants: ~nls
-      require: "%gcc"
+      require:
+      - target=x86_64_v3
+      - "%gcc"
     gcc-runtime:
-      require: "%gcc"
+      require:
+      - target=x86_64_v3
+      - "%gcc"
     hdf5:
       variants: +fortran +hl +shared
     libfabric:
       variants: fabrics=sockets,tcp,udp,rxm
     mgard:
       require:
+      - target=x86_64_v3
       - "@2023-01-10:"
     mpich:
       variants: ~wrapperrpath
     paraview:
       # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "~qt ^[virtuals=gl] osmesa"
+      require:
+      - "~qt ^[virtuals=gl] osmesa"
+      - target=x86_64_v3
     trilinos:
       require:
       - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
@@ -58,6 +82,7 @@ spack:
       - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko]
       - one_of: [+superlu-dist, ~superlu-dist]
       - one_of: [+shylu, ~shylu]
+      - target=x86_64_v3
 
   specs:
   # CPU

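Note: the Cray stack above also pins providers for virtual packages explicitly. The general shape, sketched here with the providers named in the hunk (any installed provider could be substituted):

  packages:
    mpi:
      require:
      - cray-mpich
    blas:
      require:
      - cray-libsci
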
@@ -7,7 +7,9 @@ spack:
 
   packages:
     all:
-      require: '%gcc target=neoverse_v2'
+      require:
+      - "%gcc"
+      - target=neoverse_v2
       providers:
         blas: [openblas]
         mpi: [mpich]

@@ -4,12 +4,13 @@ spack:
   concretizer:
     reuse: false
     unify: false
+    static_analysis: false
 
   packages:
     all:
       require:
-      - "target=x86_64_v3"
-      - "%oneapi"
+      - target=x86_64_v3
+      - '%oneapi'
       providers:
         blas: [openblas]
         tbb: [intel-tbb]
@@ -38,32 +39,43 @@ spack:
     xz:
       variants: +pic
     mpi:
-      require: 'mpich@4: target=x86_64_v3'
-    mpich:
-      require: '~wrapperrpath ~hwloc target=x86_64_v3'
+      require: intel-oneapi-mpi
+    intel-oneapi-mpi:
+      buildable: false
+      externals:
+      - spec: intel-oneapi-mpi@2021.13.1
+        prefix: /opt/intel/oneapi
     unzip:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     binutils:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
       variants: +ld +gold +headers +libiberty ~nls
     llvm:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     ruby:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     rust:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     krb5:
-      require: '%gcc target=x86_64_v3'
-    papi:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     openssh:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     dyninst:
-      require: "%gcc target=x86_64_v3"
+      require:
+      - '%gcc target=x86_64_v3'
     bison:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - '%gcc target=x86_64_v3'
     paraview:
-      require: "+examples %oneapi target=x86_64_v3"
+      require:
+      - +examples target=x86_64_v3
 
   specs:
   # CPU
@@ -128,7 +140,7 @@ spack:
   - nccmp
   - nco
   - netlib-scalapack
-  - nrm
+  - nrm ^py-scipy cflags="-Wno-error=incompatible-function-pointer-types" # py-scipy@1.8.1 fails without cflags here
   - nwchem
   - omega-h
   - openfoam

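Note: the oneAPI stack above swaps the built MPI for an external installation. The pattern is to mark the package non-buildable and register the existing install as an external; the version and prefix below are the ones shown in the hunk and would differ per system:

  packages:
    intel-oneapi-mpi:
      buildable: false
      externals:
      - spec: intel-oneapi-mpi@2021.13.1
        prefix: /opt/intel/oneapi
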
@@ -7,7 +7,8 @@ spack:
 
   packages:
     all:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - 'target=x86_64_v3'
       providers:
         blas: [openblas]
       variants: +mpi
@@ -21,7 +22,9 @@ spack:
       variants: threads=openmp
     paraview:
       # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 +examples ~qt ^[virtuals=gl] osmesa %gcc target=x86_64_v3"
+      require:
+      - "@5.11 +examples ~qt ^[virtuals=gl] osmesa"
+      - 'target=x86_64_v3'
 
     # ROCm
     comgr:

@@ -9,7 +9,8 @@ spack:
   packages:
     all:
       require:
-      - '%gcc target=x86_64_v3'
+      - "%gcc"
+      - target=x86_64_v3
       variants: +mpi
     mpi:
       require:

@@ -8,14 +8,16 @@ spack:
 
   packages:
     all:
-      require: '%gcc target=x86_64_v3'
+      require:
+      - "%gcc"
+      - target=x86_64_v3
       providers:
         blas: [openblas]
         mpi: [mpich]
         tbb: [intel-tbb]
       variants: +mpi
     acts:
-      require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python ~svg +tgeo cxxstd=20
+      require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +svg +tgeo cxxstd=20
     celeritas:
       require: +geant4 +hepmc3 +root +shared cxxstd=20
     hip:

@@ -9,13 +9,13 @@ spack:
       - ~cuda
       - ~rocm
     mpi:
-      require: openmpi
+      require: mpich
     openblas:
       require: ~fortran
 
   specs:
   # Horovod
-  - py-horovod
+  # - py-horovod # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
 
   # Hugging Face
   - py-transformers
@@ -45,18 +45,18 @@ spack:
   - py-segmentation-models-pytorch
   - py-timm
   - py-torch
-  - py-torch-cluster
+  # - py-torch-cluster # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
   - py-torch-geometric
-  - py-torch-nvidia-apex
-  - py-torch-scatter
-  - py-torch-sparse
-  - py-torch-spline-conv
+  # - py-torch-nvidia-apex # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
+  # - py-torch-scatter # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
+  # - py-torch-sparse # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
+  # - py-torch-spline-conv # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
   - py-torchaudio
   - py-torchdata
   - py-torchfile
   - py-torchgeo
   - py-torchmetrics
-  - py-torchtext
+  # - py-torchtext # https://github.com/spack/spack/pull/48453#issuecomment-2676023970
   - py-torchvision
   - py-vector-quantize-pytorch
 
@@ -84,8 +84,6 @@ spack:
 
   ci:
     pipeline-gen:
-    - build-job-remove:
-        tags: [ spack, public ]
     - build-job:
         variables:
           CI_GPG_KEY_ROOT: /etc/protected-runner

@@ -2,17 +2,14 @@ spack:
   view: false
   packages:
     all:
-      require: target=x86_64_v3
+      require:
+      - target=x86_64_v3
+      - '%gcc@7.5.0'
 
       providers:
         mpi: [mvapich2]
 
-  definitions:
-  #- compilers: ['%gcc@8.3.1', '%clang@10.0.0']
-  - compilers: ['%gcc@7.5.0']
-
-  # Note skipping spot since no spack package for it
-  - radiuss:
+  specs:
   - ascent # ^conduit@0.6.0
   - axom
   - blt
@@ -21,7 +18,7 @@ spack:
   - chai # ~examples
   - conduit # ^hdf5+shared
   - flux-core
-  #- flux-sched
+  # - flux-sched
   - hypre
   - lbann
   - lvarray ~tests # per Spack issue #23192 # ~examples
@@ -36,14 +33,9 @@ spack:
   - scr
   - sundials
   - umpire # ~openmp
-  #- visit # ^mesa-glu@9.0.0
+  # - visit # ^mesa-glu@9.0.0
   - xbraid
   - zfp
 
-  specs:
-  - matrix:
-    - [$radiuss]
-    - [$compilers]
-
   cdash:
     build-group: RADIUSS

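Note: as in several stacks above, the RADIUSS environment drops the definitions:/matrix indirection in favor of a flat specs: list, with the compiler constraint moved into packages:all:require. A hedged before/after sketch with a placeholder definition name (my_specs):

  # before: specs generated from a matrix
  definitions:
  - my_specs:
    - hypre
    - zfp
  - compilers:
    - '%gcc@7.5.0'
  specs:
  - matrix:
    - [$my_specs]
    - [$compilers]

  # after: a flat list; '%gcc@7.5.0' now lives under packages:all:require
  specs:
  - hypre
  - zfp
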
Some files were not shown because too many files have changed in this diff.