Compare commits
1 commit: packages/g...hs/rocm-op

| Author | SHA1 | Date |
|---|---|---|
| | eff56cd845 | |

.github/workflows/ci.yaml (4 changes, vendored)

@@ -76,11 +76,10 @@ jobs:
 
   prechecks:
     needs: [ changes ]
-    uses: ./.github/workflows/prechecks.yml
+    uses: ./.github/workflows/valid-style.yml
     secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-      with_packages: ${{ needs.changes.outputs.packages }}
 
   import-check:
     needs: [ changes ]

@@ -102,7 +101,6 @@ jobs:
 
   coverage:
     needs: [ unit-tests, prechecks ]
-    if: ${{ needs.changes.outputs.core }}
     uses: ./.github/workflows/coverage.yml
     secrets: inherit
 

@@ -1,7 +1,7 @@
-black==25.1.0
+black==24.10.0
 clingo==5.7.1
-flake8==7.1.2
-isort==6.0.1
-mypy==1.15.0
-types-six==1.17.0.20250304
+flake8==7.1.1
+isort==5.13.2
+mypy==1.11.2
+types-six==1.17.0.20241205
 vermin==1.6.0

@@ -1,4 +1,4 @@
-name: prechecks
+name: style
 
 on:
   workflow_call:

@@ -6,9 +6,6 @@ on:
       with_coverage:
         required: true
         type: string
-      with_packages:
-        required: true
-        type: string
 
 concurrency:
   group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}

@@ -33,7 +30,6 @@ jobs:
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
 
   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest

@@ -57,25 +53,12 @@ jobs:
     - name: Run style tests
       run: |
        share/spack/qa/run-style-tests
 
   audit:
     uses: ./.github/workflows/audit.yaml
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.13'
 
-  verify-checksums:
-    if: ${{ inputs.with_packages == 'true' }}
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-      with:
-        fetch-depth: 2
-    - name: Verify Added Checksums
-      run: |
-        bin/spack ci verify-versions HEAD^1 HEAD
-
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest

.gitignore (1 change, vendored)

@@ -201,6 +201,7 @@ tramp
 
 # Org-mode
 .org-id-locations
+*_archive
 
 # flymake-mode
 *_flymake.*

@@ -43,28 +43,6 @@ concretizer:
     # (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-    # Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
-    # number used if there isn't a more specific alternative
-    max_dupes:
-      default: 1
-      # Virtuals
-      c: 2
-      cxx: 2
-      fortran: 1
-      # Regular packages
-      cmake: 2
-      gmake: 2
-      python: 2
-      python-venv: 2
-      py-cython: 2
-      py-flit-core: 2
-      py-pip: 2
-      py-setuptools: 2
-      py-wheel: 2
-      xcb-proto: 2
-      # Compilers
-      gcc: 2
-      llvm: 2
   # Option to specify compatibility between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's

@@ -1761,24 +1761,19 @@ Verifying installations
 The ``spack verify`` command can be used to verify the validity of
 Spack-installed packages any time after installation.
 
-
-^^^^^^^^^^^^^^^^^^^^^^^^^
-``spack verify manifest``
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
 At installation time, Spack creates a manifest of every file in the
 installation prefix. For links, Spack tracks the mode, ownership, and
 destination. For directories, Spack tracks the mode, and
 ownership. For files, Spack tracks the mode, ownership, modification
-time, hash, and size. The ``spack verify manifest`` command will check,
-for every file in each package, whether any of those attributes have
-changed. It will also check for newly added files or deleted files from
-the installation prefix. Spack can either check all installed packages
+time, hash, and size. The Spack verify command will check, for every
+file in each package, whether any of those attributes have changed. It
+will also check for newly added files or deleted files from the
+installation prefix. Spack can either check all installed packages
 using the `-a,--all` or accept specs listed on the command line to
 verify.
 
-The ``spack verify manifest`` command can also verify for individual files
-that they haven't been altered since installation time. If the given file
+The ``spack verify`` command can also verify for individual files that
+they haven't been altered since installation time. If the given file
 is not in a Spack installation prefix, Spack will report that it is
 not owned by any package. To check individual files instead of specs,
 use the ``-f,--files`` option.

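The documentation above describes a manifest of per-file mode, ownership, modification time, hash, and size. As a rough illustration of that kind of check only (this is not Spack's implementation, and the ``recorded`` keys are hypothetical), a per-file comparison could look like:

```python
# Illustrative sketch: compare a file's current metadata and content hash
# against values recorded in an installation manifest.
import hashlib
import os


def file_changed(path: str, recorded: dict) -> bool:
    st = os.lstat(path)
    if (st.st_mode, st.st_size) != (recorded["mode"], recorded["size"]):
        return True  # permissions or size differ
    if int(st.st_mtime) != recorded["mtime"]:
        return True  # modification time differs
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest() != recorded["hash"]
```
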
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.
 
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-``spack verify libraries``
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The ``spack verify libraries`` command can be used to verify that packages
-do not have accidental system dependencies. This command scans the install
-prefixes of packages for executables and shared libraries, and resolves
-their needed libraries in their RPATHs. When needed libraries cannot be
-located, an error is reported. This typically indicates that a package
-was linked against a system library, instead of a library provided by
-a Spack package.
-
-This verification can also be enabled as a post-install hook by setting
-``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
-in :ref:`config.yaml <config-yaml>`.
-
 -----------------------
 Filesystem requirements
 -----------------------

@@ -223,10 +223,6 @@ def setup(sphinx):
     ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
-    ("py:class", "llnl.util.lang.KT"),
-    ("py:class", "llnl.util.lang.VT"),
-    ("py:obj", "llnl.util.lang.KT"),
-    ("py:obj", "llnl.util.lang.VT"),
 ]
 
 # The reST default role (used for this markup: `text`) to use for all documents.

@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.
 
-.. _Misc Cache:
-
 --------------------
 ``misc_cache``
 --------------------

@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
 
      aliases:
        inst: install -v
-
--------------------------------
-``concretization_cache:enable``
--------------------------------
-
-When set to ``true``, Spack will utilize a cache of solver outputs from
-successful concretization runs. When enabled, Spack will check the concretization
-cache prior to running the solver. If a previous request to solve a given
-problem is present in the cache, Spack will load the concrete specs and other
-solver data from the cache rather than running the solver. Specs not previously
-concretized will be added to the cache on a successful solve. The cache additionally
-holds solver statistics, so commands like ``spack solve`` will still return information
-about the run that produced a given solver result.
-
-This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
-Cache is cleaned.
-
-When ``false`` or ommitted, all concretization requests will be performed from scatch
-
-----------------------------
-``concretization_cache:url``
-----------------------------
-
-Path to the location where Spack will root the concretization cache. Currently this only supports
-paths on the local filesystem.
-
-Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
-
-------------------------------------
-``concretization_cache:entry_limit``
-------------------------------------
-
-Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.
-
------------------------------------
-``concretization_cache:size_limit``
------------------------------------
-
-Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.

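The removed ``entry_limit`` text says the oldest results are pruned until 10% of the limit has been removed, and that a value of 0 disables pruning. A minimal sketch of that policy under those assumptions (illustrative only, not Spack's code), using an insertion-ordered mapping as the cache:

```python
# Minimal sketch of the described pruning policy for an entry-count limit,
# assuming oldest entries come first in the ordered mapping.
from collections import OrderedDict


def prune_entries(cache: "OrderedDict[str, object]", entry_limit: int) -> None:
    if entry_limit == 0 or len(cache) <= entry_limit:
        return  # a limit of 0 disables automatic pruning
    # Prune oldest results until roughly 10% of the limit has been freed.
    target = entry_limit - max(1, entry_limit // 10)
    while len(cache) > target:
        cache.popitem(last=False)  # discard the oldest entry
```
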
@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
-* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`

@@ -670,45 +670,24 @@ This configuration sets the default compiler for all packages to
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack environments allow an ``include`` heading in their yaml schema.
-This heading pulls in external configuration files and applies them to
-the environment.
+Spack environments allow an ``include`` heading in their yaml
+schema. This heading pulls in external configuration files and applies
+them to the environment.
 
 .. code-block:: yaml
 
    spack:
     include:
-    - environment/relative/path/to/config.yaml
+    - relative/path/to/config.yaml
     - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml
-    - path: /path/to/$os/$target/environment
-      optional: true
-    - path: /path/to/os-specific/config-dir
-      when: os == "ventura"
-
-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. (See
-:ref:`include-yaml` for more information on optional and conditional entries.)
-
-Files are listed using paths to individual files or directories containing them.
-Path entries may be absolute or relative to the environment or specified as
-URLs. URLs to individual files need link to the **raw** form of the file's
-contents (e.g., `GitHub
-<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
-or `GitLab
-<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
-Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
-supported. Spack-specific, environment and user path variables can be used.
-(See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.
+
+Environments can include files or URLs. File paths can be relative or
+absolute. URLs include the path to the text for individual files or
+can be the path to a directory containing configuration files.
+Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
+schemes). Spack-specific, environment and user path variables may be
+used in these paths. See :ref:`config-file-variables` for more information.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence

@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
 
 .. note::
 
-   As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
+   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
 
 The following set of criteria (from lowest to highest precedence) explain
 common cases where concretization output may seem surprising at first.

@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
       concretizer:
         reuse: dependencies # other options are 'true' and 'false'
 
-3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
-   are higher priority than reuse, and can be used to strongly prefer a specific version
-   or variant, without erroring out if it's not possible. Strong preferences are specified
-   as follows:
-
-   .. code-block:: yaml
-
-      packages:
-        foo:
-          prefer:
-          - "@1.1: ~mpi"
-
-4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
+3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
    and constraints from the command line as well as ``package.py`` files override all
    of the above. Requirements are specified as follows:
 

@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
         foo:
           require:
           - "@1.2: +mpi"
-          conflicts:
-          - "@1.4"
 
    Requirements and constraints restrict the set of possible solutions, while reuse
    behavior and preferences influence what an optimal solution looks like.

@@ -1,51 +0,0 @@
-.. Copyright Spack Project Developers. See COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _include-yaml:
-
-===============================
-Include Settings (include.yaml)
-===============================
-
-Spack allows you to include configuration files through ``include.yaml``.
-Using the ``include:`` heading results in pulling in external configuration
-information to be used by any Spack command.
-
-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. For
-example,
-
-.. code-block:: yaml
-
-   include:
-   - /path/to/a/required/config.yaml
-   - path: /path/to/$os/$target/config
-     optional: true
-   - path: /path/to/os-specific/config-dir
-     when: os == "ventura"
-
-shows all three. The first entry, ``/path/to/a/required/config.yaml``,
-indicates that included ``config.yaml`` file is required (so must exist).
-Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
-the path is only included if it exists. The condition ``os == "ventura"``
-in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
-path is only included when the operating system (``os``) is ``ventura``.
-
-The same conditions and variables in `Spec List References
-<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
-can be used for conditional activation in the ``when`` clauses.
-
-Included files can be specified by path or by their parent directory.
-Paths may be absolute, relative (to the configuration file including the path),
-or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
-schemes) are supported. Spack-specific, environment and user path variables
-can be used. (See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.

@@ -71,7 +71,6 @@ or refer to the full manual below.
 
    configuration
    config_yaml
-   include_yaml
    packages_yaml
    build_settings
    environments

@@ -486,8 +486,6 @@ present. For instance with a configuration like:
 
 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
 
-.. _package-strong-preferences:
-
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Conflicts and strong preferences
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
 build group on CDash called "Release Testing" (that group will be created if
 it didn't already exist).
 
-.. _ci_artifacts:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-CI Artifacts Directory Layout
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-When running the CI build using the command ``spack ci rebuild`` a number of directories are created for
-storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
-This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
-or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
-
-The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
-``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories nor does
-it require user specified files be written to any specific directory.
-
-------------------------
-``concrete_environment``
-------------------------
-
-The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
-the concrete ``spack.lock`` for the CI environment.
-
---------
-``logs``
---------
-
-The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
-modification file, ``spack-build-mod-env.txt``. Additionally all files specified by the packages ``Builder``
-property ``archive_files`` are also copied here (ie. ``CMakeCache.txt`` in ``CMakeBuilder``).
-
-----------------
-``reproduction``
-----------------
-
-The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
-This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
-JSON file for the current spec being built, and all of the files written in the artifacts root directory.
-
-The ``repro.json`` file is not versioned and is only designed to work with the version of spack CI was run with.
-An example of what a ``repro.json`` may look like is here.
-
-.. code:: json
-
-   {
-     "job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
-     "job_spec_json": "adios2.json",
-     "ci_project_dir": "/builds/spack/spack"
-   }
-
----------
-``tests``
----------
-
-The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
-data in it depending on the package that was built and the availability of tests.
-
--------------
-``user_data``
--------------
-
-The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` direcotory.
-Users may use this to store additional logs or metrics or other types of files generated by the build job.
-
 -------------------------------------
 Using a custom spack in your pipeline
 -------------------------------------

@@ -1,13 +1,13 @@
-sphinx==8.2.3
+sphinx==8.1.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.27.1
+python-levenshtein==0.26.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.5
-isort==6.0.1
-black==25.1.0
-flake8==7.1.2
+pytest==8.3.4
+isort==5.13.2
+black==24.10.0
+flake8==7.1.1
 mypy==1.11.1

@@ -7,7 +7,6 @@
 import fnmatch
 import glob
 import hashlib
-import io
 import itertools
 import numbers
 import os

@@ -21,7 +20,6 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
-    IO,
     Callable,
     Deque,
     Dict,

@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
     and vis versa.
     """
 
-    def __init__(
-        self,
-        package,
-        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
-        link_install_prefix: bool = True,
-    ):
+    def __init__(self, package, link_install_prefix=True):
         """
         Args:
             package (spack.package_base.PackageBase): Package requiring links
-            base_modification_prefix (str|pathlib.Path): Path representation indicating
-                the root directory in which to establish the simulated rpath, ie where the
-                symlinks that comprise the "rpath" behavior will be installed.
-
-                Note: This is a mutually exclusive option with `link_install_prefix` using
-                both is an error.
-
-                Default: None
             link_install_prefix (bool): Link against package's own install or stage root.
                 Packages that run their own executables during build and require rpaths to
-                the build directory during build time require this option.
-
-                Default: install
+                the build directory during build time require this option. Default: install
                 root
-
-                Note: This is a mutually exclusive option with `base_modification_prefix`, using
-                both is an error.
         """
         self.pkg = package
-        self._addl_rpaths: set[str] = set()
-        if link_install_prefix and base_modification_prefix:
-            raise RuntimeError(
-                "Invalid combination of arguments given to WindowsSimulated RPath.\n"
-                "Select either `link_install_prefix` to create an install prefix rpath"
-                " or specify a `base_modification_prefix` for any other link type. "
-                "Specifying both arguments is invalid."
-            )
-        if not (link_install_prefix or base_modification_prefix):
-            raise RuntimeError(
-                "Insufficient arguments given to WindowsSimulatedRpath.\n"
-                "WindowsSimulatedRPath requires one of link_install_prefix"
-                " or base_modification_prefix to be specified."
-                " Neither was provided."
-            )
-
+        self._addl_rpaths = set()
         self.link_install_prefix = link_install_prefix
-        if base_modification_prefix:
-            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
-        else:
-            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
-        self._additional_library_dependents: set[pathlib.Path] = set()
-        if not self.link_install_prefix:
-            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
+        self._additional_library_dependents = set()
 
     @property
     def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        base_pths = set()
-        if self.link_install_prefix:
-            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
-        base_pths |= self._additional_library_dependents
-        return base_pths
+        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
 
     def add_library_dependent(self, *dest):
         """

@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
                 new_pth = pathlib.Path(pth).parent
             else:
                 new_pth = pathlib.Path(pth)
-            path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
-            if not path_is_in_prefix:
-                raise RuntimeError(
-                    f"Attempting to generate rpath symlink out of rpath context:\
-{str(self.base_modification_prefix)}"
-                )
             self._additional_library_dependents.add(new_pth)
 
     @property

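The removed guard uses ``pathlib.PurePath.is_relative_to`` (available since Python 3.9) to reject symlink destinations that fall outside the configured prefix. A standalone illustration of that check, with purely hypothetical paths:

```python
# Standalone illustration of the containment check used by the removed guard;
# the paths below are hypothetical and only show how is_relative_to behaves.
import pathlib

prefix = pathlib.Path("/opt/spack/opt/pkg-1.0")
inside = prefix / "bin" / "tool.exe"
outside = pathlib.Path("/tmp/build/tool.exe")

assert inside.is_relative_to(prefix)       # allowed: stays inside the prefix
assert not outside.is_relative_to(prefix)  # rejected: would escape the prefix
```
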
@@ -2628,33 +2577,6 @@ def establish_link(self):
                 self._link(library, lib_dir)
 
 
-def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
-    """Establishes a temp Windows simulated rpath for the pkg in the testing directory
-    so an executable can test the libraries/executables with proper access
-    to dependent dlls
-
-    Note: this is a no-op on all other platforms besides Windows
-
-    Args:
-        pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
-        test_dir: the testing directory in which we should construct an rpath
-    """
-    # link_install_prefix as false ensures we're not linking into the install prefix
-    mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
-    # add the testing directory as a location to install rpath symlinks
-    mini_rpath.add_library_dependent(test_dir)
-
-    # check for whether build_directory is available, if not
-    # assume the stage root is the build dir
-    build_dir_attr = getattr(pkg, "build_directory", None)
-    build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
-    # add the build dir & build dir bin
-    mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
-    mini_rpath.add_rpath(os.path.join(build_directory))
-    # construct rpath
-    mini_rpath.establish_link()
-
-
 @system_path_filter
 @memoized
 def can_access_dir(path):

@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
         os.utime(f, (os.path.getatime(f), mtime))
 
 
-@contextmanager
-def temporary_file_position(stream):
-    orig_pos = stream.tell()
-    yield
-    stream.seek(orig_pos)
-
-
-@contextmanager
-def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
-    with temporary_file_position(stream):
-        stream.seek(loc, relative_to)
-        yield
-
-
 @contextmanager
 def temporary_dir(
     suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None

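The two removed context managers save a stream's position, let the caller seek elsewhere, and restore the original position on exit. A small usage sketch, assuming the removed ``current_file_position``/``temporary_file_position`` helpers above are in scope:

```python
# Usage sketch for the removed helpers: peek at data at an absolute offset,
# then continue from the original position afterwards.
import io

stream = io.StringIO("0123456789")
stream.seek(4)

with current_file_position(stream, 0, io.SEEK_SET):  # jump to the start...
    peeked = stream.read(2)                           # ...and peek at "01"

assert peeked == "01"
assert stream.tell() == 4  # position restored by temporary_file_position
```
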
@@ -11,11 +11,10 @@
 import re
 import sys
 import traceback
-import types
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
+from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
 
 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"

@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
 
 
 class Singleton:
-    """Wrapper for lazily initialized singleton objects."""
+    """Simple wrapper for lazily initialized singleton objects."""
 
-    def __init__(self, factory: Callable[[], object]):
+    def __init__(self, factory):
         """Create a new singleton to be inited with the factory function.
 
-        Most factories will simply create the object to be initialized and
-        return it.
-
-        In some cases, e.g. when bootstrapping some global state, the singleton
-        may need to be initialized incrementally. If the factory returns a generator
-        instead of a regular object, the singleton will assign each result yielded by
-        the generator to the singleton instance. This allows methods called by
-        the factory in later stages to refer back to the singleton.
-
         Args:
-            factory (function): function taking no arguments that creates the
-                singleton instance.
+            factory (function): function taking no arguments that
+                creates the singleton instance.
 
         """
         self.factory = factory
         self._instance = None

@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
     @property
     def instance(self):
         if self._instance is None:
-            instance = self.factory()
-
-            if isinstance(instance, types.GeneratorType):
-                # if it's a generator, assign every value
-                for value in instance:
-                    self._instance = value
-            else:
-                # if not, just assign the result like a normal singleton
-                self._instance = instance
-
+            self._instance = self.factory()
         return self._instance
 
     def __getattr__(self, name):

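The removed branch lets a factory yield intermediate values so code run by later initialization steps can already see the partially built singleton. A sketch of the kind of generator factory that behavior supports, assuming the generator-aware ``Singleton`` shown in the removed lines; the names ``make_config`` and ``load_scopes`` are purely illustrative:

```python
def load_scopes():
    # stand-in for work that might consult the singleton while it initializes
    return ["defaults", "site", "user"]


def make_config():
    config = {}
    yield config                      # singleton already resolves to this dict
    config["scopes"] = load_scopes()  # later stages may refer back to it
    yield config                      # the last yielded value is kept


CONFIG = Singleton(make_config)       # assumes the generator-aware class above
print(CONFIG.instance["scopes"])
```
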
@@ -1100,88 +1080,3 @@ def __set__(self, instance, value):
 
     def factory(self, instance, owner):
         raise NotImplementedError("must be implemented by derived classes")
-
-
-KT = TypeVar("KT")
-VT = TypeVar("VT")
-
-
-class PriorityOrderedMapping(Mapping[KT, VT]):
-    """Mapping that iterates over key according to an integer priority. If the priority is
-    the same for two keys, insertion order is what matters.
-
-    The priority is set when the key/value pair is added. If not set, the highest current priority
-    is used.
-    """
-
-    _data: Dict[KT, VT]
-    _priorities: List[Tuple[int, KT]]
-
-    def __init__(self) -> None:
-        self._data = {}
-        # Tuple of (priority, key)
-        self._priorities = []
-
-    def __getitem__(self, key: KT) -> VT:
-        return self._data[key]
-
-    def __len__(self) -> int:
-        return len(self._data)
-
-    def __iter__(self):
-        yield from (key for _, key in self._priorities)
-
-    def __reversed__(self):
-        yield from (key for _, key in reversed(self._priorities))
-
-    def reversed_keys(self):
-        """Iterates over keys from the highest priority, to the lowest."""
-        return reversed(self)
-
-    def reversed_values(self):
-        """Iterates over values from the highest priority, to the lowest."""
-        yield from (self._data[key] for _, key in reversed(self._priorities))
-
-    def _highest_priority(self) -> int:
-        if not self._priorities:
-            return 0
-        result, _ = self._priorities[-1]
-        return result
-
-    def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
-        """Adds a key/value pair to the mapping, with a specific priority.
-
-        If the priority is None, then it is assumed to be the highest priority value currently
-        in the container.
-
-        Raises:
-            ValueError: when the same priority is already in the mapping
-        """
-        if priority is None:
-            priority = self._highest_priority()
-
-        if key in self._data:
-            self.remove(key)
-
-        self._priorities.append((priority, key))
-        # We rely on sort being stable
-        self._priorities.sort(key=lambda x: x[0])
-        self._data[key] = value
-        assert len(self._data) == len(self._priorities)
-
-    def remove(self, key: KT) -> VT:
-        """Removes a key from the mapping.
-
-        Returns:
-            The value associated with the key being removed
-
-        Raises:
-            KeyError: if the key is not in the mapping
-        """
-        if key not in self._data:
-            raise KeyError(f"cannot find {key}")
-
-        popped_item = self._data.pop(key)
-        self._priorities = [(p, k) for p, k in self._priorities if k != key]
-        assert len(self._data) == len(self._priorities)
-        return popped_item

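For reference, the removed mapping iterates keys by ascending priority, with insertion order breaking ties, and ``reversed_keys`` walks from highest priority down. A small usage sketch, assuming the ``PriorityOrderedMapping`` shown in the removed lines; the keys and values are illustrative only:

```python
# Usage sketch for the removed PriorityOrderedMapping.
pom = PriorityOrderedMapping()
pom.add("cflags", value="-O2", priority=10)
pom.add("cppflags", value="-DNDEBUG", priority=10)  # same priority: insertion order
pom.add("ldflags", value="-lm", priority=1)

assert list(pom) == ["ldflags", "cflags", "cppflags"]           # lowest priority first
assert list(pom.reversed_keys()) == ["cppflags", "cflags", "ldflags"]
```
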
@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
         self.src_a = src_a
         self.src_b = src_b
 
-    def __repr__(self) -> str:
-        return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
-
-
-def _samefile(a: str, b: str):
-    try:
-        return os.path.samefile(a, b)
-    except OSError:
-        return False
-
-
 class SourceMergeVisitor(BaseDirectoryVisitor):
     """

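``os.path.samefile`` stats both paths, so it raises ``OSError`` when either one is missing; that is why the removed ``_samefile`` helper swallows the exception and treats the pair as different. A short illustration of that behavior:

```python
# Why the removed _samefile helper catches OSError.
import os
import tempfile

with tempfile.NamedTemporaryFile() as f:
    assert os.path.samefile(f.name, f.name)  # same inode: True
    try:
        os.path.samefile(f.name, f.name + ".missing")
    except OSError:
        pass  # a missing path raises OSError instead of returning False
```
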
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
     - A list of merge conflicts in dst/
     """
 
-    def __init__(
-        self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
-    ):
+    def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
         self.ignore = ignore if ignore is not None else lambda f: False
 
-        # On case-insensitive filesystems, normalize paths to detect duplications
-        self.normalize_paths = normalize_paths
-
         # When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
         # bit to the relative path in the destination dir.
         self.projection: str = ""

@@ -86,88 +71,10 @@ def __init__(
         # and can run mkdir in order.
         self.directories: Dict[str, Tuple[str, str]] = {}
 
-        # If the visitor is configured to normalize paths, keep a map of
-        # normalized path to: original path, root directory + relative path
-        self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
-
         # Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
         # are guaranteed to be grouped by src_root in the order they were visited.
         self.files: Dict[str, Tuple[str, str]] = {}
 
-        # If the visitor is configured to normalize paths, keep a map of
-        # normalized path to: original path, root directory + relative path
-        self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
-
-    def _in_directories(self, proj_rel_path: str) -> bool:
-        """
-        Check if a path is already in the directory list
-        """
-        if self.normalize_paths:
-            return proj_rel_path.lower() in self._directories_normalized
-        else:
-            return proj_rel_path in self.directories
-
-    def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
-        """
-        Get the directory that is mapped to a path
-        """
-        if self.normalize_paths:
-            return self._directories_normalized[proj_rel_path.lower()]
-        else:
-            return (proj_rel_path, *self.directories[proj_rel_path])
-
-    def _del_directory(self, proj_rel_path: str):
-        """
-        Remove a directory from the list of directories
-        """
-        del self.directories[proj_rel_path]
-        if self.normalize_paths:
-            del self._directories_normalized[proj_rel_path.lower()]
-
-    def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
-        """
-        Add a directory to the list of directories.
-        Also stores the normalized version for later lookups
-        """
-        self.directories[proj_rel_path] = (root, rel_path)
-        if self.normalize_paths:
-            self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
-
-    def _in_files(self, proj_rel_path: str) -> bool:
-        """
-        Check if a path is already in the files list
-        """
-        if self.normalize_paths:
-            return proj_rel_path.lower() in self._files_normalized
-        else:
-            return proj_rel_path in self.files
-
-    def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
-        """
-        Get the file that is mapped to a path
-        """
-        if self.normalize_paths:
-            return self._files_normalized[proj_rel_path.lower()]
-        else:
-            return (proj_rel_path, *self.files[proj_rel_path])
-
-    def _del_file(self, proj_rel_path: str):
-        """
-        Remove a file from the list of files
-        """
-        del self.files[proj_rel_path]
-        if self.normalize_paths:
-            del self._files_normalized[proj_rel_path.lower()]
-
-    def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
-        """
-        Add a file to the list of files
-        Also stores the normalized version for later lookups
-        """
-        self.files[proj_rel_path] = (root, rel_path)
-        if self.normalize_paths:
-            self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
-
     def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
         """
         Register a directory if dst / rel_path is not blocked by a file or ignored.

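The removed helpers give the visitor a case-insensitive view of its bookkeeping by keying a parallel dict on the lower-cased projected path. A minimal sketch of that lookup scheme (illustrative only, not the full visitor; the example paths are hypothetical):

```python
# Minimal sketch of case-normalized bookkeeping: one dict keyed on the
# original projected path, one keyed on its lower-cased form.
from typing import Dict, Tuple

files: Dict[str, Tuple[str, str]] = {}
files_normalized: Dict[str, Tuple[str, str, str]] = {}


def add_file(proj_rel_path: str, root: str, rel_path: str, normalize: bool) -> None:
    files[proj_rel_path] = (root, rel_path)
    if normalize:
        files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)


add_file("Bin/Tool.exe", "/src/a", "Bin/Tool.exe", normalize=True)
# On a case-insensitive filesystem, "bin/tool.exe" collides with "Bin/Tool.exe":
assert "bin/tool.exe" in files_normalized
```
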
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
         if self.ignore(rel_path):
             # Don't recurse when dir is ignored.
             return False
-        elif self._in_files(proj_rel_path):
-            # A file-dir conflict is fatal except if they're the same file (symlinked dir).
-            src_a = os.path.join(*self._file(proj_rel_path))
-            src_b = os.path.join(root, rel_path)
-
-            if not _samefile(src_a, src_b):
-                self.fatal_conflicts.append(
-                    MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
-                )
-                return False
-
-            # Remove the link in favor of the dir.
-            existing_proj_rel_path, _, _ = self._file(proj_rel_path)
-            self._del_file(existing_proj_rel_path)
-            self._add_directory(proj_rel_path, root, rel_path)
-            return True
-        elif self._in_directories(proj_rel_path):
+        elif proj_rel_path in self.files:
+            # Can't create a dir where a file is.
+            src_a_root, src_a_relpath = self.files[proj_rel_path]
+            self.fatal_conflicts.append(
+                MergeConflict(
+                    dst=proj_rel_path,
+                    src_a=os.path.join(src_a_root, src_a_relpath),
+                    src_b=os.path.join(root, rel_path),
+                )
+            )
+            return False
+        elif proj_rel_path in self.directories:
             # No new directory, carry on.
             return True
         else:
             # Register new directory.
-            self._add_directory(proj_rel_path, root, rel_path)
+            self.directories[proj_rel_path] = (root, rel_path)
             return True
 
     def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:

@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
         if handle_as_dir:
             return self.before_visit_dir(root, rel_path, depth)
 
-        self.visit_file(root, rel_path, depth, symlink=True)
+        self.visit_file(root, rel_path, depth)
         return False
 
     def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:

@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
 
         if self.ignore(rel_path):
             pass
-        elif self._in_directories(proj_rel_path):
-            # Can't create a file where a dir is, unless they are the same file (symlinked dir),
-            # in which case we simply drop the symlink in favor of the actual dir.
-            src_a = os.path.join(*self._directory(proj_rel_path))
-            src_b = os.path.join(root, rel_path)
-            if not symlink or not _samefile(src_a, src_b):
-                self.fatal_conflicts.append(
-                    MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
-                )
-        elif self._in_files(proj_rel_path):
+        elif proj_rel_path in self.directories:
+            # Can't create a file where a dir is; fatal error
+            self.fatal_conflicts.append(
+                MergeConflict(
+                    dst=proj_rel_path,
+                    src_a=os.path.join(*self.directories[proj_rel_path]),
+                    src_b=os.path.join(root, rel_path),
+                )
+            )
+        elif proj_rel_path in self.files:
             # When two files project to the same path, they conflict iff they are distinct.
             # If they are the same (i.e. one links to the other), register regular files rather
             # than symlinks. The reason is that in copy-type views, we need a copy of the actual
             # file, not the symlink.
-            src_a = os.path.join(*self._file(proj_rel_path))
+            src_a = os.path.join(*self.files[proj_rel_path])
             src_b = os.path.join(root, rel_path)
-            if not _samefile(src_a, src_b):
+
+            try:
+                samefile = os.path.samefile(src_a, src_b)
+            except OSError:
+                samefile = False
+
+            if not samefile:
                 # Distinct files produce a conflict.
                 self.file_conflicts.append(
                     MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)

@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
         if not symlink:
             # Remove the link in favor of the actual file. The del is necessary to maintain the
             # order of the files dict, which is grouped by root.
-            existing_proj_rel_path, _, _ = self._file(proj_rel_path)
-            self._del_file(existing_proj_rel_path)
-            self._add_file(proj_rel_path, root, rel_path)
+            del self.files[proj_rel_path]
+            self.files[proj_rel_path] = (root, rel_path)
         else:
             # Otherwise register this file to be linked.
-            self._add_file(proj_rel_path, root, rel_path)
+            self.files[proj_rel_path] = (root, rel_path)
 
     def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
         # Treat symlinked files as ordinary files (without "dereferencing")

@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
         path = ""
         for part in self.projection.split(os.sep):
             path = os.path.join(path, part)
-            if not self._in_files(path):
-                self._add_directory(path, "<projection>", path)
+            if path not in self.files:
+                self.directories[path] = ("<projection>", path)
             else:
                 # Can't create a dir where a file is.
-                _, src_a_root, src_a_relpath = self._file(path)
+                src_a_root, src_a_relpath = self.files[path]
                 self.fatal_conflicts.append(
                     MergeConflict(
                         dst=path,

@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
     def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
         # If destination dir is a file in a src dir, add a conflict,
         # and don't traverse deeper
-        if self.src._in_files(rel_path):
-            _, src_a_root, src_a_relpath = self.src._file(rel_path)
+        if rel_path in self.src.files:
+            src_a_root, src_a_relpath = self.src.files[rel_path]
             self.src.fatal_conflicts.append(
                 MergeConflict(
                     rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)

@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
 
         # If destination dir was also a src dir, remove the mkdir
         # action, and traverse deeper.
-        if self.src._in_directories(rel_path):
-            existing_proj_rel_path, _, _ = self.src._directory(rel_path)
-            self.src._del_directory(existing_proj_rel_path)
+        if rel_path in self.src.directories:
+            del self.src.directories[rel_path]
             return True
 
         # If the destination dir does not appear in the src dir,

@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
 be seen as files; we should not accidentally merge
 source dir with a symlinked dest dir.
 """
-self.visit_file(root, rel_path, depth)
-# Never descend into symlinked target dirs.
-return False
-
-def visit_file(self, root: str, rel_path: str, depth: int) -> None:
-# Can't merge a file if target already exists
-if self.src._in_directories(rel_path):
-_, src_a_root, src_a_relpath = self.src._directory(rel_path)
+# Always conflict
+if rel_path in self.src.directories:
+src_a_root, src_a_relpath = self.src.directories[rel_path]
 self.src.fatal_conflicts.append(
 MergeConflict(
 rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
 )
 )

-elif self.src._in_files(rel_path):
-_, src_a_root, src_a_relpath = self.src._file(rel_path)
+if rel_path in self.src.files:
+src_a_root, src_a_relpath = self.src.files[rel_path]
+self.src.fatal_conflicts.append(
+MergeConflict(
+rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+)
+)
+
+# Never descend into symlinked target dirs.
+return False
+
+def visit_file(self, root: str, rel_path: str, depth: int) -> None:
+# Can't merge a file if target already exists
+if rel_path in self.src.directories:
+src_a_root, src_a_relpath = self.src.directories[rel_path]
+self.src.fatal_conflicts.append(
+MergeConflict(
+rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+)
+)
+
+elif rel_path in self.src.files:
+src_a_root, src_a_relpath = self.src.files[rel_path]
 self.src.fatal_conflicts.append(
 MergeConflict(
 rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -269,7 +269,7 @@ def __init__(

 @staticmethod
 def _poll_interval_generator(
-_wait_times: Optional[Tuple[float, float, float]] = None,
+_wait_times: Optional[Tuple[float, float, float]] = None
 ) -> Generator[float, None, None]:
 """This implements a backoff scheme for polling a contended resource
 by suggesting a succession of wait times between polls.
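The only functional content in the hunk above is the `_poll_interval_generator` signature (a trailing comma is dropped). For readers unfamiliar with the backoff scheme its docstring describes, a minimal sketch of such a generator follows; the wait times, repetition counts, and names are illustrative assumptions, not values taken from this diff.

```python
import itertools
import time
from typing import Generator, Optional, Tuple


def poll_interval_generator(
    wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
    """Yield short waits first, then progressively longer ones (illustrative values)."""
    short, medium, slow = wait_times or (0.1, 0.5, 2.0)
    yield from itertools.repeat(short, 10)   # poll eagerly at first
    yield from itertools.repeat(medium, 20)  # then back off
    yield from itertools.repeat(slow)        # and settle on a slow poll rate


# usage sketch: sleep between lock acquisition attempts
intervals = poll_interval_generator()
time.sleep(next(intervals))
```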
@@ -2,7 +2,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-"""Utility classes for logging the output of blocks of code."""
+"""Utility classes for logging the output of blocks of code.
+"""
 import atexit
 import ctypes
 import errno
@@ -13,18 +13,6 @@
 __version__ = "1.0.0.dev0"
 spack_version = __version__

-#: The current Package API version implemented by this version of Spack. The Package API defines
-#: the Python interface for packages as well as the layout of package repositories. The minor
-#: version is incremented when the package API is extended in a backwards-compatible way. The major
-#: version is incremented upon breaking changes. This version is changed independently from the
-#: Spack version.
-package_api_version = (1, 0)
-
-#: The minimum Package API version that this version of Spack is compatible with. This should
-#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
-#: compatibility with vX.0.
-min_package_api_version = (1, 0)
-

 def __try_int(v):
 try:

@@ -91,6 +79,4 @@ def get_short_version() -> str:
 "get_version",
 "get_spack_commit",
 "get_short_version",
-"package_api_version",
-"min_package_api_version",
 ]
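The hunks above drop the `package_api_version` / `min_package_api_version` tuples whose removed comments spell out the compatibility rule: minor bumps are backwards compatible, major bumps are breaking, and the minimum is always of the form `(major, 0)`. A hedged sketch of how a repository's requested API version could be checked against that rule; the helper name is hypothetical and not part of this diff.

```python
from typing import Tuple


def repo_api_is_supported(
    requested: Tuple[int, int],
    implemented: Tuple[int, int] = (1, 0),
    minimum: Tuple[int, int] = (1, 0),
) -> bool:
    # Tuple comparison encodes the rule from the removed comments: the requested
    # (major, minor) must lie between the minimum supported and the implemented API.
    return minimum <= requested <= implemented


assert repo_api_is_supported((1, 0))
assert not repo_api_is_supported((2, 0))
```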
@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
 for dep_name, dep in deps_by_name.items():

 def check_virtual_with_variants(spec, msg):
-if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
+if not spec.virtual or not spec.variants:
 return
 error = error_cls(
 f"{pkg_name}: {msg}",
@@ -923,7 +923,7 @@ class FileTypes:
 UNKNOWN = 2


-NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
+NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")


 def file_type(f: IO[bytes]) -> int:
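The `NOT_ISO8859_1_TEXT` change only alters the case of the hex escapes; `\x7f` and `\x7F` denote the same byte, so both sides compile to an identical pattern:

```python
import re

a = re.compile(b"[\x00\x7f-\x9f]")
b = re.compile(b"[\x00\x7F-\x9F]")
assert a.pattern == b.pattern                    # the literals resolve to the same bytes
assert a.search(b"\x85") and b.search(b"\x85")   # behaviour is identical as well
```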
@@ -2529,10 +2529,10 @@ def install_root_node(
 allow_missing: when true, allows installing a node with missing dependencies
 """
 # Early termination
-if spec.external or not spec.concrete:
-warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
+if spec.external or spec.virtual:
+warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
 return
-elif spec.installed and not force:
+elif spec.concrete and spec.installed and not force:
 warnings.warn("Package for spec {0} already installed.".format(spec.format()))
 return

@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

 # Install the spec that should make the module importable
 with spack.config.override(self.mirror_scope):
-PackageInstaller(
-[concrete_spec.package],
-fail_fast=True,
-package_use_cache=False,
-dependencies_use_cache=False,
-).install()
+PackageInstaller([concrete_spec.package], fail_fast=True).install()

 if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
 self.last_search = info
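The removed side of this hunk passes `package_use_cache=False` and `dependencies_use_cache=False`, which, as far as this diff shows, forces the bootstrap spec and its dependencies to be built rather than reused from a binary cache; the kept side falls back to the installer defaults. A side-by-side sketch of the two calls, using only the arguments visible above:

```python
# explicit cache opt-out (removed side of the hunk)
PackageInstaller(
    [concrete_spec.package],
    fail_fast=True,
    package_use_cache=False,
    dependencies_use_cache=False,
).install()

# installer defaults (kept side of the hunk)
PackageInstaller([concrete_spec.package], fail_fast=True).install()
```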
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
 for current_config in bootstrapping_sources():
 if not source_is_enabled(current_config):
 continue
-
 with exception_handler.forward(current_config["name"], Exception):
 if create_bootstrapper(current_config).try_import(module, abstract_spec):
 return
@@ -881,6 +881,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
 return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


+def load_external_modules(pkg):
+"""Traverse a package's spec DAG and load any external modules.
+
+Traverse a package's dependencies and load any external modules
+associated with them.
+
+Args:
+pkg (spack.package_base.PackageBase): package to load deps for
+"""
+for dep in list(pkg.spec.traverse()):
+external_modules = dep.external_modules or []
+for external_module in external_modules:
+load_module(external_module)
+
+
 def setup_package(pkg, dirty, context: Context = Context.BUILD):
 """Execute all environment setup routines."""
 if context not in (Context.BUILD, Context.TEST):

@@ -931,7 +946,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
 for mod in pkg.compiler.modules:
 load_module(mod)

-load_external_modules(setup_context)
+load_external_modules(pkg)

 # Make sure nothing's strange about the Spack environment.
 validate(env_mods, tty.warn)
@@ -1220,21 +1235,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
 env.prepend_path("PATH", bin_dir)


-def load_external_modules(context: SetupContext) -> None:
-"""Traverse a package's spec DAG and load any external modules.
-
-Traverse a package's dependencies and load any external modules
-associated with them.
-
-Args:
-context: A populated SetupContext object
-"""
-for spec, _ in context.external:
-external_modules = spec.external_modules or []
-for external_module in external_modules:
-load_module(external_module)
-
-
 def _setup_pkg_and_run(
 serialized_pkg: "spack.subprocess_context.PackageInstallContext",
 function: Callable,
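Taken together with the earlier hunk, `load_external_modules` changes its input: the version kept here walks the whole spec DAG via `pkg.spec.traverse()`, whereas the removed variant only looked at the externals already recorded in a `SetupContext`. A condensed sketch of the two selection strategies, using only the names that appear in the hunks:

```python
# kept variant: every node in the DAG is inspected for external modules
for dep in list(pkg.spec.traverse()):
    for external_module in dep.external_modules or []:
        load_module(external_module)

# removed variant: only specs the setup context had classified as external
for spec, _ in context.external:
    for external_module in spec.external_modules or []:
        load_module(external_module)
```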
@@ -12,7 +12,6 @@
 import spack.phase_callbacks
 import spack.spec
 import spack.util.prefix
-from spack.directives import depends_on

 from .cmake import CMakeBuilder, CMakePackage

@@ -278,10 +277,6 @@ def initconfig_hardware_entries(self):
 entries.append("# ROCm")
 entries.append("#------------------{0}\n".format("-" * 30))

-if spec.satisfies("^blt@0.7:"):
-rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
-entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
-else:
 # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
 entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
 llvm_bin = spec["llvm-amdgpu"].prefix.bin
@@ -291,11 +286,8 @@ def initconfig_hardware_entries(self):
 if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
 llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
 entries.append(
-cmake_cache_filepath(
-"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
+cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
 )
-)

 archs = self.spec.variants["amdgpu_target"].value
 if archs[0] != "none":
 arch_str = ";".join(archs)

@@ -379,10 +371,6 @@ class CachedCMakePackage(CMakePackage):

 CMakeBuilder = CachedCMakeBuilder

-# These dependencies are assumed in the builder
-depends_on("c", type="build")
-depends_on("cxx", type="build")
-
 def flag_handler(self, name, flags):
 if name in ("cflags", "cxxflags", "cppflags", "fflags"):
 return None, None, None # handled in the cmake cache
@@ -70,16 +70,10 @@ def build_directory(self):
 """Return the directory containing the main Cargo.toml."""
 return self.pkg.stage.source_path

-@property
-def std_build_args(self):
-"""Standard arguments for ``cargo build`` provided as a property for
-convenience of package writers."""
-return ["-j", str(self.pkg.module.make_jobs)]
-
 @property
 def build_args(self):
 """Arguments for ``cargo build``."""
-return []
+return ["-j", str(self.pkg.module.make_jobs)]

 @property
 def check_args(self):

@@ -94,9 +88,7 @@ def build(
 ) -> None:
 """Runs ``cargo install`` in the source directory"""
 with fs.working_dir(self.build_directory):
-pkg.module.cargo(
-"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
-)
+pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

 def install(
 self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
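With the `std_build_args` property gone, the parallel-jobs flag now lives in `build_args` itself, so the assembled command looks roughly like the sketch below; a package that overrides `build_args` on this side of the diff has to re-add `-j` itself.

```python
# with make_jobs == 8 the build step issues approximately:
#   cargo install --root out --path . -j 8
args = ["install", "--root", "out", "--path", ".", "-j", "8"]
```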
@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
 variant("strip", default=False, description="Strip targets on install")
 depends_on("meson", type="build")
 depends_on("ninja", type="build")
-# Meson uses pkg-config for dependency detection, and this dependency is
-# often overlooked by packages that use meson as a build system.
-depends_on("pkgconfig", type="build")
 # Python detection in meson requires distutils to be importable, but distutils no longer
 # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
 # because the distutils-precedence.pth startup file that setuptools ships with is not run
@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
 $ source {prefix}/{component}/{version}/env/vars.sh
 """
 # Only if environment modifications are desired (default is +envmods)
-if "+envmods" in self.spec:
+if "~envmods" not in self.spec:
 env.extend(
 EnvironmentModifications.from_sourcing_file(
 self.component_prefix.env.join("vars.sh"), *self.env_script_args
 )
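`"+envmods" in self.spec` and `"~envmods" not in self.spec` agree whenever the variant has a definite value, which is the case for the concrete specs seen in `setup_run_environment`; they only diverge on abstract specs where `envmods` is still unset. A small illustration under that assumption:

```python
# for a concrete spec the two predicates are interchangeable
spec = pkg.spec  # concrete by the time setup_run_environment runs
assert ("+envmods" in spec) == ("~envmods" not in spec)
```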
@@ -6,7 +6,6 @@
 import codecs
 import json
 import os
-import pathlib
 import re
 import shutil
 import stat

@@ -14,16 +13,16 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set, Union
+from typing import Callable, Dict, List, Set
 from urllib.request import Request

-import llnl.path
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize

 import spack
 import spack.binary_distribution as bindist
+import spack.builder
 import spack.concretize
 import spack.config as cfg
 import spack.environment as ev

@@ -33,7 +32,6 @@
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml

@@ -42,7 +40,6 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
-from spack.version import GitVersion, StandardVersion

 from .common import (
 IS_WINDOWS,
@@ -81,53 +78,11 @@ def get_change_revisions():
 return None, None


-def get_added_versions(
-checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
-path: str,
-from_ref: str = "HEAD~1",
-to_ref: str = "HEAD",
-) -> List[Union[StandardVersion, GitVersion]]:
-"""Get a list of the versions added between `from_ref` and `to_ref`.
-Args:
-checksums_version_dict (Dict): all package versions keyed by known checksums.
-path (str): path to the package.py
-from_ref (str): oldest git ref, defaults to `HEAD~1`
-to_ref (str): newer git ref, defaults to `HEAD`
-Returns: list of versions added between refs
-"""
-git_exe = spack.util.git.git(required=True)
-
-# Gather git diff
-diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
-
-# Store added and removed versions
-# Removed versions are tracked here to determine when versions are moved in a file
-# and show up as both added and removed in a git diff.
-added_checksums = set()
-removed_checksums = set()
-
-# Scrape diff for modified versions and prune added versions if they show up
-# as also removed (which means they've actually just moved in the file and
-# we shouldn't need to rechecksum them)
-for checksum in checksums_version_dict.keys():
-for line in diff_lines:
-if checksum in line:
-if line.startswith("+"):
-added_checksums.add(checksum)
-if line.startswith("-"):
-removed_checksums.add(checksum)
-
-return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
-
-
 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
 """Given an environment manifest path and two revisions to compare, return
 whether or not the stack was changed. Returns True if the environment
 manifest changed between the provided revisions (or additionally if the
 `.gitlab-ci.yml` file itself changed). Returns False otherwise."""
-# git returns posix paths always, normalize input to be comptaible
-# with that
-env_path = llnl.path.convert_to_posix_path(env_path)
 git = spack.util.git.git()
 if git:
 with fs.working_dir(spack.paths.prefix):
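The removed `get_added_versions` prunes checksums that appear on both `+` and `-` lines of the diff, since those merely moved inside the file. A toy illustration of that set arithmetic (the checksums and version lines are invented for the example):

```python
diff_lines = [
    '+    version("1.2.0", sha256="aaa111")',
    '-    version("1.1.0", sha256="bbb222")',
    '+    version("1.1.0", sha256="bbb222")',
]
checksums = ("aaa111", "bbb222")
added = {c for c in checksums if any(l.startswith("+") and c in l for l in diff_lines)}
removed = {c for c in checksums if any(l.startswith("-") and c in l for l in diff_lines)}
assert added - removed == {"aaa111"}  # only genuinely new checksums survive
```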
@@ -265,7 +220,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:

 def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
 reason_msg = ", ".join(reasons)
-spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
+spec_fmt = "{name}{@version}{%compiler}{/hash:7}"

 if not prune:
 status = colorize("@*g{[x]} ")
@@ -622,25 +577,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
 tty.debug(f"job spec: {job_spec}")

 try:
-package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-except spack.error.SpackError as e:
-tty.error(f"Cannot copy logs: {str(e)}")
+pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
+job_pkg = pkg_cls(job_spec)
+tty.debug(f"job package: {job_pkg}")
+except AssertionError:
+msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
+tty.error(msg)
 return

-# Get the package's archived files
-archive_files = []
-archive_root = package_metadata_root / "archived-files"
-if archive_root.is_dir():
-archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
-else:
-msg = "Cannot copy package archived files: archived-files must be a directory"
-tty.warn(msg)
-
-build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
-build_env_mods = package_metadata_root / "spack-build-env.txt"
-
-for f in [build_log_zipped, build_env_mods, *archive_files]:
-copy_files_to_artifacts(str(f), job_log_dir)
+stage_dir = job_pkg.stage.path
+tty.debug(f"stage dir: {stage_dir}")
+for file in [
+job_pkg.log_path,
+job_pkg.env_mods_path,
+*spack.builder.create(job_pkg).archive_files,
+]:
+copy_files_to_artifacts(file, job_log_dir)


 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -660,7 +612,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
 copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


-def download_and_extract_artifacts(url, work_dir) -> str:
+def download_and_extract_artifacts(url, work_dir):
 """Look for gitlab artifacts.zip at the given url, and attempt to download
 and extract the contents into the given work_dir

@@ -668,10 +620,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:

 url (str): Complete url to artifacts.zip file
 work_dir (str): Path to destination where artifacts should be extracted

-Output:
-
-Artifacts root path relative to the archive root
 """
 tty.msg(f"Fetching artifacts from: {url}")

@@ -689,25 +637,13 @@ def download_and_extract_artifacts(url, work_dir) -> str:
 response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
 with open(artifacts_zip_path, "wb") as out_file:
 shutil.copyfileobj(response, out_file)
+except OSError as e:
+raise SpackError(f"Error fetching artifacts: {e}")

 with zipfile.ZipFile(artifacts_zip_path) as zip_file:
 zip_file.extractall(work_dir)
-# Get the artifact root
-artifact_root = ""
-for f in zip_file.filelist:
-if "spack.lock" in f.filename:
-artifact_root = os.path.dirname(os.path.dirname(f.filename))
-break
-except OSError as e:
-raise SpackError(f"Error fetching artifacts: {e}")
-finally:
-try:
-os.remove(artifacts_zip_path)
-except FileNotFoundError:
-# If the file doesn't exist we are already raising
-pass
-
-return artifact_root
+os.remove(artifacts_zip_path)


 def get_spack_info():
@@ -821,7 +757,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
 return True


-def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
+def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
 """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
 attempt to setup an environment in which the failure can be reproduced
 locally. This entails the following:

@@ -835,11 +771,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
 commands to run to reproduce the build once inside the container.
 """
 work_dir = os.path.realpath(work_dir)
-if os.path.exists(work_dir) and os.listdir(work_dir):
-raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
-
 platform_script_ext = "ps1" if IS_WINDOWS else "sh"
-artifact_root = download_and_extract_artifacts(url, work_dir)
+download_and_extract_artifacts(url, work_dir)

 gpg_path = None
 if gpg_url:

@@ -901,9 +834,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
 with open(repro_file, encoding="utf-8") as fd:
 repro_details = json.load(fd)

-spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
-reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
-
 repro_dir = os.path.dirname(repro_file)
 rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)

@@ -964,9 +894,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
 commit_regex = re.compile(r"commit\s+([^\s]+)")
 merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

-if use_local_head:
-commit_1 = "HEAD"
-else:
 # Try the more specific merge commit regex first
 m = merge_commit_regex.search(spack_info)
 if m:

@@ -1052,8 +979,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
 "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
 )

-# Attempt to create a unique name for the reproducer container
-container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
 docker_command = [
 runtime,
 "run",

@@ -1061,14 +986,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
 "-t",
 "--rm",
 "--name",
-f"spack_reproducer{container_suffix}",
+"spack_reproducer",
 "-v",
 ":".join([work_dir, mounted_workdir, "Z"]),
 "-v",
 ":".join(
 [
-os.path.join(work_dir, artifact_root),
-os.path.join(mount_as_dir, artifact_root),
+os.path.join(work_dir, "jobs_scratch_dir"),
+os.path.join(mount_as_dir, "jobs_scratch_dir"),
 "Z",
 ]
 ),
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
 if len(matching_specs) <= 1:
 return

-format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
+format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
 args = ["%s matches multiple packages." % spec, "Matching packages:"]
 args += [
 colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs

@@ -471,11 +471,12 @@ def get_arg(name, default=None):
 nfmt = "{fullname}" if namespaces else "{name}"
 ffmt = ""
 if full_compiler or flags:
-ffmt += "{compiler_flags} {%compiler.name}"
+ffmt += "{%compiler.name}"
 if full_compiler:
 ffmt += "{@compiler.version}"
+ffmt += " {compiler_flags}"
 vfmt = "{variants}" if variants else ""
-format_string = nfmt + "{@version}" + vfmt + ffmt
+format_string = nfmt + "{@version}" + ffmt + vfmt

 def fmt(s, depth=0):
 """Formatter function for all output specs"""
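The two branches of the second hunk assemble the compiler and flag tokens in a different order. With namespaces off, variants and flags requested, and `full_compiler` off, the resulting format strings differ as follows:

```python
nfmt, vfmt = "{name}", "{variants}"

# removed branch
left = nfmt + "{@version}" + vfmt + "{compiler_flags} {%compiler.name}"
# kept branch
right = nfmt + "{@version}" + "{%compiler.name}" + " {compiler_flags}" + vfmt

assert left == "{name}{@version}{variants}{compiler_flags} {%compiler.name}"
assert right == "{name}{@version}{%compiler.name} {compiler_flags}{variants}"
```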
@@ -4,7 +4,7 @@

 import re
 import sys
-from typing import Dict, Optional, Tuple
+from typing import Dict, Optional

 import llnl.string
 import llnl.util.lang

@@ -181,11 +177,7 @@ def checksum(parser, args):
 print()

 if args.add_to_package:
-path = spack.repo.PATH.filename_for_package_name(pkg.name)
-num_versions_added = add_versions_to_pkg(path, version_lines)
-tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
-if not args.batch and sys.stdin.isatty():
-editor(path)
+add_versions_to_package(pkg, version_lines, args.batch)


 def print_checksum_status(pkg: PackageBase, version_hashes: dict):

@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
 tty.die("Invalid checksums found.")


-def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
-"""Returns a tuple of number of versions added and the package's modified contents."""
+def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
+"""
+Add checksumed versions to a package's instructions and open a user's
+editor so they may double check the work of the function.
+
+Args:
+pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
+version_lines (str): A string of rendered version lines.
+
+"""
+# Get filename and path for package
+filename = spack.repo.PATH.filename_for_package_name(pkg.name)
 num_versions_added = 0

 version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
 version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')


@@ -245,7 +252,9 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
 if match:
 new_versions.append((Version(match.group(1)), ver_line))

-split_contents = version_statement_re.split(package_src)
+with open(filename, "r+", encoding="utf-8") as f:
+contents = f.read()
+split_contents = version_statement_re.split(contents)

 for i, subsection in enumerate(split_contents):
 # If there are no more versions to add we should exit

@@ -264,15 +273,12 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
 elif parsed_version == new_versions[0][0]:
 new_versions.pop(0)

-return num_versions_added, "".join(split_contents)
-
-
-def add_versions_to_pkg(path: str, version_lines: str) -> int:
-"""Add new versions to a package.py file. Returns the number of versions added."""
-with open(path, "r", encoding="utf-8") as f:
-package_src = f.read()
-num_versions_added, package_src = _update_version_statements(package_src, version_lines)
-if num_versions_added > 0:
-with open(path, "w", encoding="utf-8") as f:
-f.write(package_src)
-return num_versions_added
+# Seek back to the start of the file so we can rewrite the file contents.
+f.seek(0)
+f.writelines("".join(split_contents))
+
+tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
+tty.msg(f"Open {filename} to review the additions.")
+
+if sys.stdout.isatty() and not is_batch:
+editor(filename)
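Both sides of the `add_versions` change rely on `re.split` with a capturing group: the matched `version(...)` statements are kept in the result list, so joining the pieces reproduces the original file with new lines spliced in between. A small, self-contained demonstration of that property:

```python
import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
src = 'class Foo:\n    version("1.0", sha256="abc")\n'

parts = version_statement_re.split(src)
# parts == ['class Foo:\n', '    version("1.0", sha256="abc")', '\n']
assert "".join(parts) == src
```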
@@ -4,15 +4,12 @@

 import json
 import os
-import re
 import shutil
-import sys
-from typing import Dict
 from urllib.parse import urlparse, urlunparse

 import llnl.util.filesystem as fs
+import llnl.util.tty as tty
 import llnl.util.tty.color as clr
-from llnl.util import tty

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci

@@ -21,22 +18,12 @@
 import spack.cmd.common.arguments
 import spack.config as cfg
 import spack.environment as ev
-import spack.error
-import spack.fetch_strategy
 import spack.hash_types as ht
 import spack.mirrors.mirror
-import spack.package_base
-import spack.paths
-import spack.repo
-import spack.spec
-import spack.stage
-import spack.util.executable
-import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util
-import spack.version

 description = "manage continuous integration pipelines"
 section = "build"
@@ -45,7 +32,6 @@
 SPACK_COMMAND = "spack"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100
-BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")


 def deindent(desc):

@@ -190,11 +176,6 @@ def setup_parser(subparser):
 reproduce.add_argument(
 "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
 )
-reproduce.add_argument(
-"--use-local-head",
-help="Use the HEAD of the local Spack instead of reproducing a commit",
-action="store_true",
-)
 gpg_group = reproduce.add_mutually_exclusive_group(required=False)
 gpg_group.add_argument(
 "--gpg-file", help="Path to public GPG key for validating binary cache installs"

@@ -205,16 +186,6 @@ def setup_parser(subparser):

 reproduce.set_defaults(func=ci_reproduce)

-# Verify checksums inside of ci workflows
-verify_versions = subparsers.add_parser(
-"verify-versions",
-description=deindent(ci_verify_versions.__doc__),
-help=spack.cmd.first_line(ci_verify_versions.__doc__),
-)
-verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
-verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
-verify_versions.set_defaults(func=ci_verify_versions)
-

 def ci_generate(args):
 """generate jobs file from a CI-aware spack file

@@ -451,7 +422,7 @@ def ci_rebuild(args):

 # Arguments when installing the root from sources
 deps_install_args = install_args + ["--only=dependencies"]
-root_install_args = install_args + ["--only=package"]
+root_install_args = install_args + ["--keep-stage", "--only=package"]

 if cdash_handler:
 # Add additional arguments to `spack install` for CDash reporting.

@@ -488,7 +459,8 @@ def ci_rebuild(args):
 job_spec.to_dict(hash=ht.dag_hash),
 )

-# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
+# We generated the "spack install ..." command to "--keep-stage", copy
+# any logs from the staging directory to artifacts now
 spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

 # If the installation succeeded and we're running stand-alone tests for

@@ -636,12 +608,7 @@ def ci_reproduce(args):
 gpg_key_url = None

 return spack_ci.reproduce_ci_job(
-args.job_url,
-args.working_dir,
-args.autostart,
-gpg_key_url,
-args.runtime,
-args.use_local_head,
+args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
 )

@@ -683,159 +650,6 @@ def _gitlab_artifacts_url(url: str) -> str:
 return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))


-def validate_standard_versions(
-pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
-) -> bool:
-"""Get and test the checksum of a package version based on a tarball.
-Args:
-pkg spack.package_base.PackageBase: Spack package for which to validate a version checksum
-versions spack.version.VersionList: list of package versions to validate
-Returns: bool: result of the validation. True is valid and false is failed.
-"""
-url_dict: Dict[spack.version.StandardVersion, str] = {}
-
-for version in versions:
-url = pkg.find_valid_url_for_version(version)
-url_dict[version] = url
-
-version_hashes = spack.stage.get_checksums_for_versions(
-url_dict, pkg.name, fetch_options=pkg.fetch_options
-)
-
-valid_checksums = True
-for version, sha in version_hashes.items():
-if sha != pkg.versions[version]["sha256"]:
-tty.error(
-f"Invalid checksum found {pkg.name}@{version}\n"
-f" [package.py] {pkg.versions[version]['sha256']}\n"
-f" [Downloaded] {sha}"
-)
-valid_checksums = False
-continue
-
-tty.info(f"Validated {pkg.name}@{version} --> {sha}")
-
-return valid_checksums
-
-
-def validate_git_versions(
-pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
-) -> bool:
-"""Get and test the commit and tag of a package version based on a git repository.
-Args:
-pkg spack.package_base.PackageBase: Spack package for which to validate a version
-versions spack.version.VersionList: list of package versions to validate
-Returns: bool: result of the validation. True is valid and false is failed.
-"""
-valid_commit = True
-for version in versions:
-fetcher = spack.fetch_strategy.for_package_version(pkg, version)
-with spack.stage.Stage(fetcher) as stage:
-known_commit = pkg.versions[version]["commit"]
-try:
-stage.fetch()
-except spack.error.FetchError:
-tty.error(
-f"Invalid commit for {pkg.name}@{version}\n"
-f" {known_commit} could not be checked out in the git repository."
-)
-valid_commit = False
-continue
-
-# Test if the specified tag matches the commit in the package.py
-# We retrieve the commit associated with a tag and compare it to the
-# commit that is located in the package.py file.
-if "tag" in pkg.versions[version]:
-tag = pkg.versions[version]["tag"]
-try:
-with fs.working_dir(stage.source_path):
-found_commit = fetcher.git(
-"rev-list", "-n", "1", tag, output=str, error=str
-).strip()
-except spack.util.executable.ProcessError:
-tty.error(
-f"Invalid tag for {pkg.name}@{version}\n"
-f" {tag} could not be found in the git repository."
-)
-valid_commit = False
-continue
-
-if found_commit != known_commit:
-tty.error(
-f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
-f" [package.py] {known_commit}\n"
-f" [Downloaded] {found_commit}"
-)
-valid_commit = False
-continue
-
-# If we have downloaded the repository, found the commit, and compared
-# the tag (if specified) we can conclude that the version is pointing
-# at what we would expect.
-tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
-
-return valid_commit
-
-
-def ci_verify_versions(args):
-"""validate version checksum & commits between git refs
-This command takes a from_ref and to_ref arguments and
-then parses the git diff between the two to determine which packages
-have been modified verifies the new checksums inside of them.
-"""
-with fs.working_dir(spack.paths.prefix):
-# We use HEAD^1 explicitly on the merge commit created by
-# GitHub Actions. However HEAD~1 is a safer default for the helper function.
-files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
-
-# Get a list of package names from the modified files.
-pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
-
-failed_version = False
-for pkg_name, path in pkgs:
-spec = spack.spec.Spec(pkg_name)
-pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
-
-# Skip checking manual download packages and trust the maintainers
-if pkg.manual_download:
-tty.warn(f"Skipping manual download package: {pkg_name}")
-continue
-
-# Store versions checksums / commits for future loop
-checksums_version_dict = {}
-commits_version_dict = {}
-for version in pkg.versions:
-# If the package version defines a sha256 we'll use that as the high entropy
-# string to detect which versions have been added between from_ref and to_ref
-if "sha256" in pkg.versions[version]:
-checksums_version_dict[pkg.versions[version]["sha256"]] = version
-
-# If a package version instead defines a commit we'll use that as a
-# high entropy string to detect new versions.
-elif "commit" in pkg.versions[version]:
-commits_version_dict[pkg.versions[version]["commit"]] = version
-
-# TODO: enforce every version have a commit or a sha256 defined if not
-# an infinite version (there are a lot of package's where this doesn't work yet.)
-
-with fs.working_dir(spack.paths.prefix):
-added_checksums = spack_ci.get_added_versions(
-checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
-)
-added_commits = spack_ci.get_added_versions(
-commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
-)
-
-if added_checksums:
-failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
-
-if added_commits:
-failed_version = not validate_git_versions(pkg, added_commits) or failed_version
-
-if failed_version:
-sys.exit(1)
-
-
 def ci(parser, args):
 if args.func:
 return args.func(args)
@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
 # the const from the constructor or a value from the CLI.
 # Note that this is only called if the argument is actually
 # specified on the command line.
+spack.config.CONFIG.ensure_scope_ordering()
 spack.config.set(self.config_path, self.const, scope="command_line")


@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
 if spack.config.get(key_path, scope=scope):
 ideal_scope_to_modify = scope
 break
-# If we find our key in a specific scope, that's the one we want
-# to modify. Otherwise we use the default write scope.
-write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()

 update_path = f"{key_path}:[{str(spec)}]"
-spack.config.add(update_path, scope=write_scope)
+spack.config.add(update_path, scope=ideal_scope_to_modify)
 else:
 raise ValueError("'config change' can currently only change 'require' sections")

|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
import os
|
||||||
import platform
|
import platform
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from datetime import datetime
|
||||||
|
from glob import glob
|
||||||
|
|
||||||
|
import llnl.util.tty as tty
|
||||||
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
import spack
|
import spack
|
||||||
|
import spack.paths
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.spec
|
import spack.spec
|
||||||
|
import spack.store
|
||||||
|
import spack.util.git
|
||||||
|
from spack.util.executable import which
|
||||||
|
|
||||||
description = "debugging commands for troubleshooting Spack"
|
description = "debugging commands for troubleshooting Spack"
|
||||||
section = "developer"
|
section = "developer"
|
||||||
@@ -15,9 +27,63 @@

 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
+    sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
     sp.add_parser("report", help="print information useful for bug reports")


+def _debug_tarball_suffix():
+    now = datetime.now()
+    suffix = now.strftime("%Y-%m-%d-%H%M%S")
+
+    git = spack.util.git.git()
+    if not git:
+        return "nobranch-nogit-%s" % suffix
+
+    with working_dir(spack.paths.prefix):
+        if not os.path.isdir(".git"):
+            return "nobranch.nogit.%s" % suffix
+
+        # Get symbolic branch name and strip any special chars (mainly '/')
+        symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
+        symbolic = re.sub(r"[^\w.-]", "-", symbolic)
+
+        # Get the commit hash too.
+        commit = git("rev-parse", "--short", "HEAD", output=str).strip()
+
+        if symbolic == commit:
+            return "nobranch.%s.%s" % (commit, suffix)
+        else:
+            return "%s.%s.%s" % (symbolic, commit, suffix)
+
+
+def create_db_tarball(args):
+    tar = which("tar")
+    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
+    tarball_path = os.path.abspath(tarball_name)
+
+    base = os.path.basename(str(spack.store.STORE.root))
+    transform_args = []
+    # Currently --transform and -s are not supported by Windows native tar
+    if "GNU" in tar("--version", output=str):
+        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
+    elif sys.platform != "win32":
+        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
+
+    wd = os.path.dirname(str(spack.store.STORE.root))
+    with working_dir(wd):
+        files = [spack.store.STORE.db._index_path]
+        files += glob("%s/*/*/*/.spack/spec.json" % base)
+        files += glob("%s/*/*/*/.spack/spec.yaml" % base)
+        files = [os.path.relpath(f) for f in files]
+
+        args = ["-czf", tarball_path]
+        args += transform_args
+        args += files
+        tar(*args)
+
+    tty.msg("Created %s" % tarball_name)
+
+
 def report(args):
     host_platform = spack.platforms.host()
     host_os = host_platform.default_operating_system()
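A note on the helpers restored above: `_debug_tarball_suffix()` stitches the current branch name, the short commit hash and a timestamp into one string, so the archive written by `create_db_tarball()` can be traced back to the checkout that produced it. A rough sketch of the resulting names, with purely hypothetical branch, commit and timestamp values:

    # illustrative only; branch, commit and timestamp are hypothetical
    suffix = "develop.abc1234.2024-01-31-120000"      # "<branch>.<commit>.<timestamp>"
    tarball_name = "spack-db.%s.tar.gz" % suffix      # -> spack-db.develop.abc1234.2024-01-31-120000.tar.gz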
@@ -29,5 +95,5 @@ def report(args):


 def debug(parser, args):
-    if args.debug_command == "report":
-        report(args)
+    action = {"create-db-tarball": create_db_tarball, "report": report}
+    action[args.debug_command](args)
@@ -55,7 +55,7 @@ def dependencies(parser, args):
         env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)

-        format_string = "{name}{@version}{/hash:7}{%compiler}"
+        format_string = "{name}{@version}{%compiler}{/hash:7}"
         if sys.stdout.isatty():
             tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
         deps = spack.store.STORE.db.installed_relatives(
@@ -93,7 +93,7 @@ def dependents(parser, args):
         env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)

-        format_string = "{name}{@version}{/hash:7}{%compiler}"
+        format_string = "{name}{@version}{%compiler}{/hash:7}"
         if sys.stdout.isatty():
             tty.msg("Dependents of %s" % spec.cformat(format_string))
         deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
@@ -73,7 +73,7 @@
     boxlib @B{dim=2}            boxlib built for 2 dimensions
     libdwarf @g{%intel} ^libelf@g{%gcc}
         libdwarf, built with intel compiler, linked to libelf built with gcc
-    mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
+    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
         mvapich2, built with gcc compiler, with support for multiple fabrics
 """
@@ -383,10 +383,8 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
         query = " ".join(str(s) for s in args.constraint_specs)
         msg = f"the constraint '{query}' matches multiple packages:\n"
         for s in specs:
-            spec_fmt = (
-                "{hash:7} {name}{@version}{compiler_flags}{variants}"
-                "{arch=architecture} {%compiler}"
-            )
+            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
+            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
             msg += "\t" + s.cformat(spec_fmt) + "\n"
         tty.die(msg, "In this context exactly *one* match is needed.")
@@ -41,11 +41,7 @@ def providers(parser, args):
     specs = spack.cmd.parse_specs(args.virtual_package)

     # Check prerequisites
-    non_virtual = [
-        str(s)
-        for s in specs
-        if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
-    ]
+    non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
     if non_virtual:
         msg = "non-virtual specs cannot be part of the query "
         msg += "[{0}]\n".format(", ".join(non_virtual))
@@ -6,9 +6,8 @@
 import os
 import re
 import sys
-import warnings
 from itertools import islice, zip_longest
-from typing import Callable, Dict, List, Optional
+from typing import Dict, List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.color as color
@@ -17,9 +16,6 @@
 import spack.paths
 import spack.repo
 import spack.util.git
-import spack.util.spack_yaml
-from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
-from spack.tokenize import Token
 from spack.util.executable import Executable, which

 description = "runs source code style checks on spack"
@@ -202,13 +198,6 @@ def setup_parser(subparser):
         action="append",
         help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
     )
-    subparser.add_argument(
-        "--spec-strings",
-        action="store_true",
-        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
-        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
-        "will be removed in Spack v1.0.",
-    )
     subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
@@ -518,196 +507,7 @@ def _bootstrap_dev_dependencies():
     spack.bootstrap.ensure_environment_dependencies()


-IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
-
-
-def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
-    # only move the compiler to the back if it exists and is not already at the end
-    if not 0 <= idx < len(blocks) - 1:
-        return
-    # if there's only whitespace after the compiler, don't move it
-    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
-        return
-    # rotate left and always add at least one WS token between compiler and previous token
-    compiler_block = blocks.pop(idx)
-    if compiler_block[0].kind != SpecTokens.WS:
-        compiler_block.insert(0, Token(SpecTokens.WS, " "))
-    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
-    # WS tokens.
-    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
-        blocks[0].pop(0)
-    blocks.append(compiler_block)
-
-
-def _spec_str_format(spec_str: str) -> Optional[str]:
-    """Given any string, try to parse as spec string, and rotate the compiler token to the end
-    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
-    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
-    # the end when we hit a dependency ^... or the end of a string.
-    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
-    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
-
-    current_block: List[Token] = []
-    blocks: List[List[Token]] = []
-    compiler_block_idx = -1
-    in_edge_attr = False
-
-    for token in SPEC_TOKENIZER.tokenize(spec_str):
-        if token.kind == SpecTokens.UNEXPECTED:
-            # parsing error, we cannot fix this string.
-            return None
-        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
-            # multiple compilers are not supported in Spack v0.x, so early return
-            if compiler_block_idx != -1:
-                return None
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-            compiler_block_idx = len(blocks) - 1
-        elif token.kind in (
-            SpecTokens.START_EDGE_PROPERTIES,
-            SpecTokens.DEPENDENCY,
-            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
-            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
-        ):
-            _spec_str_reorder_compiler(compiler_block_idx, blocks)
-            compiler_block_idx = -1
-            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
-                in_edge_attr = True
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
-            in_edge_attr = False
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif in_edge_attr:
-            current_block.append(token)
-        elif token.kind in (
-            SpecTokens.VERSION_HASH_PAIR,
-            SpecTokens.GIT_VERSION,
-            SpecTokens.VERSION,
-            SpecTokens.PROPAGATED_BOOL_VARIANT,
-            SpecTokens.BOOL_VARIANT,
-            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
-            SpecTokens.KEY_VALUE_PAIR,
-            SpecTokens.DAG_HASH,
-        ):
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif token.kind == SpecTokens.WS:
-            current_block.append(token)
-        else:
-            raise ValueError(f"unexpected token {token}")
-
-    if current_block:
-        blocks.append(current_block)
-    _spec_str_reorder_compiler(compiler_block_idx, blocks)
-
-    new_spec_str = "".join(token.value for block in blocks for token in block)
-    return new_spec_str if spec_str != new_spec_str else None
-
-
-SpecStrHandler = Callable[[str, int, int, str, str], None]
-
-
-def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
-    """A SpecStrHandler that prints formatted spec strings and their locations."""
-    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
-
-
-def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
-    """A SpecStrHandler that updates formatted spec strings in files."""
-    with open(path, "r", encoding="utf-8") as f:
-        lines = f.readlines()
-    new_line = lines[line - 1].replace(old, new)
-    if new_line == lines[line - 1]:
-        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
-        return
-    lines[line - 1] = new_line
-    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
-    with open(path, "w", encoding="utf-8") as f:
-        f.writelines(lines)
-
-
-def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
-    """Walk the AST of a Python file and apply handler to formatted spec strings."""
-    has_constant = sys.version_info >= (3, 8)
-    for node in ast.walk(tree):
-        if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
-            current_str = node.value
-        elif not has_constant and isinstance(node, ast.Str):
-            current_str = node.s
-        else:
-            continue
-        if not IS_PROBABLY_COMPILER.search(current_str):
-            continue
-        new = _spec_str_format(current_str)
-        if new is not None:
-            handler(path, node.lineno, node.col_offset, current_str, new)
-
-
-def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
-    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
-    queue = [data]
-    seen = set()
-
-    while queue:
-        current = queue.pop(0)
-        if id(current) in seen:
-            continue
-        seen.add(id(current))
-        if isinstance(current, dict):
-            queue.extend(current.values())
-            queue.extend(current.keys())
-        elif isinstance(current, list):
-            queue.extend(current)
-        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
-            new = _spec_str_format(current)
-            if new is not None:
-                mark = getattr(current, "_start_mark", None)
-                if mark:
-                    line, col = mark.line + 1, mark.column + 1
-                else:
-                    line, col = 0, 0
-                handler(path, line, col, current, new)
-
-
-def _check_spec_strings(
-    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
-) -> None:
-    """Open Python, JSON and YAML files, and format their string literals that look like spec
-    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
-    for path in paths:
-        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
-        is_python = path.endswith(".py")
-        if not is_json_or_yaml and not is_python:
-            continue
-
-        try:
-            with open(path, "r", encoding="utf-8") as f:
-                # skip files that are likely too large to be user code or config
-                if os.fstat(f.fileno()).st_size > 1024 * 1024:
-                    warnings.warn(f"skipping {path}: too large.")
-                    continue
-                if is_json_or_yaml:
-                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
-                elif is_python:
-                    _spec_str_ast(path, ast.parse(f.read()), handler)
-        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
-            warnings.warn(f"skipping {path}")
-            continue
-
-
 def style(parser, args):
-    if args.spec_strings:
-        if not args.files:
-            tty.die("No files provided to check spec strings.")
-        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
-        return _check_spec_strings(args.files, handler)
-
     # save initial working directory for relativizing paths later
     args.initial_working_dir = os.getcwd()
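The block removed above implemented `spack style --spec-strings`: it tokenizes a string as a spec and rotates the compiler token to the end of each spec instance, which is the ordering Spack v1.0 expects. A sketch of the intended effect, mirroring the example in the removed code's own comment (package name and versions are hypothetical):

    # hypothetical inputs; behaviour follows the removed _spec_str_format()
    _spec_str_format("mypkg@3.1 +foo %gcc@3.1 +baz")   # -> "mypkg@3.1 +foo +baz %gcc@3.1"
    _spec_str_format("mypkg@3.1 +foo %gcc@3.1")        # -> None (compiler already last, nothing to change)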
@@ -17,7 +17,6 @@
     pytest = None  # type: ignore

 import llnl.util.filesystem
-import llnl.util.tty as tty
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify
@@ -217,7 +216,7 @@ def unit_test(parser, args, unknown_args):
     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
     with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_clingo_importable_or_raise()
+        spack.bootstrap.ensure_core_dependencies()
         if pytest is None:
             spack.bootstrap.ensure_environment_dependencies()
             import pytest
@@ -237,12 +236,6 @@ def unit_test(parser, args, unknown_args):
         pytest_root = spack.extensions.load_extension(args.extension)

     if args.numprocesses is not None and args.numprocesses > 1:
-        try:
-            import xdist  # noqa: F401
-        except ImportError:
-            tty.error("parallel unit-test requires pytest-xdist module")
-            return 1
-
         pytest_args.extend(
             [
                 "--dist",
@@ -2,48 +2,35 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
-import io
-from typing import List, Optional

 import llnl.util.tty as tty
-from llnl.string import plural
-from llnl.util.filesystem import visit_directory_tree

 import spack.cmd
 import spack.environment as ev
-import spack.spec
 import spack.store
 import spack.verify
-import spack.verify_libraries
-from spack.cmd.common import arguments

-description = "verify spack installations on disk"
+description = "check that all spack packages are on disk as installed"
 section = "admin"
 level = "long"

-MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
-
-
-def setup_parser(subparser: argparse.ArgumentParser):
-    global MANIFEST_SUBPARSER
-    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
-
-    MANIFEST_SUBPARSER = sp.add_parser(
-        "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
-    )
-    MANIFEST_SUBPARSER.add_argument(
+
+def setup_parser(subparser):
+    setup_parser.parser = subparser
+    subparser.add_argument(
         "-l", "--local", action="store_true", help="verify only locally installed packages"
     )
-    MANIFEST_SUBPARSER.add_argument(
+    subparser.add_argument(
         "-j", "--json", action="store_true", help="ouptut json-formatted errors"
     )
-    MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
-    MANIFEST_SUBPARSER.add_argument(
+    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
+    subparser.add_argument(
         "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
     )

-    manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
-    manifest_sp_type.add_argument(
+    type = subparser.add_mutually_exclusive_group()
+    type.add_argument(
         "-s",
         "--specs",
         action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default="specs",
         help="treat entries as specs (default)",
     )
-    manifest_sp_type.add_argument(
+    type.add_argument(
         "-f",
         "--files",
         action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
         help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )

-    libraries_subparser = sp.add_parser(
-        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
-    )
-
-    arguments.add_common_arguments(libraries_subparser, ["constraint"])
-

 def verify(parser, args):
-    cmd = args.verify_command
-    if cmd == "libraries":
-        return verify_libraries(args)
-    elif cmd == "manifest":
-        return verify_manifest(args)
-    parser.error("invalid verify subcommand")
-
-
-def verify_libraries(args):
-    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
-    specs_from_db = [s for s in args.specs(installed=True) if not s.external]
-
-    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
-
-    errors = 0
-    for spec in specs_from_db:
-        try:
-            pkg = spec.package
-        except Exception:
-            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
-        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
-        if error_msg is not None:
-            errors += 1
-            tty.error(error_msg)
-
-    if errors:
-        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
-        return 1
-
-
-def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
-    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
-    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
-        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
-    )
-    visit_directory_tree(spec.prefix, visitor)
-
-    if not visitor.problems:
-        return None
-
-    output = io.StringIO()
-    visitor.write(output, indent=4, brief=True)
-    message = output.getvalue().rstrip()
-    return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
-
-
-def verify_manifest(args):
-    """verify that install directories have not been modified since installation"""
     local = args.local

     if args.type == "files":
         if args.all:
-            MANIFEST_SUBPARSER.error("cannot use --all with --files")
+            setup_parser.parser.print_help()
+            return 1

         for file in args.specs_or_files:
             results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
         env = ev.active_environment()
         specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
     else:
-        MANIFEST_SUBPARSER.error("use --all or specify specs to verify")
+        setup_parser.parser.print_help()
+        return 1

     for spec in specs:
         tty.debug("Verifying package %s")
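Both sides of the verify.py hunks above rely on the same argparse idiom for the -s/-f switch: a mutually exclusive group whose options store a constant into a single destination, which the later `args.type == "files"` check reads back. A minimal, self-contained sketch of that idiom (the `dest`/`const` names are inferred from the surrounding code rather than shown verbatim in the hunk):

    import argparse

    # hypothetical stand-alone parser demonstrating the store_const pattern
    parser = argparse.ArgumentParser(prog="verify-sketch")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-s", "--specs", action="store_const", dest="type", const="specs",
                       default="specs", help="treat entries as specs (default)")
    group.add_argument("-f", "--files", action="store_const", dest="type", const="files",
                       help="treat entries as absolute filenames")

    print(parser.parse_args([]).type)      # -> "specs"
    print(parser.parse_args(["-f"]).type)  # -> "files"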
@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
     opt, i, answer = min(result.answers)
     name = spec.name
     # TODO: Consolidate this code with similar code in solve.py
-    if spack.repo.PATH.is_virtual(spec.name):
+    if spec.virtual:
         providers = [s.name for s in answer.values() if s.package.provides(name)]
         name = providers[0]
@@ -32,10 +32,9 @@
 import copy
 import functools
 import os
-import os.path
 import re
 import sys
-from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union

 import jsonschema
@@ -43,6 +42,7 @@

 import spack.error
 import spack.paths
+import spack.platforms
 import spack.schema
 import spack.schema.bootstrap
 import spack.schema.cdash
@@ -54,20 +54,17 @@
 import spack.schema.develop
 import spack.schema.env
 import spack.schema.env_vars
-import spack.schema.include
-import spack.schema.merged
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
 import spack.schema.upstreams
 import spack.schema.view
-import spack.util.remote_file_cache as rfc_util
-import spack.util.spack_yaml as syaml
-from spack.util.cpus import cpus_available
-from spack.util.spack_yaml import get_mark_from_yaml_data
-
-from .enums import ConfigScopePriority
+
+# Hacked yaml for configuration files preserves line numbers.
+import spack.util.spack_yaml as syaml
+import spack.util.web as web_util
+from spack.util.cpus import cpus_available

 #: Dict from section names -> schema for that section
 SECTION_SCHEMAS: Dict[str, Any] = {
@@ -75,7 +72,6 @@
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
     "env_vars": spack.schema.env_vars.schema,
-    "include": spack.schema.include.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -123,17 +119,6 @@
 #: Type used for raw YAML configuration
 YamlConfigDict = Dict[str, Any]

-#: prefix for name of included configuration scopes
-INCLUDE_SCOPE_PREFIX = "include"
-
-#: safeguard for recursive includes -- maximum include depth
-MAX_RECURSIVE_INCLUDES = 100
-
-
-def _include_cache_location():
-    """Location to cache included configuration files."""
-    return os.path.join(spack.paths.user_cache_path, "includes")
-
-
 class ConfigScope:
     def __init__(self, name: str) -> None:
@@ -141,25 +126,6 @@ def __init__(self, name: str) -> None:
         self.writable = False
         self.sections = syaml.syaml_dict()

-        #: names of any included scopes
-        self._included_scopes: Optional[List["ConfigScope"]] = None
-
-    @property
-    def included_scopes(self) -> List["ConfigScope"]:
-        """Memoized list of included scopes, in the order they appear in this scope."""
-        if self._included_scopes is None:
-            self._included_scopes = []
-
-            includes = self.get_section("include")
-            if includes:
-                include_paths = [included_path(data) for data in includes["include"]]
-                for path in include_paths:
-                    included_scope = include_path_scope(path)
-                    if included_scope:
-                        self._included_scopes.append(included_scope)
-
-        return self._included_scopes
-
     def get_section_filename(self, section: str) -> str:
         raise NotImplementedError
@@ -442,18 +408,26 @@ def _method(self, *args, **kwargs):
     return _method


-ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
-ScopeWithPriority = Tuple[int, ConfigScope]
-
-
 class Configuration:
-    """A hierarchical configuration, merging a number of scopes at different priorities."""
+    """A full Spack configuration, from a hierarchy of config files.
+
+    This class makes it easy to add a new scope on top of an existing one.
+    """

     # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
-    scopes: lang.PriorityOrderedMapping[str, ConfigScope]
+    scopes: Dict[str, ConfigScope]

-    def __init__(self) -> None:
-        self.scopes = lang.PriorityOrderedMapping()
+    def __init__(self, *scopes: ConfigScope) -> None:
+        """Initialize a configuration with an initial list of scopes.
+
+        Args:
+            scopes: list of scopes to add to this
+                Configuration, ordered from lowest to highest precedence
+
+        """
+        self.scopes = collections.OrderedDict()
+        for scope in scopes:
+            self.push_scope(scope)
         self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)

     def ensure_unwrapped(self) -> "Configuration":
@@ -461,59 +435,36 @@ def ensure_unwrapped(self) -> "Configuration":
         return self

     def highest(self) -> ConfigScope:
-        """Scope with the highest precedence"""
-        return next(self.scopes.reversed_values())  # type: ignore
+        """Scope with highest precedence"""
+        return next(reversed(self.scopes.values()))  # type: ignore

     @_config_mutator
-    def push_scope(
-        self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
-    ) -> None:
-        """Adds a scope to the Configuration, at a given priority.
-
-        If a priority is not given, it is assumed to be the current highest priority.
-
-        Args:
-            scope: scope to be added
-            priority: priority of the scope
-        """
-        # TODO: As a follow on to #48784, change this to create a graph of the
-        # TODO: includes AND ensure properly sorted such that the order included
-        # TODO: at the highest level is reflected in the value of an option that
-        # TODO: is set in multiple included files.
-        # before pushing the scope itself, push any included scopes recursively, at same priority
-        for included_scope in reversed(scope.included_scopes):
-            if _depth + 1 > MAX_RECURSIVE_INCLUDES:  # make sure we're not recursing endlessly
-                mark = ""
-                if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
-                    mark = included_scope.path._start_mark  # type: ignore
-                raise RecursiveIncludeError(
-                    f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
-                )
-
-            # record this inclusion so that remove_scope() can use it
-            self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
-
-        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
-        self.scopes.add(scope.name, value=scope, priority=priority)
+    def ensure_scope_ordering(self):
+        """Ensure that scope order matches documented precedent"""
+        # FIXME: We also need to consider that custom configurations and other orderings
+        # may not be preserved correctly
+        if "command_line" in self.scopes:
+            # TODO (when dropping python 3.6): self.scopes.move_to_end
+            self.scopes["command_line"] = self.remove_scope("command_line")
+
+    @_config_mutator
+    def push_scope(self, scope: ConfigScope) -> None:
+        """Add a higher precedence scope to the Configuration."""
+        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
+        self.scopes[scope.name] = scope
+
+    @_config_mutator
+    def pop_scope(self) -> ConfigScope:
+        """Remove the highest precedence scope and return it."""
+        name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
+        return scope

     @_config_mutator
     def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
-        """Removes a scope by name, and returns it. If the scope does not exist, returns None."""
-        try:
-            scope = self.scopes.remove(scope_name)
-            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
-        except KeyError as e:
-            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
-            return None
-
-        # transitively remove included scopes
-        for included_scope in scope.included_scopes:
-            assert (
-                included_scope.name in self.scopes
-            ), f"Included scope '{included_scope.name}' was never added to configuration!"
-            self.remove_scope(included_scope.name)
-
+        """Remove scope by name; has no effect when ``scope_name`` does not exist"""
+        scope = self.scopes.pop(scope_name, None)
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
         return scope

     @property
|
|||||||
return (s for s in self.scopes.values() if s.writable)
|
return (s for s in self.scopes.values() if s.writable)
|
||||||
|
|
||||||
def highest_precedence_scope(self) -> ConfigScope:
|
def highest_precedence_scope(self) -> ConfigScope:
|
||||||
"""Writable scope with the highest precedence."""
|
"""Writable scope with highest precedence."""
|
||||||
return next(s for s in self.scopes.reversed_values() if s.writable)
|
return next(s for s in reversed(self.scopes.values()) if s.writable) # type: ignore
|
||||||
|
|
||||||
def highest_precedence_non_platform_scope(self) -> ConfigScope:
|
def highest_precedence_non_platform_scope(self) -> ConfigScope:
|
||||||
"""Writable non-platform scope with the highest precedence"""
|
"""Writable non-platform scope with highest precedence"""
|
||||||
return next(
|
return next(
|
||||||
s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
|
s
|
||||||
|
for s in reversed(self.scopes.values()) # type: ignore
|
||||||
|
if s.writable and not s.is_platform_dependent
|
||||||
)
|
)
|
||||||
|
|
||||||
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
|
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
|
||||||
@@ -795,7 +748,7 @@ def override(
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        CONFIG.push_scope(path_or_scope, priority=None)
+        CONFIG.push_scope(path_or_scope)
     else:
         base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
@@ -809,7 +762,7 @@ def override(
             break

         overrides = InternalConfigScope(scope_name)
-        CONFIG.push_scope(overrides, priority=None)
+        CONFIG.push_scope(overrides)
         CONFIG.set(path_or_scope, value, scope=scope_name)

     try:
@@ -819,86 +772,13 @@ def override(
         assert scope is overrides


-def _add_platform_scope(
-    cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
-) -> None:
+def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
     """Add a platform-specific subdirectory for the current platform."""
-    import spack.platforms  # circular dependency
-
     platform = spack.platforms.host().name
     scope = DirectoryConfigScope(
         f"{name}/{platform}", os.path.join(path, platform), writable=writable
     )
-    cfg.push_scope(scope, priority=priority)
-
-
-#: Class for the relevance of an optional path conditioned on a limited
-#: python code that evaluates to a boolean and or explicit specification
-#: as optional.
-class IncludePath(NamedTuple):
-    path: str
-    when: str
-    sha256: str
-    optional: bool
-
-
-def included_path(entry: Union[str, dict]) -> IncludePath:
-    """Convert the included path entry into an IncludePath.
-
-    Args:
-        entry: include configuration entry
-
-    Returns: converted entry, where an empty ``when`` means the path is
-        not conditionally included
-    """
-    if isinstance(entry, str):
-        return IncludePath(path=entry, sha256="", when="", optional=False)
-
-    path = entry["path"]
-    sha256 = entry.get("sha256", "")
-    when = entry.get("when", "")
-    optional = entry.get("optional", False)
-    return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
-
-
-def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
-    """Instantiate an appropriate configuration scope for the given path.
-
-    Args:
-        include: optional include path
-
-    Returns: configuration scope
-
-    Raises:
-        ValueError: included path has an unsupported URL scheme, is required
-            but does not exist; configuration stage directory argument is missing
-        ConfigFileError: unable to access remote configuration file(s)
-    """
-    # circular dependencies
-    import spack.spec
-
-    if (not include.when) or spack.spec.eval_conditional(include.when):
-        config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
-        if not config_path:
-            raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
-
-        if os.path.isdir(config_path):
-            # directories are treated as regular ConfigScopes
-            config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
-            tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
-            return DirectoryConfigScope(config_name, config_path)
-
-        if os.path.exists(config_path):
-            # files are assumed to be SingleFileScopes
-            config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
-            tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
-            return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
-
-        if not include.optional:
-            path = f" at ({config_path})" if config_path != include.path else ""
-            raise ValueError(f"Required path ({include.path}) does not exist{path}")
-
-    return None
+    cfg.push_scope(scope)


 def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -926,17 +806,18 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
     return config_paths


-def create_incremental() -> Generator[Configuration, None, None]:
+def create() -> Configuration:
     """Singleton Configuration instance.

     This constructs one instance associated with this module and returns
     it. It is bundled inside a function so that configuration can be
     initialized lazily.
     """
+    cfg = Configuration()
+
     # first do the builtin, hardcoded defaults
-    cfg = create_from(
-        (ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
-    )
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
+    cfg.push_scope(builtin)

     # Builtin paths to configuration files in Spack
     configuration_paths = [
@@ -966,29 +847,16 @@ def create_incremental() -> Generator[Configuration, None, None]:

     # add each scope and its platform-specific directory
     for name, path in configuration_paths:
-        cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
-        # Each scope can have per-platform overrides in subdirectories
-        _add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
-
-    # yield the config incrementally so that each config level's init code can get
-    # data from the one below. This can be tricky, but it enables us to have a
-    # single unified config system.
-    #
-    # TODO: think about whether we want to restrict what types of config can be used
-    #       at each level. e.g., we may want to just more forcibly disallow remote
-    #       config (which uses ssl and other config options) for some of the scopes,
-    #       to make the bootstrap issues more explicit, even if allowing config scope
-    #       init to reference lower scopes is more flexible.
-    yield cfg
-
-
-def create() -> Configuration:
-    """Create a configuration using create_incremental(), return the last yielded result."""
-    return list(create_incremental())[-1]
+        cfg.push_scope(DirectoryConfigScope(name, path))
+
+        # Each scope can have per-platfom overrides in subdirectories
+        _add_platform_scope(cfg, name, path)
+
+    return cfg


 #: This is the singleton configuration instance for Spack.
-CONFIG: Configuration = lang.Singleton(create_incremental)  # type: ignore
+CONFIG: Configuration = lang.Singleton(create)  # type: ignore


 def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -1084,11 +952,10 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:

     Accepts the path syntax described in ``get()``.
     """
-    result = CONFIG.set(path, value, scope)
-    return result
+    return CONFIG.set(path, value, scope)


-def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
+def scopes() -> Dict[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes
@@ -1542,7 +1409,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:

 @contextlib.contextmanager
 def use_configuration(
-    *scopes_or_paths: Union[ScopeWithOptionalPriority, str]
+    *scopes_or_paths: Union[ConfigScope, str]
 ) -> Generator[Configuration, None, None]:
     """Use the configuration scopes passed as arguments within the context manager.

@@ -1557,7 +1424,7 @@ def use_configuration(
     global CONFIG

     # Normalize input and construct a Configuration object
-    configuration = create_from(*scopes_or_paths)
+    configuration = _config_from(scopes_or_paths)
     CONFIG.clear_caches(), configuration.clear_caches()

     saved_config, CONFIG = CONFIG, configuration
@@ -1568,44 +1435,137 @@ def use_configuration(
         CONFIG = saved_config


-def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
-    if isinstance(entry, tuple):
-        return entry
-
-    default_priority = ConfigScopePriority.CONFIG_FILES
-    if isinstance(entry, ConfigScope):
-        return default_priority, entry
-
-    # Otherwise we need to construct it
-    path = os.path.normpath(entry)
-    assert os.path.isdir(path), f'"{path}" must be a directory'
-    name = os.path.basename(path)
-    return default_priority, DirectoryConfigScope(name, path)
-
-
-@lang.memoized
-def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
-    """Creates a configuration object from the scopes passed in input.
-
-    Args:
-        *scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
-            If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
-            string is given, a DirectoryConfigScope is created from it.
-
-    Examples:
-        >>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
-        >>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
-        >>> cfg = create_from(
-        ...     (ConfigScopePriority.COMMAND_LINE, cl_scope),
-        ...     (ConfigScopePriority.BUILTIN, builtin_scope)
-        ... )
-    """
-    scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
-    result = Configuration()
-    for priority, scope in scopes_with_priority:
-        result.push_scope(scope, priority=priority)
-    return result
+@lang.memoized
+def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
+    scopes = []
+    for scope_or_path in scopes_or_paths:
+        # If we have a config scope we are already done
+        if isinstance(scope_or_path, ConfigScope):
+            scopes.append(scope_or_path)
+            continue
+
+        # Otherwise we need to construct it
+        path = os.path.normpath(scope_or_path)
+        assert os.path.isdir(path), f'"{path}" must be a directory'
+        name = os.path.basename(path)
+        scopes.append(DirectoryConfigScope(name, path))
+
+    configuration = Configuration(*scopes)
+    return configuration
+
+
+def raw_github_gitlab_url(url: str) -> str:
+    """Transform a github URL to the raw form to avoid undesirable html.
+
+    Args:
+        url: url to be converted to raw form
+
+    Returns:
+        Raw github/gitlab url or the original url
+    """
+    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
+    # actual URL under https://raw.githubusercontent.com/ with '/blob'
+    # removed and or, '/blame' if needed.
+    if "github" in url or "gitlab" in url:
+        return url.replace("/blob/", "/raw/")
+
+    return url
+
+
+def collect_urls(base_url: str) -> list:
+    """Return a list of configuration URLs.
+
+    Arguments:
+        base_url: URL for a configuration (yaml) file or a directory
+            containing yaml file(s)
+
+    Returns:
+        List of configuration file(s) or empty list if none
+    """
+    if not base_url:
+        return []
+
+    extension = ".yaml"
+
+    if base_url.endswith(extension):
+        return [base_url]
+
+    # Collect configuration URLs if the base_url is a "directory".
+    _, links = web_util.spider(base_url, 0)
+    return [link for link in links if link.endswith(extension)]
+
+
+def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
+    """Retrieve configuration file(s) at the specified URL.
+
+    Arguments:
+        url: URL for a configuration (yaml) file or a directory containing
+            yaml file(s)
+        dest_dir: destination directory
+        skip_existing: Skip files that already exist in dest_dir if
+            ``True``; otherwise, replace those files
+
+    Returns:
+        Path to the corresponding file if URL is or contains a
+        single file and it is the only file in the destination directory or
+        the root (dest_dir) directory if multiple configuration files exist
+        or are retrieved.
+    """
+
+    def _fetch_file(url):
+        raw = raw_github_gitlab_url(url)
+        tty.debug(f"Reading config from url {raw}")
+        return web_util.fetch_url_text(raw, dest_dir=dest_dir)
+
+    if not url:
+        raise ConfigFileError("Cannot retrieve configuration without a URL")
+
+    # Return the local path to the cached configuration file OR to the
+    # directory containing the cached configuration files.
+    config_links = collect_urls(url)
+    existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
+
+    paths = []
+    for config_url in config_links:
+        basename = os.path.basename(config_url)
+        if skip_existing and basename in existing_files:
+            tty.warn(
+                f"Will not fetch configuration from {config_url} since a "
+                f"version already exists in {dest_dir}"
+            )
+            path = os.path.join(dest_dir, basename)
+        else:
+            path = _fetch_file(config_url)
+
+        if path:
+            paths.append(path)
+
+    if paths:
+        return dest_dir if len(paths) > 1 else paths[0]
+
+    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
+
+
+def get_mark_from_yaml_data(obj):
+    """Try to get ``spack.util.spack_yaml`` mark from YAML data.
+
+    We try the object, and if that fails we try its first member (if it's a container).
+
+    Returns:
+        mark if one is found, otherwise None.
+    """
+    # mark of object itelf
+    mark = getattr(obj, "_start_mark", None)
+    if mark:
+        return mark
+
+    # mark of first member if it is a container
+    if isinstance(obj, (list, dict)):
+        first_member = next(iter(obj), None)
+        if first_member:
+            mark = getattr(first_member, "_start_mark", None)
+
+    return mark


 def determine_number_of_jobs(
@@ -1712,7 +1672,3 @@ def get_path(path, data):

     # give up and return None if nothing worked
     return None
-
-
-class RecursiveIncludeError(spack.error.SpackError):
-    """Too many levels of recursive includes."""
@@ -41,8 +41,6 @@
     Union,
 )

-import spack.repo
-
 try:
     import uuid
@@ -1126,7 +1124,7 @@ def _add(
             installation_time:
                 Date and time of installation
             allow_missing: if True, don't warn when installation is not found on on disk
-                This is useful when installing specs without build/test deps.
+                This is useful when installing specs without build deps.
         """
         if not spec.concrete:
             raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
@@ -1146,8 +1144,10 @@ def _add(
                 edge.spec,
                 explicit=False,
                 installation_time=installation_time,
-                # allow missing build / test only deps
-                allow_missing=allow_missing or edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag,
+                # allow missing build-only deps. This prevents excessive warnings when a spec is
+                # installed, and its build dep is missing a build dep; there's no need to install
+                # the build dep's build dep first, and there's no need to warn about it missing.
+                allow_missing=allow_missing or edge.depflag == dt.BUILD,
             )

         # Make sure the directory layout agrees whether the spec is installed
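A note on the flag arithmetic in the removed line above: `edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag` is true exactly when an edge carries no dependency types other than build and test (Python's `&` binds tighter than `==`). A minimal sketch with hypothetical bit values; the real constants live in Spack's dependency-type module, which is not shown in this hunk:

    # hypothetical bit values standing in for the dt.* flags
    BUILD, LINK, RUN, TEST = 1, 2, 4, 8

    def build_or_test_only(depflag: int) -> bool:
        # true when no bits outside BUILD|TEST are set
        return depflag & (BUILD | TEST) == depflag

    print(build_or_test_only(BUILD))         # True
    print(build_or_test_only(BUILD | TEST))  # True
    print(build_or_test_only(BUILD | LINK))  # False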
@@ -1556,12 +1556,7 @@ def _query(
         # If we did fine something, the query spec can't be virtual b/c we matched an actual
         # package installation, so skip the virtual check entirely. If we *didn't* find anything,
         # check all the deferred specs *if* the query is virtual.
-        if (
-            not results
-            and query_spec is not None
-            and deferred
-            and spack.repo.PATH.is_virtual(query_spec.name)
-        ):
+        if not results and query_spec is not None and deferred and query_spec.virtual:
             results = [spec for spec in deferred if spec.satisfies(query_spec)]

         return results
@@ -310,7 +310,7 @@ def find_windows_kit_roots() -> List[str]:

     @staticmethod
     def find_windows_kit_bin_paths(
-        kit_base: Union[Optional[str], Optional[list]] = None,
+        kit_base: Union[Optional[str], Optional[list]] = None
     ) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -325,7 +325,7 @@ def find_windows_kit_bin_paths(

     @staticmethod
     def find_windows_kit_lib_paths(
-        kit_base: Union[Optional[str], Optional[list]] = None,
+        kit_base: Union[Optional[str], Optional[list]] = None
     ) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -7,7 +7,6 @@
|
|||||||
import collections
|
import collections
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
import os
|
import os
|
||||||
import pathlib
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
@@ -16,7 +15,6 @@
|
|||||||
|
|
||||||
import llnl.util.filesystem
|
import llnl.util.filesystem
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
import llnl.util.symlink
|
|
||||||
import llnl.util.tty
|
import llnl.util.tty
|
||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
@@ -72,21 +70,13 @@ def dedupe_paths(paths: List[str]) -> List[str]:
"""Deduplicate paths based on inode and device number. In case the list contains first a
symlink and then the directory it points to, the symlink is replaced with the directory path.
This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
the former."""
the former`."""
seen: Dict[Tuple[int, int], str] = {}

linked_parent_check = lambda x: any(
[llnl.util.symlink.islink(str(y)) for y in pathlib.Path(x).parents]
)

for path in paths:
identifier = file_identifier(path)
if identifier not in seen:
seen[identifier] = path
# we also want to deprioritize paths if they contain a symlink in any parent
elif not os.path.islink(path):
# (not just the basedir): e.g. oneapi has "latest/bin",
# where "latest" is a symlink to 2025.0"
elif not (llnl.util.symlink.islink(path) or linked_parent_check(path)):
seen[identifier] = path
return list(seen.values())

@@ -253,7 +243,7 @@ def prefix_from_path(self, *, path: str) -> str:
raise NotImplementedError("must be implemented by derived classes")

def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str]
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs.

@@ -269,8 +259,6 @@ def detect_specs(
)
return []

from spack.repo import PATH as repo_path

result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)

@@ -317,10 +305,7 @@ def detect_specs(

resolved_specs[spec] = candidate_path
try:
# Validate the spec calling a package specific method
spec.validate_detection()
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '
@@ -462,7 +462,8 @@ def _execute_extends(pkg):
if dep_spec.name == "python" and not pkg.name == "python-venv":
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))

pkg.extendees[dep_spec.name] = (dep_spec, when_spec)
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
pkg.extendees[dep_spec.name] = (dep_spec, None)

return _execute_extends

@@ -567,7 +568,7 @@ def patch(
"""

def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency],
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep

@@ -25,7 +25,7 @@
}


def _check_concrete(spec: "spack.spec.Spec") -> None:
def _check_concrete(spec):
"""If the spec is not concrete, raise a ValueError"""
if not spec.concrete:
raise ValueError("Specs passed to a DirectoryLayout must be concrete!")

@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
spec = _get_spec(prefix)

if spec:
spec.set_prefix(prefix)
spec.prefix = prefix
specs.append(spec)
continue

@@ -84,7 +84,7 @@ class DirectoryLayout:

def __init__(
self,
root: str,
root,
*,
projections: Optional[Dict[str, str]] = None,
hash_length: Optional[int] = None,
@@ -120,17 +120,17 @@ def __init__(
self.manifest_file_name = "install_manifest.json"

@property
def hidden_file_regexes(self) -> Tuple[str]:
def hidden_file_regexes(self):
return ("^{0}$".format(re.escape(self.metadata_dir)),)

def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str:
def relative_path_for_spec(self, spec):
_check_concrete(spec)

projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection)
return str(Path(path))

def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, "w", encoding="utf-8") as f:

@@ -138,7 +138,7 @@ def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
# the full provenance, so it's availabe if we want it later
spec.to_json(f, hash=ht.dag_hash)

def write_host_environment(self, spec: "spack.spec.Spec") -> None:
def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack
versioning. We use it in the case that an analysis later needs to
easily access this information.

@@ -148,7 +148,7 @@ def write_host_environment(self, spec: "spack.spec.Spec") -> None:
with open(env_file, "w", encoding="utf-8") as fd:
sjson.dump(environ, fd)

def read_spec(self, path: str) -> "spack.spec.Spec":
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
with open(path, encoding="utf-8") as f:

@@ -159,28 +159,26 @@ def read_spec(self, path: str) -> "spack.spec.Spec":
# Too late for conversion; spec_file_path() already called.
spec = spack.spec.Spec.from_yaml(f)
else:
raise SpecReadError(f"Did not recognize spec file extension: {extension}")
raise SpecReadError(
"Did not recognize spec file extension:" " {0}".format(extension)
)
except Exception as e:
if spack.config.get("config:debug"):
raise
raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}")
raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))

# Specs read from actual installations are always concrete
spec._mark_concrete()
return spec

def spec_file_path(self, spec: "spack.spec.Spec") -> str:
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
return yaml_path if os.path.exists(yaml_path) else json_path

def deprecated_file_path(
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
self,
deprecated_spec: "spack.spec.Spec",
deprecator_spec: Optional["spack.spec.Spec"] = None,
) -> str:
"""Gets full path to spec file for deprecated spec

If the deprecator_spec is provided, use that. Otherwise, assume
@@ -214,16 +212,16 @@ def deprecated_file_path(

return yaml_path if os.path.exists(yaml_path) else json_path

def metadata_path(self, spec: "spack.spec.Spec") -> str:
def metadata_path(self, spec):
return os.path.join(spec.prefix, self.metadata_dir)

def env_metadata_path(self, spec: "spack.spec.Spec") -> str:
def env_metadata_path(self, spec):
return os.path.join(self.metadata_path(spec), "install_environment.json")

def build_packages_path(self, spec: "spack.spec.Spec") -> str:
def build_packages_path(self, spec):
return os.path.join(self.metadata_path(spec), self.packages_dir)

def create_install_directory(self, spec: "spack.spec.Spec") -> None:
def create_install_directory(self, spec):
_check_concrete(spec)

# Create install directory with properly configured permissions

@@ -241,7 +239,7 @@ def create_install_directory(self, spec: "spack.spec.Spec") -> None:

self.write_spec(spec, self.spec_file_path(spec))

def ensure_installed(self, spec: "spack.spec.Spec") -> None:
def ensure_installed(self, spec):
"""
Throws InconsistentInstallDirectoryError if:
1. spec prefix does not exist

@@ -268,7 +266,7 @@ def ensure_installed(self, spec: "spack.spec.Spec") -> None:
"Spec file in %s does not match hash!" % spec_file_path
)

def path_for_spec(self, spec: "spack.spec.Spec") -> str:
def path_for_spec(self, spec):
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)

@@ -279,13 +277,23 @@ def path_for_spec(self, spec: "spack.spec.Spec") -> str:
assert not path.startswith(self.root)
return os.path.join(self.root, path)

def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None:
def remove_install_directory(self, spec, deprecated=False):
"""Removes a prefix and any empty parent directories from the root.
Raised RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)

# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),
}
else:
kwargs = {} # the default value for ignore_errors is false

if deprecated:
if os.path.exists(path):
try:
@@ -296,16 +304,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
raise RemoveFailedError(spec, path, e) from e
elif os.path.exists(path):
try:
if sys.platform == "win32":
shutil.rmtree(path, **kwargs)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
shutil.rmtree(
path,
ignore_errors=False,
onerror=fs.readonly_file_handler(ignore_errors=False),
)
else:
shutil.rmtree(path)
except OSError as e:
raise RemoveFailedError(spec, path, e) from e

@@ -12,13 +12,3 @@ class InstallRecordStatus(enum.Flag):
DEPRECATED = enum.auto()
MISSING = enum.auto()
ANY = INSTALLED | DEPRECATED | MISSING


class ConfigScopePriority(enum.IntEnum):
"""Priorities of the different kind of config scopes used by Spack"""

BUILTIN = 0
CONFIG_FILES = 1
CUSTOM = 2
ENVIRONMENT = 3
COMMAND_LINE = 4

@@ -166,7 +166,7 @@ def __init__(
" ".join(self._install_target(s.safe_name()) for s in item.prereqs),
item.target.spec_hash(),
item.target.unsafe_format(
"{name}{@version}{variants}{ arch=architecture} {%compiler}"
"{name}{@version}{%compiler}{variants}{arch=architecture}"
),
item.buildcache_flag,
)
@@ -10,6 +10,8 @@
import re
import shutil
import stat
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union

@@ -30,6 +32,7 @@
import spack.paths
import spack.repo
import spack.schema.env
import spack.schema.merged
import spack.spec
import spack.spec_list
import spack.store

@@ -40,6 +43,7 @@
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
from spack import traverse
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY

@@ -47,8 +51,6 @@
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables

from ..enums import ConfigScopePriority

SpecPair = spack.concretize.SpecPair

#: environment variable used to indicate the active environment

@@ -385,7 +387,6 @@ def create_in_dir(
# dev paths in this environment to refer to their original
# locations.
_rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
_rewrite_relative_repos_paths_on_relocation(env, init_file_dir)

return env

@@ -402,8 +403,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)

# Skip if the substituted and expanded path is the same (e.g. when absolute)
# Skip if the expanded path is the same (e.g. when absolute)
if entry["path"] == expanded_path:
if dev_path == expanded_path:
continue

tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
@@ -418,34 +419,6 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
env._re_read()


def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
"""When initializing the environment from a manifest file and we plan
to store the environment in a different directory, we have to rewrite
relative repo paths to absolute ones and expand environment variables."""
with env:
repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
if not repos_specs:
return
for i, entry in enumerate(repos_specs):
repo_path = substitute_path_variables(entry)
expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)

# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry == expanded_path:
continue

tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))

repos_specs[i] = expanded_path

spack.config.set("repos", repos_specs, scope=env.scope_name)

env.repos_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
# in the manifest that we read
env._re_read()


def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
"""Returns the directory associated with a named environment.

@@ -573,6 +546,13 @@ def _write_yaml(data, str_or_file):
syaml.dump_config(data, str_or_file, default_flow_style=False)


def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.spec.get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)


def _is_dev_spec_and_has_changed(spec):
"""Check if the passed spec is a dev build and whether it has changed since the
last installation"""

@@ -1005,7 +985,7 @@ def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
when = _eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True

@@ -1550,6 +1530,9 @@ def _get_specs_to_concretize(
return new_user_specs, kept_user_specs, specs_to_concretize

def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
# Avoid cyclic dependency
import spack.solver.asp

# Exit early if the set of concretized specs is the set of user specs
new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
if not new_user_specs:
@@ -2409,8 +2392,6 @@ def invalidate_repository_cache(self):

def __enter__(self):
self._previous_active = _active_environment
if self._previous_active:
deactivate()
activate(self)
return self

@@ -2660,23 +2641,20 @@ def _ensure_env_dir():
# error handling for bad manifests is handled on other code paths
return

# TODO: make this recursive
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
included_path = spack.config.included_path(include)
if os.path.isabs(include):
path = included_path.path
if os.path.isabs(path):
continue

abspath = pathlib.Path(os.path.normpath(environment_dir / path))
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include file from outside environment: {path}")
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue

orig_abspath = os.path.normpath(envfile.parent / path)
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{path}'")
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue

fs.touchp(abspath)

@@ -2899,7 +2877,7 @@ def extract_name(_item):
continue

condition_str = item.get("when", "True")
if not spack.spec.eval_conditional(condition_str):
if not _eval_conditional(condition_str):
continue

yield idx, item
@@ -2960,20 +2938,127 @@ def __iter__(self):
def __str__(self):
return str(self.manifest_file)

@property
def included_config_scopes(self) -> List[spack.config.ConfigScope]:
"""List of included configuration scopes from the manifest.

Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scope will take
precedence over later ones.

This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).

Returns: Configuration scopes associated with the environment manifest

Raises:
SpackEnvironmentError: if the manifest includes a remote file but
no configuration stage directory has been identified
"""
scopes: List[spack.config.ConfigScope] = []

# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self[TOP_LEVEL_KEY].get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)

# If scheme is not valid, config_path is not a url
# of a type Spack is generally aware
if spack.util.url.validate_scheme(include_url.scheme):
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)

# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)

# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path

elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)

# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.manifest_dir, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))

if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = f"env:{self.name}:{os.path.basename(config_path)}"
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = f"env:{self.name}:{config_path}"
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
scopes.append(
spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema
)
)
else:
missing.append(config_path)
continue

if missing:
msg = "Detected {0} missing include path(s):".format(len(missing))
msg += "\n {0}".format("\n ".join(missing))
raise spack.config.ConfigFileError(msg)

return scopes

@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes

scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope(
self.scope_name,
str(self.manifest_file),
spack.schema.env.schema,
yaml_path=[TOP_LEVEL_KEY],
)
),
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
@@ -2982,12 +3067,14 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()

def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()

@contextlib.contextmanager
def use_config(self):

@@ -8,7 +8,6 @@
import llnl.util.tty as tty
from llnl.util.tty.color import colorize

import spack.config
import spack.environment as ev
import spack.repo
import spack.schema.environment

@@ -159,8 +158,7 @@ def activate(
# become PATH variables.
#

with env.manifest.use_config():
env_vars_yaml = env.manifest.configuration.get("env_vars", None)
env_vars_yaml = spack.config.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml))

@@ -197,8 +195,7 @@ def deactivate() -> EnvironmentModifications:
if active is None:
return env_mods

with active.manifest.use_config():
env_vars_yaml = active.manifest.configuration.get("env_vars", None)
env_vars_yaml = spack.config.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
@@ -9,8 +9,7 @@
import shutil
import stat
import sys
import tempfile
from typing import Callable, Dict, Optional
from typing import Callable, Dict, List, Optional

from typing_extensions import Literal

@@ -78,7 +77,7 @@ def view_copy(

# Order of this dict is somewhat irrelevant
prefix_to_projection = {
str(s.prefix): view.get_projection_for_spec(s)
s.prefix: view.get_projection_for_spec(s)
for s in spec.traverse(root=True, order="breadth")
if not s.external
}

@@ -185,7 +184,7 @@ def __init__(
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
self._link(src, dst, self, spec)

def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def add_specs(self, *specs, **kwargs):
"""
Add given specs to view.

@@ -200,19 +199,19 @@ def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError

def add_standalone(self, spec: spack.spec.Spec) -> bool:
def add_standalone(self, spec):
"""
Add (link) a standalone package into this view.
"""
raise NotImplementedError

def check_added(self, spec: spack.spec.Spec) -> bool:
def check_added(self, spec):
"""
Check if the given concrete spec is active in this view.
"""
raise NotImplementedError

def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def remove_specs(self, *specs, **kwargs):
"""
Removes given specs from view.
@@ -231,25 +230,25 @@ def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError

def remove_standalone(self, spec: spack.spec.Spec) -> None:
def remove_standalone(self, spec):
"""
Remove (unlink) a standalone package from this view.
"""
raise NotImplementedError

def get_projection_for_spec(self, spec: spack.spec.Spec) -> str:
def get_projection_for_spec(self, spec):
"""
Get the projection in this view for a spec.
"""
raise NotImplementedError

def get_all_specs(self) -> List[spack.spec.Spec]:
def get_all_specs(self):
"""
Get all specs currently active in this view.
"""
raise NotImplementedError

def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
def get_spec(self, spec):
"""
Return the actual spec linked in this view (i.e. do not look it up
in the database by name).

@@ -263,7 +262,7 @@ def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
"""
raise NotImplementedError

def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None:
def print_status(self, *specs, **kwargs):
"""
Print a short summary about the given specs, detailing whether..
* ..they are active in the view.

@@ -643,7 +642,7 @@ def print_status(self, *specs, **kwargs):
specs.sort()

abbreviated = [
s.cformat("{name}{@version}{compiler_flags}{variants}{%compiler}")
s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
for s in specs
]

@@ -694,7 +693,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec

def add_specs(self, *specs, **kwargs) -> None:
def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view."""
assert all((s.concrete for s in specs))
if len(specs) == 0:

@@ -709,10 +708,7 @@ def add_specs(self, *specs, **kwargs) -> None:
def skip_list(file):
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir

# Determine if the root is on a case-insensitive filesystem
visitor = SourceMergeVisitor(ignore=skip_list)
normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)

visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)

# Gather all the directories to be made and files to be linked
for spec in specs:
@@ -831,7 +827,7 @@ def get_projection_for_spec(self, spec):
#####################
# utility functions #
#####################
def get_spec_from_file(filename) -> Optional[spack.spec.Spec]:
def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f)

@@ -888,8 +884,3 @@ def get_dependencies(specs):

class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""


def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
return os.path.exists(os.path.join(path, os.path.basename(sentinel.name).upper()))

@@ -482,7 +482,7 @@ class SimpleDAG(DotGraphBuilder):
"""Simple DOT graph, with nodes colored uniformly and edges without properties"""

def node_entry(self, node):
format_option = "{name}{@version}{/hash:7}{%compiler}"
format_option = "{name}{@version}{%compiler}{/hash:7}"
return node.dag_hash(), f'[label="{node.format(format_option)}"]'

def edge_entry(self, edge):

@@ -515,7 +515,7 @@ def visit(self, edge):
super().visit(edge)

def node_entry(self, node):
node_str = node.format("{name}{@version}{/hash:7}{%compiler}")
node_str = node.format("{name}{@version}{%compiler}{/hash:7}")
options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]'
if node.dag_hash() in self.main_unified_space:
options = f'[label="{node_str}", group="main_psid"]'

@@ -6,7 +6,7 @@
import spack.deptypes as dt
import spack.repo

HASHES = []
hashes = []


class SpecHashDescriptor:

@@ -23,7 +23,7 @@ def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
self.depflag = depflag
self.package_hash = package_hash
self.name = name
HASHES.append(self)
hashes.append(self)
# Allow spec hashes to have an alternate computation method
self.override = override

@@ -43,9 +43,13 @@ def __repr__(self):
)


#: The DAG hash includes all inputs that can affect how a package is built.
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="hash"

#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
)
@@ -2,14 +2,198 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import fnmatch
import io
import os
import re
from typing import Dict, List, Union

import llnl.util.tty as tty
from llnl.util.filesystem import visit_directory_tree
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
from llnl.util.lang import stable_partition

import spack.config
import spack.error
import spack.verify_libraries
import spack.util.elf as elf

#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
# kernel
"linux-vdso.so.*",
"libselinux.so.*",
# musl libc
"ld-musl-*.so.*",
# glibc
"ld-linux*.so.*",
"ld64.so.*",
"libanl.so.*",
"libc.so.*",
"libdl.so.*",
"libm.so.*",
"libmemusage.so.*",
"libmvec.so.*",
"libnsl.so.*",
"libnss_compat.so.*",
"libnss_db.so.*",
"libnss_dns.so.*",
"libnss_files.so.*",
"libnss_hesiod.so.*",
"libpcprofile.so.*",
"libpthread.so.*",
"libresolv.so.*",
"librt.so.*",
"libSegFault.so.*",
"libthread_db.so.*",
"libutil.so.*",
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
"libasan.so.*",
"libatomic.so.*",
"libcc1.so.*",
"libgcc_s.so.*",
"libgfortran.so.*",
"libgomp.so.*",
"libitm.so.*",
"liblsan.so.*",
"libquadmath.so.*",
"libssp.so.*",
"libstdc++.so.*",
"libtsan.so.*",
"libubsan.so.*",
# systemd
"libudev.so.*",
# cuda driver
"libcuda.so.*",
]


def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
return (
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
and parent.is_little_endian == child.is_little_endian
and parent.is_64_bit == child.is_64_bit
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
)


def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
try:
with open(candidate_path, "rb") as g:
return is_compatible(current_elf, elf.parse_elf(g))
except (OSError, elf.ElfParsingError):
return False


class Problem:
def __init__(
self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
) -> None:
self.resolved = resolved
self.unresolved = unresolved
self.relative_rpaths = relative_rpaths


class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
def __init__(self, allow_unresolved_patterns: List[str]) -> None:
self.problems: Dict[str, Problem] = {}
self._allow_unresolved_regex = re.compile(
"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
)

def allow_unresolved(self, needed: bytes) -> bool:
try:
name = needed.decode("utf-8")
except UnicodeDecodeError:
return False
return bool(self._allow_unresolved_regex.match(name))

def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# We work with byte strings for paths.
path = os.path.join(root, rel_path).encode("utf-8")

# For $ORIGIN interpolation: should not have trailing dir seperator.
origin = os.path.dirname(path)

# Retrieve the needed libs + rpaths.
try:
with open(path, "rb") as f:
parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
except (OSError, elf.ElfParsingError):
# Not dealing with an invalid ELF file.
return

# If there's no needed libs all is good
if not parsed_elf.has_needed:
return

# Get the needed libs and rpaths (notice: byte strings)
# Don't force an encoding cause paths are just a bag of bytes.
needed_libs = parsed_elf.dt_needed_strs

rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []

# We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
# supported in general. Also remove empty paths.
rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]

# Do not allow relative rpaths (they are relative to the current working directory)
rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)

# If there's a / in the needed lib, it's opened directly, otherwise it needs
# a search.
direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)

# Do not allow relative paths in direct libs (they are relative to the current working
# directory)
direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)

resolved: Dict[bytes, bytes] = {}

for lib in search_libs:
if self.allow_unresolved(lib):
continue
for rpath in rpaths:
candidate = os.path.join(rpath, lib)
if candidate_matches(parsed_elf, candidate):
resolved[lib] = candidate
break
else:
unresolved.append(lib)

# Check if directly opened libs are compatible
for lib in direct_libs:
if candidate_matches(parsed_elf, lib):
resolved[lib] = lib
else:
unresolved.append(lib)

if unresolved or relative_rpaths:
self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)

def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
pass

def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# There can be binaries in .spack/test which shouldn't be checked.
if rel_path == ".spack":
return False
return True

def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
return False


class CannotLocateSharedLibraries(spack.error.SpackError):
pass


def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
try:
return byte_str.decode("utf-8")
except UnicodeDecodeError:
return byte_str


def post_install(spec, explicit):
@@ -20,23 +204,36 @@ def post_install(spec, explicit):
|
|||||||
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
|
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
|
||||||
return
|
return
|
||||||
|
|
||||||
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
|
visitor = ResolveSharedElfLibDepsVisitor(
|
||||||
[*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
|
[*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
|
||||||
)
|
)
|
||||||
visit_directory_tree(spec.prefix, visitor)
|
visit_directory_tree(spec.prefix, visitor)
|
||||||
|
|
||||||
|
# All good?
|
||||||
if not visitor.problems:
|
if not visitor.problems:
|
||||||
return
|
return
|
||||||
|
|
||||||
output = io.StringIO("not all executables and libraries can resolve their dependencies:\n")
|
# For now just list the issues (print it in ldd style, except we don't recurse)
|
||||||
visitor.write(output)
|
output = io.StringIO()
|
||||||
|
output.write("not all executables and libraries can resolve their dependencies:\n")
|
||||||
|
for path, problem in visitor.problems.items():
|
||||||
|
output.write(path)
|
||||||
|
output.write("\n")
|
||||||
|
for needed, full_path in problem.resolved.items():
|
||||||
|
output.write(" ")
|
||||||
|
if needed == full_path:
|
||||||
|
output.write(maybe_decode(needed))
|
||||||
|
else:
|
||||||
|
output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
|
||||||
|
output.write("\n")
|
||||||
|
for not_found in problem.unresolved:
|
||||||
|
output.write(f" {maybe_decode(not_found)} => not found\n")
|
||||||
|
for relative_rpath in problem.relative_rpaths:
|
||||||
|
output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
|
||||||
|
|
||||||
message = output.getvalue().strip()
|
message = output.getvalue().strip()
|
||||||
|
|
||||||
if policy == "error":
|
if policy == "error":
|
||||||
raise CannotLocateSharedLibraries(message)
|
raise CannotLocateSharedLibraries(message)
|
||||||
|
|
||||||
tty.warn(message)
|
tty.warn(message)
|
||||||
|
|
||||||
|
|
||||||
class CannotLocateSharedLibraries(spack.error.SpackError):
|
|
||||||
pass
|
|
||||||
|
|||||||
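For orientation, the loop above produces an ldd-style report, one problematic binary per block. A small runnable sketch of the same formatting (paths and library names are invented):

import io

resolved = {"libc.so.6": "/lib/x86_64-linux-gnu/libc.so.6"}
unresolved = ["libmissing.so.2"]

out = io.StringIO()
out.write("not all executables and libraries can resolve their dependencies:\n")
out.write("bin/tool\n")
for needed, full_path in resolved.items():
    out.write(f" {needed} => {full_path}\n")
for not_found in unresolved:
    out.write(f" {not_found} => not found\n")
print(out.getvalue().strip())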
@@ -21,6 +21,7 @@
from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize

import spack.build_environment
import spack.config
import spack.error
import spack.package_base
@@ -397,7 +398,7 @@ def stand_alone_tests(self, kwargs):
        Args:
            kwargs (dict): arguments to be used by the test process
        """
        import spack.build_environment  # avoid circular dependency
        import spack.build_environment

        spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
@@ -462,8 +463,6 @@ def write_tested_status(self):

@contextlib.contextmanager
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
    import spack.build_environment  # avoid circular dependency

    wdir = "." if work_dir is None else work_dir
    tester = pkg.tester
    assert test_name and test_name.startswith(
|
|||||||
import spack.util.environment
|
import spack.util.environment
|
||||||
import spack.util.lock
|
import spack.util.lock
|
||||||
|
|
||||||
from .enums import ConfigScopePriority
|
|
||||||
|
|
||||||
#: names of profile statistics
|
#: names of profile statistics
|
||||||
stat_names = pstats.Stats.sort_arg_dict_default
|
stat_names = pstats.Stats.sort_arg_dict_default
|
||||||
|
|
||||||
@@ -874,19 +872,14 @@ def add_command_line_scopes(
|
|||||||
scopes = ev.environment_path_scopes(name, path)
|
scopes = ev.environment_path_scopes(name, path)
|
||||||
if scopes is None:
|
if scopes is None:
|
||||||
if os.path.isdir(path): # directory with config files
|
if os.path.isdir(path): # directory with config files
|
||||||
cfg.push_scope(
|
cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
|
||||||
spack.config.DirectoryConfigScope(name, path, writable=False),
|
spack.config._add_platform_scope(cfg, name, path, writable=False)
|
||||||
priority=ConfigScopePriority.CUSTOM,
|
|
||||||
)
|
|
||||||
spack.config._add_platform_scope(
|
|
||||||
cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
|
|
||||||
)
|
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
|
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
|
||||||
|
|
||||||
for scope in scopes:
|
for scope in scopes:
|
||||||
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
|
cfg.push_scope(scope)
|
||||||
|
|
||||||
|
|
||||||
def _main(argv=None):
|
def _main(argv=None):
|
||||||
@@ -959,9 +952,7 @@ def _main(argv=None):
|
|||||||
# Push scopes from the command line last
|
# Push scopes from the command line last
|
||||||
if args.config_scopes:
|
if args.config_scopes:
|
||||||
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
||||||
spack.config.CONFIG.push_scope(
|
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
|
||||||
spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
|
|
||||||
)
|
|
||||||
setup_main_options(args)
|
setup_main_options(args)
|
||||||
|
|
||||||
# ------------------------------------------------------------------------
|
# ------------------------------------------------------------------------
|
||||||
@@ -1007,7 +998,6 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
|
|||||||
args, unknown = parser.parse_known_args(main_args.command)
|
args, unknown = parser.parse_known_args(main_args.command)
|
||||||
# we need to inherit verbose since the install command checks for it
|
# we need to inherit verbose since the install command checks for it
|
||||||
args.verbose = main_args.verbose
|
args.verbose = main_args.verbose
|
||||||
args.lines = main_args.lines
|
|
||||||
|
|
||||||
# Now that we know what command this is and what its args are, determine
|
# Now that we know what command this is and what its args are, determine
|
||||||
# whether we can continue with a bad environment and raise if not.
|
# whether we can continue with a bad environment and raise if not.
|
||||||
|
|||||||
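The priority= argument added to push_scope above means a scope's precedence no longer depends purely on push order. A toy model of that behavior (not Spack's implementation; the enum here only has the two members used in this hunk, with made-up values):

from enum import IntEnum

class ConfigScopePriority(IntEnum):
    CUSTOM = 1
    COMMAND_LINE = 2

scopes = []  # (priority, name, mapping)

def push_scope(name, data, priority):
    scopes.append((priority, name, data))

def lookup(key):
    # highest priority wins, regardless of the order scopes were pushed
    for _, _, data in sorted(scopes, key=lambda s: s[0], reverse=True):
        if key in data:
            return data[key]
    return None

push_scope("command_line", {"verify": "error"}, ConfigScopePriority.COMMAND_LINE)
push_scope("custom", {"verify": "warn"}, ConfigScopePriority.CUSTOM)
assert lookup("verify") == "error"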
@@ -330,17 +330,18 @@ class BaseConfiguration:
    default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}

    def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
        # Module where type(self) is defined
        m = inspect.getmodule(self)
        assert m is not None  # make mypy happy
        self.module = m
        # Spec for which we want to generate a module file
        self.spec = spec
        self.name = module_set_name
        self.explicit = explicit
        # Dictionary of configuration options that should be applied to the spec
        # Dictionary of configuration options that should be applied
        # to the spec
        self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)

    @property
    def module(self):
        return inspect.getmodule(self)

    @property
    def projections(self):
        """Projection from specs to module names"""
@@ -564,12 +565,6 @@ def __init__(self, configuration):
    def spec(self):
        return self.conf.spec

    @tengine.context_property
    def tags(self):
        if not hasattr(self.spec.package, "tags"):
            return []
        return self.spec.package.tags

    @tengine.context_property
    def timestamp(self):
        return datetime.datetime.now()
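The __init__ change above trades the module property for a one-time lookup plus an assert, so later uses of self.module are non-Optional for mypy. A minimal sketch of the pattern:

import inspect

class Configuration:
    def __init__(self) -> None:
        m = inspect.getmodule(self)  # Optional[ModuleType]
        assert m is not None  # make mypy happy
        self.module = m              # always a ModuleType from here on

print(Configuration().module.__name__)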
@@ -780,6 +775,10 @@ def __init__(
    ) -> None:
        self.spec = spec

        # This class is meant to be derived. Get the module of the
        # actual writer.
        self.module = inspect.getmodule(self)
        assert self.module is not None  # make mypy happy
        m = self.module

        # Create the triplet of configuration/layout/context
@@ -817,10 +816,6 @@ def __init__(
            name = type(self).__name__
            raise ModulercHeaderNotDefined(msg.format(name))

    @property
    def module(self):
        return inspect.getmodule(self)

    def _get_template(self):
        """Gets the template that will be rendered for this spec."""
        # Get templates and put them in the order of importance:
@@ -209,7 +209,7 @@ def provides(self):
        # All the other tokens in the hierarchy must be virtual dependencies
        for x in self.hierarchy_tokens:
            if self.spec.package.provides(x):
                provides[x] = self.spec
                provides[x] = self.spec[x]
        return provides

    @property
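The one-line change in provides() above switches between recording the package's own spec and the dependency that satisfies the virtual x. A plain-dict model of the distinction (names are invented; this is not Spack's Spec API):

spec = {"name": "py-mpi4py", "providers": {"mpi": "mpich"}}

provides_self = {"mpi": spec["name"]}             # record the spec itself
provides_dep = {"mpi": spec["providers"]["mpi"]}  # record the node providing the virtual
print(provides_self, provides_dep)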
@@ -48,7 +48,6 @@
|
|||||||
import spack.store
|
import spack.store
|
||||||
import spack.url
|
import spack.url
|
||||||
import spack.util.environment
|
import spack.util.environment
|
||||||
import spack.util.executable
|
|
||||||
import spack.util.path
|
import spack.util.path
|
||||||
import spack.util.web
|
import spack.util.web
|
||||||
import spack.variant
|
import spack.variant
|
||||||
@@ -126,10 +125,9 @@ def windows_establish_runtime_linkage(self):
|
|||||||
# Spack should in general not modify things it has not installed
|
# Spack should in general not modify things it has not installed
|
||||||
# we can reasonably expect externals to have their link interface properly established
|
# we can reasonably expect externals to have their link interface properly established
|
||||||
if sys.platform == "win32" and not self.spec.external:
|
if sys.platform == "win32" and not self.spec.external:
|
||||||
win_rpath = fsys.WindowsSimulatedRPath(self)
|
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
|
||||||
win_rpath.add_library_dependent(*self.win_add_library_dependent())
|
self.win_rpath.add_rpath(*self.win_add_rpath())
|
||||||
win_rpath.add_rpath(*self.win_add_rpath())
|
self.win_rpath.establish_link()
|
||||||
win_rpath.establish_link()
|
|
||||||
|
|
||||||
|
|
||||||
#: Registers which are the detectable packages, by repo and package name
|
#: Registers which are the detectable packages, by repo and package name
|
||||||
@@ -744,6 +742,7 @@ def __init__(self, spec):
|
|||||||
# Set up timing variables
|
# Set up timing variables
|
||||||
self._fetch_time = 0.0
|
self._fetch_time = 0.0
|
||||||
|
|
||||||
|
self.win_rpath = fsys.WindowsSimulatedRPath(self)
|
||||||
super().__init__()
|
super().__init__()
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> "PackageBase":
|
def __getitem__(self, key: str) -> "PackageBase":
|
||||||
@@ -1288,13 +1287,12 @@ def extendee_spec(self):
|
|||||||
if not self.extendees:
|
if not self.extendees:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
deps = []
|
||||||
|
|
||||||
# If the extendee is in the spec's deps already, return that.
|
# If the extendee is in the spec's deps already, return that.
|
||||||
deps = [
|
for dep in self.spec.traverse(deptype=("link", "run")):
|
||||||
dep
|
if dep.name in self.extendees:
|
||||||
for dep in self.spec.dependencies(deptype=("link", "run"))
|
deps.append(dep)
|
||||||
for d, when in self.extendees.values()
|
|
||||||
if dep.satisfies(d) and self.spec.satisfies(when)
|
|
||||||
]
|
|
||||||
|
|
||||||
if deps:
|
if deps:
|
||||||
assert len(deps) == 1
|
assert len(deps) == 1
|
||||||
@@ -1371,14 +1369,6 @@ def prefix(self):
|
|||||||
def home(self):
|
def home(self):
|
||||||
return self.prefix
|
return self.prefix
|
||||||
|
|
||||||
@property
|
|
||||||
def command(self) -> spack.util.executable.Executable:
|
|
||||||
"""Returns the main executable for this package."""
|
|
||||||
path = os.path.join(self.home.bin, self.spec.name)
|
|
||||||
if fsys.is_exe(path):
|
|
||||||
return spack.util.executable.Executable(path)
|
|
||||||
raise RuntimeError(f"Unable to locate {self.spec.name} command in {self.home.bin}")
|
|
||||||
|
|
||||||
@property # type: ignore[misc]
|
@property # type: ignore[misc]
|
||||||
@memoized
|
@memoized
|
||||||
def compiler(self):
|
def compiler(self):
|
||||||
|
|||||||
@@ -83,7 +83,6 @@ def __init__(
|
|||||||
level: int,
|
level: int,
|
||||||
working_dir: str,
|
working_dir: str,
|
||||||
reverse: bool = False,
|
reverse: bool = False,
|
||||||
ordering_key: Optional[Tuple[str, int]] = None,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize a new Patch instance.
|
"""Initialize a new Patch instance.
|
||||||
|
|
||||||
@@ -93,7 +92,6 @@ def __init__(
|
|||||||
level: patch level
|
level: patch level
|
||||||
working_dir: relative path *within* the stage to change to
|
working_dir: relative path *within* the stage to change to
|
||||||
reverse: reverse the patch
|
reverse: reverse the patch
|
||||||
ordering_key: key used to ensure patches are applied in a consistent order
|
|
||||||
"""
|
"""
|
||||||
# validate level (must be an integer >= 0)
|
# validate level (must be an integer >= 0)
|
||||||
if not isinstance(level, int) or not level >= 0:
|
if not isinstance(level, int) or not level >= 0:
|
||||||
@@ -107,13 +105,6 @@ def __init__(
|
|||||||
self.working_dir = working_dir
|
self.working_dir = working_dir
|
||||||
self.reverse = reverse
|
self.reverse = reverse
|
||||||
|
|
||||||
# The ordering key is passed when executing package.py directives, and is only relevant
|
|
||||||
# after a solve to build concrete specs with consistently ordered patches. For concrete
|
|
||||||
# specs read from a file, we add patches in the order of its patches variants and the
|
|
||||||
# ordering_key is irrelevant. In that case, use a default value so we don't need to branch
|
|
||||||
# on whether ordering_key is None where it's used, just to make static analysis happy.
|
|
||||||
self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
|
|
||||||
|
|
||||||
def apply(self, stage: "spack.stage.Stage") -> None:
|
def apply(self, stage: "spack.stage.Stage") -> None:
|
||||||
"""Apply a patch to source in a stage.
|
"""Apply a patch to source in a stage.
|
||||||
|
|
||||||
@@ -211,8 +202,9 @@ def __init__(
|
|||||||
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
|
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
|
|
||||||
super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key)
|
super().__init__(pkg, abs_path, level, working_dir, reverse)
|
||||||
self.path = abs_path
|
self.path = abs_path
|
||||||
|
self.ordering_key = ordering_key
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def sha256(self) -> str:
|
def sha256(self) -> str:
|
||||||
@@ -274,11 +266,13 @@ def __init__(
|
|||||||
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
|
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
|
||||||
(only required for compressed URL patches)
|
(only required for compressed URL patches)
|
||||||
"""
|
"""
|
||||||
super().__init__(pkg, url, level, working_dir, reverse, ordering_key)
|
super().__init__(pkg, url, level, working_dir, reverse)
|
||||||
|
|
||||||
self.url = url
|
self.url = url
|
||||||
self._stage: Optional["spack.stage.Stage"] = None
|
self._stage: Optional["spack.stage.Stage"] = None
|
||||||
|
|
||||||
|
self.ordering_key = ordering_key
|
||||||
|
|
||||||
if allowed_archive(self.url) and not archive_sha256:
|
if allowed_archive(self.url) and not archive_sha256:
|
||||||
raise spack.error.PatchDirectiveError(
|
raise spack.error.PatchDirectiveError(
|
||||||
"Compressed patches require 'archive_sha256' "
|
"Compressed patches require 'archive_sha256' "
|
||||||
|
|||||||
@@ -108,8 +108,6 @@ def _get_user_cache_path():
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
default_misc_cache_path = os.path.join(user_cache_path, "cache")

#: concretization cache for Spack concretizations
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")

# Below paths pull configuration from the host environment.
#
@@ -283,21 +283,21 @@ def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix)
def is_macho_magic(magic: bytes) -> bool:
    return (
        # In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be.
        magic.startswith(b"\xcf\xfa\xed\xfe")
        magic.startswith(b"\xCF\xFA\xED\xFE")
        or magic.startswith(b"\xfe\xed\xfa\xcf")
        or magic.startswith(b"\xFE\xED\xFA\xCF")
        or magic.startswith(b"\xce\xfa\xed\xfe")
        or magic.startswith(b"\xCE\xFA\xED\xFE")
        or magic.startswith(b"\xfe\xed\xfa\xce")
        or magic.startswith(b"\xFE\xED\xFA\xCE")
        # universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists).
        # Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are
        # the number of binaries; in JVM class files it's the java version number. We assume there
        # are less than 10 binaries in a universal binary.
        or (magic.startswith(b"\xca\xfe\xba\xbe") and int.from_bytes(magic[4:8], "big") < 10)
        or (magic.startswith(b"\xCA\xFE\xBA\xBE") and int.from_bytes(magic[4:8], "big") < 10)
        or (magic.startswith(b"\xbe\xba\xfe\xca") and int.from_bytes(magic[4:8], "little") < 10)
        or (magic.startswith(b"\xBE\xBA\xFE\xCA") and int.from_bytes(magic[4:8], "little") < 10)
    )


def is_elf_magic(magic: bytes) -> bool:
    return magic.startswith(b"\x7fELF")
    return magic.startswith(b"\x7FELF")


def is_binary(filename: str) -> bool:
@@ -406,8 +406,8 @@ def fixup_macos_rpaths(spec):
    entries which makes it harder to adjust with ``install_name_tool
    -delete_rpath``.
    """
    if spec.external or not spec.concrete:
    if spec.external or spec.virtual:
        tty.warn("external/abstract spec cannot be fixed up: {0!s}".format(spec))
        tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
        return False

    if "platform=darwin" not in spec:
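A standalone sketch of dispatching on the magic bytes tested above (abbreviated to the 64-bit little-endian Mach-O prefix; the real check also covers the other byte orders and universal binaries):

def classify(magic: bytes) -> str:
    if magic.startswith(b"\x7fELF"):
        return "elf"
    if magic.startswith(b"\xcf\xfa\xed\xfe"):
        return "mach-o"
    return "other"

assert classify(b"\x7fELF\x02\x01\x01\x00") == "elf"
assert classify(b"\xcf\xfa\xed\xfe\x07\x00\x00\x01") == "mach-o"
assert classify(b"PK\x03\x04") == "other"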
@@ -32,7 +32,6 @@
|
|||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import working_dir
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
import spack
|
|
||||||
import spack.caches
|
import spack.caches
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
@@ -50,8 +49,6 @@
|
|||||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||||
|
|
||||||
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
|
|
||||||
|
|
||||||
|
|
||||||
def python_package_for_repo(namespace):
|
def python_package_for_repo(namespace):
|
||||||
"""Returns the full namespace of a repository, given its relative one
|
"""Returns the full namespace of a repository, given its relative one
|
||||||
@@ -912,52 +909,19 @@ def __reduce__(self):
|
|||||||
return RepoPath.unmarshal, self.marshal()
|
return RepoPath.unmarshal, self.marshal()
|
||||||
|
|
||||||
|
|
||||||
def _parse_package_api_version(
|
|
||||||
config: Dict[str, Any],
|
|
||||||
min_api: Tuple[int, int] = spack.min_package_api_version,
|
|
||||||
max_api: Tuple[int, int] = spack.package_api_version,
|
|
||||||
) -> Tuple[int, int]:
|
|
||||||
api = config.get("api")
|
|
||||||
if api is None:
|
|
||||||
package_api = (1, 0)
|
|
||||||
else:
|
|
||||||
if not isinstance(api, str):
|
|
||||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
|
||||||
api_match = _API_REGEX.match(api)
|
|
||||||
if api_match is None:
|
|
||||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
|
||||||
package_api = (int(api_match.group(1)), int(api_match.group(2)))
|
|
||||||
|
|
||||||
if min_api <= package_api <= max_api:
|
|
||||||
return package_api
|
|
||||||
|
|
||||||
min_str = ".".join(str(i) for i in min_api)
|
|
||||||
max_str = ".".join(str(i) for i in max_api)
|
|
||||||
curr_str = ".".join(str(i) for i in package_api)
|
|
||||||
raise BadRepoError(
|
|
||||||
f"Package API v{curr_str} is not supported by this version of Spack ("
|
|
||||||
f"must be between v{min_str} and v{max_str})"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Repo:
|
class Repo:
|
||||||
"""Class representing a package repository in the filesystem.
|
"""Class representing a package repository in the filesystem.
|
||||||
|
|
||||||
Each package repository must have a top-level configuration file called `repo.yaml`.
|
Each package repository must have a top-level configuration file
|
||||||
|
called `repo.yaml`.
|
||||||
|
|
||||||
It contains the following keys:
|
Currently, `repo.yaml` must define:
|
||||||
|
|
||||||
`namespace`:
|
`namespace`:
|
||||||
A Python namespace where the repository's packages should live.
|
A Python namespace where the repository's packages should live.
|
||||||
|
|
||||||
`subdirectory`:
|
`subdirectory`:
|
||||||
An optional subdirectory name where packages are placed
|
An optional subdirectory name where packages are placed
|
||||||
|
|
||||||
`api`:
|
|
||||||
A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
|
|
||||||
For the repo to be compatible with the current version of Spack, the version must be
|
|
||||||
greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
|
|
||||||
:py:data:`spack.package_api_version`.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -994,7 +958,7 @@ def check(condition, msg):
|
|||||||
f"{os.path.join(root, repo_config_name)} must define a namespace.",
|
f"{os.path.join(root, repo_config_name)} must define a namespace.",
|
||||||
)
|
)
|
||||||
|
|
||||||
self.namespace: str = config["namespace"]
|
self.namespace = config["namespace"]
|
||||||
check(
|
check(
|
||||||
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
|
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
|
||||||
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
|
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
|
||||||
@@ -1007,14 +971,12 @@ def check(condition, msg):
|
|||||||
# Keep name components around for checking prefixes.
|
# Keep name components around for checking prefixes.
|
||||||
self._names = self.full_namespace.split(".")
|
self._names = self.full_namespace.split(".")
|
||||||
|
|
||||||
packages_dir: str = config.get("subdirectory", packages_dir_name)
|
packages_dir = config.get("subdirectory", packages_dir_name)
|
||||||
self.packages_path = os.path.join(self.root, packages_dir)
|
self.packages_path = os.path.join(self.root, packages_dir)
|
||||||
check(
|
check(
|
||||||
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
||||||
)
|
)
|
||||||
|
|
||||||
self.package_api = _parse_package_api_version(config)
|
|
||||||
|
|
||||||
# Class attribute overrides by package name
|
# Class attribute overrides by package name
|
||||||
self.overrides = overrides or {}
|
self.overrides = overrides or {}
|
||||||
|
|
||||||
@@ -1064,7 +1026,7 @@ def is_prefix(self, fullname: str) -> bool:
|
|||||||
parts = fullname.split(".")
|
parts = fullname.split(".")
|
||||||
return self._names[: len(parts)] == parts
|
return self._names[: len(parts)] == parts
|
||||||
|
|
||||||
def _read_config(self) -> Dict[str, Any]:
|
def _read_config(self) -> Dict[str, str]:
|
||||||
"""Check for a YAML config file in this db's root directory."""
|
"""Check for a YAML config file in this db's root directory."""
|
||||||
try:
|
try:
|
||||||
with open(self.config_file, encoding="utf-8") as reponame_file:
|
with open(self.config_file, encoding="utf-8") as reponame_file:
|
||||||
@@ -1406,8 +1368,6 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
|
|||||||
config.write(f" namespace: '{namespace}'\n")
|
config.write(f" namespace: '{namespace}'\n")
|
||||||
if subdir != packages_dir_name:
|
if subdir != packages_dir_name:
|
||||||
config.write(f" subdirectory: '{subdir}'\n")
|
config.write(f" subdirectory: '{subdir}'\n")
|
||||||
x, y = spack.package_api_version
|
|
||||||
config.write(f" api: v{x}.{y}\n")
|
|
||||||
|
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
# try to clean up.
|
# try to clean up.
|
||||||
|
|||||||
@@ -177,7 +177,7 @@ def build_report_for_package(self, report_dir, package, duration):
        # something went wrong pre-cdash "configure" phase b/c we have an exception and only
        # "update" was encounterd.
        # dump the report in the configure line so teams can see what the issue is
        if len(phases_encountered) == 1 and package.get("exception"):
        if len(phases_encountered) == 1 and package["exception"]:
            # TODO this mapping is not ideal since these are pre-configure errors
            # we need to determine if a more appropriate cdash phase can be utilized
            # for now we will add a message to the log explaining this
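The .get() form above avoids a KeyError for report entries that never recorded an exception; only the truthiness of the value matters. Tiny sketch (the dict is a made-up report entry):

package = {"name": "example-pkg", "phases": ["update"]}

safe = bool(package.get("exception"))  # False: key absent, nothing raised
try:
    unsafe = bool(package["exception"])
except KeyError:
    unsafe = "raises KeyError"
print(safe, unsafe)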
@@ -7,7 +7,8 @@
|
|||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
import jsonschema.validators
|
|
||||||
|
import llnl.util.lang
|
||||||
|
|
||||||
from spack.error import SpecSyntaxError
|
from spack.error import SpecSyntaxError
|
||||||
|
|
||||||
@@ -17,24 +18,22 @@ class DeprecationMessage(typing.NamedTuple):
|
|||||||
error: bool
|
error: bool
|
||||||
|
|
||||||
|
|
||||||
|
# jsonschema is imported lazily as it is heavy to import
|
||||||
|
# and increases the start-up time
|
||||||
|
def _make_validator():
|
||||||
def _validate_spec(validator, is_spec, instance, schema):
|
def _validate_spec(validator, is_spec, instance, schema):
|
||||||
"""Check if all additional keys are valid specs."""
|
"""Check if the attributes on instance are valid specs."""
|
||||||
import spack.spec_parser
|
import spack.spec_parser
|
||||||
|
|
||||||
if not validator.is_type(instance, "object"):
|
if not validator.is_type(instance, "object"):
|
||||||
return
|
return
|
||||||
|
|
||||||
properties = schema.get("properties") or {}
|
|
||||||
|
|
||||||
for spec_str in instance:
|
for spec_str in instance:
|
||||||
if spec_str in properties:
|
|
||||||
continue
|
|
||||||
try:
|
try:
|
||||||
spack.spec_parser.parse(spec_str)
|
spack.spec_parser.parse(spec_str)
|
||||||
except SpecSyntaxError:
|
except SpecSyntaxError:
|
||||||
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
|
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
|
||||||
|
|
||||||
|
|
||||||
def _deprecated_properties(validator, deprecated, instance, schema):
|
def _deprecated_properties(validator, deprecated, instance, schema):
|
||||||
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
||||||
return
|
return
|
||||||
@@ -65,13 +64,15 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
|||||||
if errors:
|
if errors:
|
||||||
yield jsonschema.ValidationError("\n".join(errors))
|
yield jsonschema.ValidationError("\n".join(errors))
|
||||||
|
|
||||||
|
return jsonschema.validators.extend(
|
||||||
Validator = jsonschema.validators.extend(
|
|
||||||
jsonschema.Draft7Validator,
|
jsonschema.Draft7Validator,
|
||||||
{"additionalKeysAreSpecs": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
Validator = llnl.util.lang.Singleton(_make_validator)
|
||||||
|
|
||||||
|
|
||||||
def _append(string: str) -> bool:
|
def _append(string: str) -> bool:
|
||||||
"""Test if a spack YAML string is an append.
|
"""Test if a spack YAML string is an append.
|
||||||
|
|
||||||
|
|||||||
@@ -84,11 +84,7 @@
|
|||||||
"duplicates": {
|
"duplicates": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]},
|
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
|
||||||
"max_dupes": {
|
|
||||||
"type": "object",
|
|
||||||
"additional_properties": {"type": "integer", "minimum": 1},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"static_analysis": {"type": "boolean"},
|
"static_analysis": {"type": "boolean"},
|
||||||
|
|||||||
@@ -58,15 +58,6 @@
|
|||||||
{"type": "string"}, # deprecated
|
{"type": "string"}, # deprecated
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"concretization_cache": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enable": {"type": "boolean"},
|
|
||||||
"url": {"type": "string"},
|
|
||||||
"entry_limit": {"type": "integer", "minimum": 0},
|
|
||||||
"size_limit": {"type": "integer", "minimum": 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"install_hash_length": {"type": "integer", "minimum": 1},
|
"install_hash_length": {"type": "integer", "minimum": 1},
|
||||||
"install_path_scheme": {"type": "string"}, # deprecated
|
"install_path_scheme": {"type": "string"}, # deprecated
|
||||||
"build_stage": {
|
"build_stage": {
|
||||||
|
|||||||
@@ -29,7 +29,11 @@
|
|||||||
# merged configuration scope schemas
|
# merged configuration scope schemas
|
||||||
spack.schema.merged.properties,
|
spack.schema.merged.properties,
|
||||||
# extra environment schema properties
|
# extra environment schema properties
|
||||||
{"specs": spec_list_schema, "include_concrete": include_concrete},
|
{
|
||||||
|
"include": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||||
|
"specs": spec_list_schema,
|
||||||
|
"include_concrete": include_concrete,
|
||||||
|
},
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,41 +0,0 @@
|
|||||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
"""Schema for include.yaml configuration file.
|
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/lib/spack/spack/schema/include.py
|
|
||||||
:lines: 12-
|
|
||||||
"""
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
#: Properties for inclusion in other schemas
|
|
||||||
properties: Dict[str, Any] = {
|
|
||||||
"include": {
|
|
||||||
"type": "array",
|
|
||||||
"default": [],
|
|
||||||
"additionalProperties": False,
|
|
||||||
"items": {
|
|
||||||
"anyOf": [
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"when": {"type": "string"},
|
|
||||||
"path": {"type": "string"},
|
|
||||||
"sha256": {"type": "string"},
|
|
||||||
"optional": {"type": "boolean"},
|
|
||||||
},
|
|
||||||
"required": ["path"],
|
|
||||||
"additionalProperties": False,
|
|
||||||
},
|
|
||||||
{"type": "string"},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#: Full schema with metadata
|
|
||||||
schema = {
|
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
||||||
"title": "Spack include configuration file schema",
|
|
||||||
"properties": properties,
|
|
||||||
}
|
|
||||||
@@ -21,7 +21,6 @@
|
|||||||
import spack.schema.definitions
|
import spack.schema.definitions
|
||||||
import spack.schema.develop
|
import spack.schema.develop
|
||||||
import spack.schema.env_vars
|
import spack.schema.env_vars
|
||||||
import spack.schema.include
|
|
||||||
import spack.schema.mirrors
|
import spack.schema.mirrors
|
||||||
import spack.schema.modules
|
import spack.schema.modules
|
||||||
import spack.schema.packages
|
import spack.schema.packages
|
||||||
@@ -41,7 +40,6 @@
|
|||||||
spack.schema.definitions.properties,
|
spack.schema.definitions.properties,
|
||||||
spack.schema.develop.properties,
|
spack.schema.develop.properties,
|
||||||
spack.schema.env_vars.properties,
|
spack.schema.env_vars.properties,
|
||||||
spack.schema.include.properties,
|
|
||||||
spack.schema.mirrors.properties,
|
spack.schema.mirrors.properties,
|
||||||
spack.schema.modules.properties,
|
spack.schema.modules.properties,
|
||||||
spack.schema.packages.properties,
|
spack.schema.packages.properties,
|
||||||
@@ -50,6 +48,7 @@
|
|||||||
spack.schema.view.properties,
|
spack.schema.view.properties,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#: Full schema with metadata
|
#: Full schema with metadata
|
||||||
schema = {
|
schema = {
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||||
|
|||||||
@@ -39,7 +39,7 @@
|
|||||||
"load": array_of_strings,
|
"load": array_of_strings,
|
||||||
"suffixes": {
|
"suffixes": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalKeysAreSpecs": True,
|
"validate_spec": True,
|
||||||
"additionalProperties": {"type": "string"}, # key
|
"additionalProperties": {"type": "string"}, # key
|
||||||
},
|
},
|
||||||
"environment": spack.schema.environment.definition,
|
"environment": spack.schema.environment.definition,
|
||||||
@@ -48,7 +48,11 @@
|
|||||||
|
|
||||||
projections_scheme = spack.schema.projections.properties["projections"]
|
projections_scheme = spack.schema.projections.properties["projections"]
|
||||||
|
|
||||||
common_props = {
|
module_type_configuration: Dict = {
|
||||||
|
"type": "object",
|
||||||
|
"default": {},
|
||||||
|
"validate_spec": True,
|
||||||
|
"properties": {
|
||||||
"verbose": {"type": "boolean", "default": False},
|
"verbose": {"type": "boolean", "default": False},
|
||||||
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
||||||
"include": array_of_strings,
|
"include": array_of_strings,
|
||||||
@@ -59,33 +63,25 @@
|
|||||||
"naming_scheme": {"type": "string"},
|
"naming_scheme": {"type": "string"},
|
||||||
"projections": projections_scheme,
|
"projections": projections_scheme,
|
||||||
"all": module_file_configuration,
|
"all": module_file_configuration,
|
||||||
}
|
},
|
||||||
|
|
||||||
tcl_configuration = {
|
|
||||||
"type": "object",
|
|
||||||
"default": {},
|
|
||||||
"additionalKeysAreSpecs": True,
|
|
||||||
"properties": {**common_props},
|
|
||||||
"additionalProperties": module_file_configuration,
|
"additionalProperties": module_file_configuration,
|
||||||
}
|
}
|
||||||
|
|
||||||
lmod_configuration = {
|
tcl_configuration = module_type_configuration.copy()
|
||||||
"type": "object",
|
|
||||||
"default": {},
|
lmod_configuration = module_type_configuration.copy()
|
||||||
"additionalKeysAreSpecs": True,
|
lmod_configuration["properties"].update(
|
||||||
"properties": {
|
{
|
||||||
**common_props,
|
|
||||||
"core_compilers": array_of_strings,
|
"core_compilers": array_of_strings,
|
||||||
"hierarchy": array_of_strings,
|
"hierarchy": array_of_strings,
|
||||||
"core_specs": array_of_strings,
|
"core_specs": array_of_strings,
|
||||||
"filter_hierarchy_specs": {
|
"filter_hierarchy_specs": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalKeysAreSpecs": True,
|
"validate_spec": True,
|
||||||
"additionalProperties": array_of_strings,
|
"additionalProperties": array_of_strings,
|
||||||
},
|
},
|
||||||
},
|
|
||||||
"additionalProperties": module_file_configuration,
|
|
||||||
}
|
}
|
||||||
|
)
|
||||||
|
|
||||||
module_config_properties = {
|
module_config_properties = {
|
||||||
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
||||||
|
|||||||
@@ -5,12 +5,8 @@
|
|||||||
import collections.abc
|
import collections.abc
|
||||||
import copy
|
import copy
|
||||||
import enum
|
import enum
|
||||||
import errno
|
|
||||||
import functools
|
import functools
|
||||||
import hashlib
|
|
||||||
import io
|
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import pprint
|
import pprint
|
||||||
@@ -20,25 +16,12 @@
|
|||||||
import typing
|
import typing
|
||||||
import warnings
|
import warnings
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from typing import (
|
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
|
||||||
IO,
|
|
||||||
Callable,
|
|
||||||
Dict,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
NamedTuple,
|
|
||||||
Optional,
|
|
||||||
Set,
|
|
||||||
Tuple,
|
|
||||||
Type,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import current_file_position
|
|
||||||
from llnl.util.lang import elide_list
|
from llnl.util.lang import elide_list
|
||||||
|
|
||||||
import spack
|
import spack
|
||||||
@@ -52,24 +35,19 @@
|
|||||||
import spack.error
|
import spack.error
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.package_prefs
|
import spack.package_prefs
|
||||||
import spack.patch
|
|
||||||
import spack.paths
|
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.solver.splicing
|
import spack.solver.splicing
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
import spack.util.crypto
|
import spack.util.crypto
|
||||||
import spack.util.hash
|
|
||||||
import spack.util.libc
|
import spack.util.libc
|
||||||
import spack.util.module_cmd as md
|
|
||||||
import spack.util.path
|
import spack.util.path
|
||||||
import spack.util.timer
|
import spack.util.timer
|
||||||
import spack.variant as vt
|
import spack.variant as vt
|
||||||
import spack.version as vn
|
import spack.version as vn
|
||||||
import spack.version.git_ref_lookup
|
import spack.version.git_ref_lookup
|
||||||
from spack import traverse
|
from spack import traverse
|
||||||
from spack.util.file_cache import FileCache
|
|
||||||
|
|
||||||
from .core import (
|
from .core import (
|
||||||
AspFunction,
|
AspFunction,
|
||||||
@@ -95,19 +73,17 @@
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"

#: Data class that contain configuration on what a
class OutputConfiguration(NamedTuple):
#: clingo solve should output.
    """Data class that contains configuration on what a clingo solve should output."""
#:
#: Args:
    #: Print out coarse timers for different solve phases
#: timers (bool): Print out coarse timers for different solve phases.
    timers: bool
#: stats (bool): Whether to output Clingo's internal solver statistics.
    #: Whether to output Clingo's internal solver statistics
#: out: Optional output stream for the generated ASP program.
    stats: bool
#: setup_only (bool): if True, stop after setup and don't solve (default False).
    #: Optional output stream for the generated ASP program
OutputConfiguration = collections.namedtuple(
    out: Optional[io.IOBase]
    "OutputConfiguration", ["timers", "stats", "out", "setup_only"]
    #: If True, stop after setup and don't solve
)
    setup_only: bool


#: Default output configuration for a solve
DEFAULT_OUTPUT_CONFIGURATION = OutputConfiguration(
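Both declarations above expose the same four fields; the typed NamedTuple version just documents and type-checks them. Sketch of constructing it:

import io
from typing import NamedTuple, Optional

class OutputConfiguration(NamedTuple):
    timers: bool
    stats: bool
    out: Optional[io.IOBase]
    setup_only: bool

cfg = OutputConfiguration(timers=False, stats=False, out=None, setup_only=False)
print(cfg.timers, cfg.setup_only)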
@@ -522,7 +498,7 @@ def _compute_specs_from_answer_set(self):
            # The specs must be unified to get here, so it is safe to associate any satisfying spec
            # with the input. Multiple inputs may be matched to the same concrete spec
            node = SpecBuilder.make_node(pkg=input_spec.name)
            if spack.repo.PATH.is_virtual(input_spec.name):
            if input_spec.virtual:
                providers = [
                    spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
                ]
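The guard above changes where "is this name virtual?" is answered: by the package repository rather than by the Spec object itself. A toy stand-in for the idea (not Spack's repo API):

VIRTUALS = {"mpi", "blas", "lapack"}  # hypothetical repository contents

def is_virtual(name: str) -> bool:
    return name in VIRTUALS

print(is_virtual("mpi"), is_virtual("zlib"))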
@@ -557,363 +533,6 @@ def format_unsolved(unsolved_specs):
|
|||||||
msg += "\n\t(No candidate specs from solver)"
|
msg += "\n\t(No candidate specs from solver)"
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
def to_dict(self, test: bool = False) -> dict:
|
|
||||||
"""Produces dict representation of Result object
|
|
||||||
|
|
||||||
Does not include anything related to unsatisfiability as we
|
|
||||||
are only interested in storing satisfiable results
|
|
||||||
"""
|
|
||||||
serial_node_arg = (
|
|
||||||
lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
|
|
||||||
)
|
|
||||||
ret = dict()
|
|
||||||
ret["asp"] = self.asp
|
|
||||||
ret["criteria"] = self.criteria
|
|
||||||
ret["optimal"] = self.optimal
|
|
||||||
ret["warnings"] = self.warnings
|
|
||||||
ret["nmodels"] = self.nmodels
|
|
||||||
ret["abstract_specs"] = [str(x) for x in self.abstract_specs]
|
|
||||||
ret["satisfiable"] = self.satisfiable
|
|
||||||
serial_answers = []
|
|
||||||
for answer in self.answers:
|
|
||||||
serial_answer = answer[:2]
|
|
||||||
serial_answer_dict = {}
|
|
||||||
for node, spec in answer[2].items():
|
|
||||||
serial_answer_dict[serial_node_arg(node)] = spec.to_dict()
|
|
||||||
serial_answer = serial_answer + (serial_answer_dict,)
|
|
||||||
serial_answers.append(serial_answer)
|
|
||||||
ret["answers"] = serial_answers
|
|
||||||
ret["specs_by_input"] = {}
|
|
||||||
input_specs = {} if not self.specs_by_input else self.specs_by_input
|
|
||||||
for input, spec in input_specs.items():
|
|
||||||
ret["specs_by_input"][str(input)] = spec.to_dict()
|
|
||||||
return ret
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def from_dict(obj: dict):
|
|
||||||
"""Returns Result object from compatible dictionary"""
|
|
||||||
|
|
||||||
def _dict_to_node_argument(dict):
|
|
||||||
id = dict["id"]
|
|
||||||
pkg = dict["pkg"]
|
|
||||||
return NodeArgument(id=id, pkg=pkg)
|
|
||||||
|
|
||||||
def _str_to_spec(spec_str):
|
|
||||||
return spack.spec.Spec(spec_str)
|
|
||||||
|
|
||||||
def _dict_to_spec(spec_dict):
|
|
||||||
loaded_spec = spack.spec.Spec.from_dict(spec_dict)
|
|
||||||
_ensure_external_path_if_external(loaded_spec)
|
|
||||||
spack.spec.Spec.ensure_no_deprecated(loaded_spec)
|
|
||||||
return loaded_spec
|
|
||||||
|
|
||||||
asp = obj.get("asp")
|
|
||||||
spec_list = obj.get("abstract_specs")
|
|
||||||
if not spec_list:
|
|
||||||
raise RuntimeError("Invalid json for concretization Result object")
|
|
||||||
if spec_list:
|
|
||||||
spec_list = [_str_to_spec(x) for x in spec_list]
|
|
||||||
result = Result(spec_list, asp)
|
|
||||||
result.criteria = obj.get("criteria")
|
|
||||||
result.optimal = obj.get("optimal")
|
|
||||||
result.warnings = obj.get("warnings")
|
|
||||||
result.nmodels = obj.get("nmodels")
|
|
||||||
result.satisfiable = obj.get("satisfiable")
|
|
||||||
result._unsolved_specs = []
|
|
||||||
answers = []
|
|
||||||
for answer in obj.get("answers", []):
|
|
||||||
loaded_answer = answer[:2]
|
|
||||||
answer_node_dict = {}
|
|
||||||
for node, spec in answer[2].items():
|
|
||||||
answer_node_dict[_dict_to_node_argument(json.loads(node))] = _dict_to_spec(spec)
|
|
||||||
loaded_answer.append(answer_node_dict)
|
|
||||||
answers.append(tuple(loaded_answer))
|
|
||||||
result.answers = answers
|
|
||||||
result._concrete_specs_by_input = {}
|
|
||||||
result._concrete_specs = []
|
|
||||||
for input, spec in obj.get("specs_by_input", {}).items():
|
|
||||||
result._concrete_specs_by_input[_str_to_spec(input)] = _dict_to_spec(spec)
|
|
||||||
result._concrete_specs.append(_dict_to_spec(spec))
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class ConcretizationCache:
|
|
||||||
"""Store for Spack concretization results and statistics
|
|
||||||
|
|
||||||
Serializes solver result objects and statistics to json and stores
|
|
||||||
at a given endpoint in a cache associated by the sha256 of the
|
|
||||||
asp problem and the involved control files.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, root: Union[str, None] = None):
|
|
||||||
root = root or spack.config.get(
|
|
||||||
"config:concretization_cache:url", spack.paths.default_conc_cache_path
|
|
||||||
)
|
|
||||||
self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
|
|
||||||
self._fc = FileCache(self.root)
|
|
||||||
self._cache_manifest = ".cache_manifest"
|
|
||||||
self._manifest_queue: List[Tuple[pathlib.Path, int]] = []
|
|
||||||
|
|
||||||
def cleanup(self):
|
|
||||||
"""Prunes the concretization cache according to configured size and entry
|
|
||||||
count limits. Cleanup is done in FIFO ordering."""
|
|
||||||
# TODO: determine a better default
|
|
||||||
entry_limit = spack.config.get("config:concretization_cache:entry_limit", 1000)
|
|
||||||
bytes_limit = spack.config.get("config:concretization_cache:size_limit", 3e8)
|
|
||||||
# lock the entire buildcache as we're removing a lot of data from the
|
|
||||||
# manifest and cache itself
|
|
||||||
with self._fc.read_transaction(self._cache_manifest) as f:
|
|
||||||
count, cache_bytes = self._extract_cache_metadata(f)
|
|
||||||
if not count or not cache_bytes:
|
|
||||||
return
|
|
||||||
entry_count = int(count)
|
|
||||||
manifest_bytes = int(cache_bytes)
|
|
||||||
# move beyond the metadata entry
|
|
||||||
f.readline()
|
|
||||||
if entry_count > entry_limit and entry_limit > 0:
|
|
||||||
with self._fc.write_transaction(self._cache_manifest) as (old, new):
|
|
||||||
# prune the oldest 10% or until we have removed 10% of
|
|
||||||
# total bytes starting from oldest entry
|
|
||||||
# TODO: make this configurable?
|
|
||||||
prune_count = entry_limit // 10
|
|
||||||
lines_to_prune = f.readlines(prune_count)
|
|
||||||
for i, line in enumerate(lines_to_prune):
|
|
||||||
sha, cache_entry_bytes = self._parse_manifest_entry(line)
|
|
||||||
if sha and cache_entry_bytes:
|
|
||||||
cache_path = self._cache_path_from_hash(sha)
|
|
||||||
if self._fc.remove(cache_path):
|
|
||||||
entry_count -= 1
|
|
||||||
manifest_bytes -= int(cache_entry_bytes)
|
|
||||||
else:
|
|
||||||
tty.warn(
|
|
||||||
f"Invalid concretization cache entry: '{line}' on line: {i+1}"
|
|
||||||
)
|
|
||||||
self._write_manifest(f, entry_count, manifest_bytes)
|
|
||||||
|
|
||||||
elif manifest_bytes > bytes_limit and bytes_limit > 0:
|
|
||||||
with self._fc.write_transaction(self._cache_manifest) as (old, new):
|
|
||||||
# take 10% of current size off
|
|
||||||
prune_amount = bytes_limit // 10
|
|
||||||
total_pruned = 0
|
|
||||||
i = 0
|
|
||||||
while total_pruned < prune_amount:
|
|
||||||
sha, manifest_cache_bytes = self._parse_manifest_entry(f.readline())
|
|
||||||
if sha and manifest_cache_bytes:
|
|
||||||
entry_bytes = int(manifest_cache_bytes)
|
|
||||||
cache_path = self.root / sha[:2] / sha
|
|
||||||
if self._safe_remove(cache_path):
|
|
||||||
entry_count -= 1
|
|
||||||
entry_bytes -= entry_bytes
|
|
||||||
total_pruned += entry_bytes
|
|
||||||
else:
|
|
||||||
tty.warn(
|
|
||||||
"Invalid concretization cache entry "
|
|
||||||
f"'{sha} {manifest_cache_bytes}' on line: {i}"
|
|
||||||
)
|
|
||||||
i += 1
|
|
||||||
self._write_manifest(f, entry_count, manifest_bytes)
|
|
||||||
for cache_dir in self.root.iterdir():
|
|
||||||
if cache_dir.is_dir() and not any(cache_dir.iterdir()):
|
|
||||||
self._safe_remove(cache_dir)
|
|
||||||
|
|
||||||
def cache_entries(self):
|
|
||||||
"""Generator producing cache entries"""
|
|
||||||
for cache_dir in self.root.iterdir():
|
|
||||||
# ensure component is cache entry directory
|
|
||||||
# not metadata file
|
|
||||||
if cache_dir.is_dir():
|
|
||||||
for cache_entry in cache_dir.iterdir():
|
|
||||||
if not cache_entry.is_dir():
|
|
||||||
yield cache_entry
|
|
||||||
else:
|
|
||||||
raise RuntimeError(
|
|
||||||
"Improperly formed concretization cache. "
|
|
||||||
f"Directory {cache_entry.name} is improperly located "
|
|
||||||
"within the concretization cache."
|
|
||||||
)
|
|
||||||
|
|
||||||
def _parse_manifest_entry(self, line):
|
|
||||||
"""Returns parsed manifest entry lines
|
|
||||||
with handling for invalid reads."""
|
|
||||||
if line:
|
|
||||||
cache_values = line.strip("\n").split(" ")
|
|
||||||
if len(cache_values) < 2:
|
|
||||||
tty.warn(f"Invalid cache entry at {line}")
|
|
||||||
return None, None
|
|
||||||
return None, None
|
|
||||||
|
|
||||||
def _write_manifest(self, manifest_file, entry_count, entry_bytes):
|
|
||||||
"""Writes new concretization cache manifest file.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
manifest_file: IO stream opened for readin
|
|
||||||
and writing wrapping the manifest file
|
|
||||||
with cursor at calltime set to location
|
|
||||||
where manifest should be truncated
|
|
||||||
entry_count: new total entry count
|
|
||||||
entry_bytes: new total entry bytes count
|
|
||||||
|
|
||||||
"""
|
|
||||||
persisted_entries = manifest_file.readlines()
|
|
||||||
manifest_file.truncate(0)
|
|
||||||
manifest_file.write(f"{entry_count} {entry_bytes}\n")
|
|
||||||
manifest_file.writelines(persisted_entries)
|
|
||||||
|
|
||||||
def _results_from_cache(self, cache_entry_buffer: IO[str]) -> Union[Result, None]:
|
|
||||||
"""Returns a Results object from the concretizer cache
|
|
||||||
|
|
||||||
Reads the cache hit and uses `Result`'s own deserializer
|
|
||||||
to produce a new Result object
|
|
||||||
"""
|
|
||||||
|
|
||||||
with current_file_position(cache_entry_buffer, 0):
|
|
||||||
cache_str = cache_entry_buffer.read()
|
|
||||||
# TODO: Should this be an error if None?
|
|
||||||
# Same for _stats_from_cache
|
|
||||||
if cache_str:
|
|
||||||
cache_entry = json.loads(cache_str)
|
|
||||||
result_json = cache_entry["results"]
|
|
||||||
return Result.from_dict(result_json)
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _stats_from_cache(self, cache_entry_buffer: IO[str]) -> Union[List, None]:
|
|
||||||
"""Returns concretization statistic from the
|
|
||||||
concretization associated with the cache.
|
|
||||||
|
|
||||||
Deserialzes the the json representation of the
|
|
||||||
statistics covering the cached concretization run
|
|
||||||
and returns the Python data structures
|
|
||||||
"""
|
|
||||||
with current_file_position(cache_entry_buffer, 0):
|
|
||||||
cache_str = cache_entry_buffer.read()
|
|
||||||
if cache_str:
|
|
-            return json.loads(cache_str)["statistics"]
-        return None
-
-    def _extract_cache_metadata(self, cache_stream: IO[str]):
-        """Extracts and returns cache entry count and bytes count from head of manifest
-        file"""
-        # make sure we're always reading from the beginning of the stream
-        # concretization cache manifest data lives at the top of the file
-        with current_file_position(cache_stream, 0):
-            return self._parse_manifest_entry(cache_stream.readline())
-
-    def _prefix_digest(self, problem: str) -> Tuple[str, str]:
-        """Return the first two characters of, and the full, sha256 of the given asp problem"""
-        prob_digest = hashlib.sha256(problem.encode()).hexdigest()
-        prefix = prob_digest[:2]
-        return prefix, prob_digest
-
-    def _cache_path_from_problem(self, problem: str) -> pathlib.Path:
-        """Returns a Path object representing the path to the cache
-        entry for the given problem"""
-        prefix, digest = self._prefix_digest(problem)
-        return pathlib.Path(prefix) / digest
-
-    def _cache_path_from_hash(self, hash: str) -> pathlib.Path:
-        """Returns a Path object representing the cache entry
-        corresponding to the given sha256 hash"""
-        return pathlib.Path(hash[:2]) / hash
-
-    def _lock_prefix_from_cache_path(self, cache_path: str):
-        """Returns the bit location corresponding to a given cache entry path
-        for file locking"""
-        return spack.util.hash.base32_prefix_bits(
-            spack.util.hash.b32_hash(cache_path), spack.util.crypto.bit_length(sys.maxsize)
-        )
-
-    def flush_manifest(self):
-        """Updates the concretization cache manifest file after a cache write operation
-        Updates the current byte count and entry counts and writes to the head of the
-        manifest file"""
-        manifest_file = self.root / self._cache_manifest
-        manifest_file.touch(exist_ok=True)
-        with open(manifest_file, "r+", encoding="utf-8") as f:
-            # check if manifest is empty
-            count, cache_bytes = self._extract_cache_metadata(f)
-            if not count or not cache_bytes:
-                # cache is uninitialized
-                count = 0
-                cache_bytes = 0
-            f.seek(0, io.SEEK_END)
-            for manifest_update in self._manifest_queue:
-                entry_path, entry_bytes = manifest_update
-                count += 1
-                cache_bytes += entry_bytes
-                f.write(f"{entry_path.name} {entry_bytes}")
-            f.seek(0, io.SEEK_SET)
-            new_stats = f"{int(count)+1} {int(cache_bytes)}\n"
-            f.write(new_stats)
-
-    def _register_cache_update(self, cache_path: pathlib.Path, bytes_written: int):
-        """Adds manifest entry to update queue for later updates to the manifest"""
-        self._manifest_queue.append((cache_path, bytes_written))
-
-    def _safe_remove(self, cache_dir: pathlib.Path):
-        """Removes cache entries with handling for the case where the entry has been
-        removed already or there are multiple cache entries in a directory"""
-        try:
-            if cache_dir.is_dir():
-                cache_dir.rmdir()
-            else:
-                cache_dir.unlink()
-            return True
-        except FileNotFoundError:
-            # This is acceptable, removal is idempotent
-            pass
-        except OSError as e:
-            if e.errno == errno.ENOTEMPTY:
-                # there exists another cache entry in this directory, don't clean yet
-                pass
-        return False
-
-    def store(self, problem: str, result: Result, statistics: List, test: bool = False):
-        """Creates entry in concretization cache for problem if none exists,
-        storing the concretization Result object and statistics in the cache
-        as serialized json joined as a single file.
-
-        Hash membership is computed based on the sha256 of the provided asp
-        problem.
-        """
-        cache_path = self._cache_path_from_problem(problem)
-        if self._fc.init_entry(cache_path):
-            # if an entry for this conc hash exists already, we don't want
-            # to overwrite, just exit
-            tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
-            return
-        with self._fc.write_transaction(cache_path) as (old, new):
-            if old:
-                # Entry for this conc hash exists already, do not overwrite
-                tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
-                return
-            cache_dict = {"results": result.to_dict(test=test), "statistics": statistics}
-            bytes_written = new.write(json.dumps(cache_dict))
-        self._register_cache_update(cache_path, bytes_written)
-
-    def fetch(self, problem: str) -> Union[Tuple[Result, List], Tuple[None, None]]:
-        """Returns the concretization cache result for a lookup based on the given problem.
-
-        Checks the concretization cache for the given problem, and either returns the
-        Python objects cached on disk representing the concretization results and statistics
-        or returns None if no cache entry was found.
-        """
-        cache_path = self._cache_path_from_problem(problem)
-        result, statistics = None, None
-        with self._fc.read_transaction(cache_path) as f:
-            if f:
-                result = self._results_from_cache(f)
-                statistics = self._stats_from_cache(f)
-        if result and statistics:
-            tty.debug(f"Concretization cache hit at {str(cache_path)}")
-            return result, statistics
-        tty.debug(f"Concretization cache miss at {str(cache_path)}")
-        return None, None
-
-
-CONC_CACHE: ConcretizationCache = llnl.util.lang.Singleton(
-    lambda: ConcretizationCache()
-)  # type: ignore
-
-
 def _normalize_packages_yaml(packages_yaml):
     normalized_yaml = copy.copy(packages_yaml)
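For reference, the removed cache addresses entries by content: the sha256 of the full ASP problem text names the entry, and its first two hex characters shard entries into subdirectories (see `_prefix_digest` and `_cache_path_from_problem` above). A minimal standalone sketch of that layout, using only the standard library (the function name here is illustrative, not Spack API):

    import hashlib
    import pathlib

    def cache_entry_path(problem: str) -> pathlib.Path:
        # sha256 of the whole problem text identifies the cache entry ...
        digest = hashlib.sha256(problem.encode()).hexdigest()
        # ... and the first two hex characters pick the shard directory
        return pathlib.Path(digest[:2]) / digest

    # cache_entry_path("fact(a).") -> a two-level path of the form "<2 hex chars>/<64-char digest>"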
@@ -1182,15 +801,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
         if sys.platform == "win32":
             tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
             spack.bootstrap.core.ensure_winsdk_external_or_raise()
-        control_files = ["concretize.lp", "heuristic.lp", "display.lp"]
-        if not setup.concretize_everything:
-            control_files.append("when_possible.lp")
-        if using_libc_compatibility():
-            control_files.append("libc_compatibility.lp")
-        else:
-            control_files.append("os_compatibility.lp")
-        if setup.enable_splicing:
-            control_files.append("splices.lp")
-
         timer.start("setup")
         asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
@@ -1200,30 +810,25 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
             return Result(specs), None, None
         timer.stop("setup")

-        timer.start("cache-check")
-        timer.start("ordering")
-        # ensure deterministic output
-        problem_repr = "\n".join(sorted(asp_problem.split("\n")))
-        timer.stop("ordering")
-        parent_dir = os.path.dirname(__file__)
-        full_path = lambda x: os.path.join(parent_dir, x)
-        abs_control_files = [full_path(x) for x in control_files]
-        for ctrl_file in abs_control_files:
-            with open(ctrl_file, "r", encoding="utf-8") as f:
-                problem_repr += "\n" + f.read()
-
-        result = None
-        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
-        if conc_cache_enabled:
-            result, concretization_stats = CONC_CACHE.fetch(problem_repr)
-
-        timer.stop("cache-check")
-        if not result:
         timer.start("load")
         # Add the problem instance
         self.control.add("base", [], asp_problem)
-        # Load the files
-        [self.control.load(lp) for lp in abs_control_files]
+        # Load the file itself
+        parent_dir = os.path.dirname(__file__)
+        self.control.load(os.path.join(parent_dir, "concretize.lp"))
+        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
+        self.control.load(os.path.join(parent_dir, "display.lp"))
+        if not setup.concretize_everything:
+            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
+
+        # Binary compatibility is based on libc on Linux, and on the os tag elsewhere
+        if using_libc_compatibility():
+            self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
+        else:
+            self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
+        if setup.enable_splicing:
+            self.control.load(os.path.join(parent_dir, "splices.lp"))
+
         timer.stop("load")

         # Grounding is the first step in the solve -- it turns our facts
@@ -1258,9 +863,7 @@ def on_model(model):
         finished = handle.wait(time_limit)
         if not finished:
             specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
-            header = (
-                f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
-            )
+            header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
             if error_on_timeout:
                 raise UnsatisfiableSpecError(f"{header}, stopping concretization")
             warnings.warn(f"{header}, using the best configuration found so far")
@@ -1284,9 +887,7 @@ def on_model(model):
         error_handler.raise_if_errors()

         # build specs from spec attributes in the model
-        spec_attrs = [
-            (name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")
-        ]
+        spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
         answers = builder.build_specs(spec_attrs)

         # add best spec to the results
@@ -1307,6 +908,14 @@ def on_model(model):
         result.control = self.control
         result.cores.extend(cores)

+        if output.timers:
+            timer.write_tty()
+            print()
+
+        if output.stats:
+            print("Statistics:")
+            pprint.pprint(self.control.statistics)
+
         result.raise_if_unsat()

         if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
@@ -1316,17 +925,8 @@ def on_model(model):
                 " that do not satisfy the request. Please report a bug at "
                 f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
             )
-        if conc_cache_enabled:
-            CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
-        concretization_stats = self.control.statistics
-        if output.timers:
-            timer.write_tty()
-            print()
-
-        if output.stats:
-            print("Statistics:")
-            pprint.pprint(concretization_stats)
-        return result, timer, concretization_stats
+        return result, timer, self.control.statistics


 class ConcreteSpecsByHash(collections.abc.Mapping):
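The removed wiring in solve() above is a fetch-or-compute pattern: build a deterministic key (the problem facts sorted line by line, plus the text of the control programs), try the cache, and only ground and solve on a miss, storing the result afterwards. A condensed sketch of that control flow; `cache`, `ground_and_solve` and `control_files` are stand-ins for the real objects, not Spack APIs:

    def cached_solve(asp_problem, control_files, cache, ground_and_solve, enabled=True):
        # sort the facts so logically identical problems produce the same key
        key = "\n".join(sorted(asp_problem.split("\n")))
        for path in control_files:
            with open(path, "r", encoding="utf-8") as f:
                key += "\n" + f.read()

        result, stats = cache.fetch(key) if enabled else (None, None)
        if result is None:
            # cache miss: do the expensive grounding and solving, then remember the answer
            result, stats = ground_and_solve(asp_problem)
            if enabled:
                cache.store(key, result, stats)
        return result, stats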
@@ -1768,7 +1368,7 @@ def effect_rules(self):
             return

         self.gen.h2("Imposed requirements")
-        for name in sorted(self._effect_cache):
+        for name in self._effect_cache:
             cache = self._effect_cache[name]
             for (spec_str, _), (effect_id, requirements) in cache.items():
                 self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
@@ -1821,8 +1421,8 @@ def define_variant(

         elif isinstance(values, vt.DisjointSetsOfValues):
             union = set()
-            for sid, s in enumerate(sorted(values.sets)):
-                for value in sorted(s):
+            for sid, s in enumerate(values.sets):
+                for value in s:
                     pkg_fact(fn.variant_value_from_disjoint_sets(vid, value, sid))
                 union.update(s)
             values = union
@@ -2003,7 +1603,7 @@ def package_provider_rules(self, pkg):
             self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(vpkg_name)))

         for when, provided in pkg.provided.items():
-            for vpkg in sorted(provided):
+            for vpkg in provided:
                 if vpkg.name not in self.possible_virtuals:
                     continue

@@ -2018,8 +1618,8 @@ def package_provider_rules(self, pkg):
             condition_id = self.condition(
                 when, required_name=pkg.name, msg="Virtuals are provided together"
             )
-            for set_id, virtuals_together in enumerate(sorted(sets_of_virtuals)):
-                for name in sorted(virtuals_together):
+            for set_id, virtuals_together in enumerate(sets_of_virtuals):
+                for name in virtuals_together:
                     self.gen.fact(
                         fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
                     )
@@ -2053,16 +1653,13 @@ def track_dependencies(input_spec, requirements):
            return requirements + [fn.attr("track_dependencies", input_spec.name)]

        def dependency_holds(input_spec, requirements):
-            result = remove_node(input_spec, requirements) + [
+            return remove_node(input_spec, requirements) + [
                fn.attr(
                    "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
                )
                for t in dt.ALL_FLAGS
                if t & depflag
            ]
-            if input_spec.name not in pkg.extendees:
-                return result
-            return result + [fn.attr("extends", pkg.name, input_spec.name)]

        context = ConditionContext()
        context.source = ConstraintOrigin.append_type_suffix(
@@ -2129,7 +1726,7 @@ def package_splice_rules(self, pkg):
            for map in pkg.variants.values():
                for k in map:
                    filt_match_variants.add(k)
-            filt_match_variants = sorted(filt_match_variants)
+            filt_match_variants = list(filt_match_variants)
            variant_constraints = self._gen_match_variant_splice_constraints(
                pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
            )
@@ -2234,8 +1831,8 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
            spec.attach_git_version_lookup()

            when_spec = spec
-            if virtual and spec.name != pkg_name:
-                when_spec = spack.spec.Spec(f"^[virtuals={pkg_name}] {spec.name}")
+            if virtual:
+                when_spec = spack.spec.Spec(pkg_name)

            try:
                context = ConditionContext()
@@ -2483,11 +2080,7 @@ def _spec_clauses(
        f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head

        if spec.name:
-            clauses.append(
-                f.node(spec.name)
-                if not spack.repo.PATH.is_virtual(spec.name)
-                else f.virtual_node(spec.name)
-            )
+            clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
        if spec.namespace:
            clauses.append(f.namespace(spec.name, spec.namespace))

@@ -2514,7 +2107,7 @@ def _spec_clauses(

        for value in variant.value_as_tuple:
            # ensure that the value *can* be valid for the spec
-            if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
+            if spec.name and not spec.concrete and not spec.virtual:
                variant_defs = vt.prevalidate_variant_value(
                    self.pkg_class(spec.name), variant, spec
                )
@@ -2659,7 +2252,7 @@ def define_package_versions_and_validate_preferences(
    ):
        """Declare any versions in specs not declared in packages."""
        packages_yaml = spack.config.get("packages")
-        for pkg_name in sorted(possible_pkgs):
+        for pkg_name in possible_pkgs:
            pkg_cls = self.pkg_class(pkg_name)

            # All the versions from the corresponding package.py file. Since concepts
@@ -2987,7 +2580,7 @@ def define_variant_values(self):
        """
        # Tell the concretizer about possible values from specs seen in spec_clauses().
        # We might want to order these facts by pkg and name if we are debugging.
-        for pkg_name, variant_def_id, value in sorted(self.variant_values_from_specs):
+        for pkg_name, variant_def_id, value in self.variant_values_from_specs:
            try:
                vid = self.variant_ids_by_def_id[variant_def_id]
            except KeyError:
@@ -3025,8 +2618,6 @@ def concrete_specs(self):
            # Declare as possible parts of specs that are not in package.py
            # - Add versions to possible versions
            # - Add OS to possible OS's
-
-            # is traverse deterministic?
            for dep in spec.traverse():
                self.possible_versions[dep.name].add(dep.version)
                if isinstance(dep.version, vn.GitVersion):
@@ -3076,9 +2667,7 @@ def setup(
        # Fail if we already know an unreachable node is requested
        for spec in specs:
            missing_deps = [
-                str(d)
-                for d in spec.traverse()
-                if d.name not in self.pkgs and not spack.repo.PATH.is_virtual(d.name)
+                str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
            ]
            if missing_deps:
                raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
@@ -3264,7 +2853,7 @@ def define_runtime_constraints(self):
        recorder.consume_facts()

    def literal_specs(self, specs):
-        for spec in sorted(specs):
+        for spec in specs:
            self.gen.h2("Spec: %s" % str(spec))
            condition_id = next(self._id_counter)
            trigger_id = next(self._id_counter)
@@ -3295,11 +2884,7 @@ def literal_specs(self, specs):
                    pkg_name = clause.args[1]
                    self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))

-            requirements.append(
-                fn.attr(
-                    "virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
-                )
-            )
+            requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
            cache[imposed_spec_key] = (effect_id, requirements)
            self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))

@@ -3765,7 +3350,7 @@ def consume_facts(self):
        # on the available compilers)
        self._setup.pkg_version_rules(runtime_pkg)

-        for imposed_spec, when_spec in sorted(self.runtime_conditions):
+        for imposed_spec, when_spec in self.runtime_conditions:
            msg = f"{when_spec} requires {imposed_spec} at runtime"
            _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)

@@ -4104,11 +3689,11 @@ def build_specs(self, function_tuples):
        roots = [spec.root for spec in self._specs.values()]
        roots = dict((id(r), r) for r in roots)
        for root in roots.values():
-            _inject_patches_variant(root)
+            spack.spec.Spec.inject_patches_variant(root)

        # Add external paths to specs with just external modules
        for s in self._specs.values():
-            _ensure_external_path_if_external(s)
+            spack.spec.Spec.ensure_external_path_if_external(s)

        for s in self._specs.values():
            _develop_specs_from_env(s, ev.active_environment())
@@ -4180,92 +3765,6 @@ def execute_explicit_splices(self):
        return specs


-def _inject_patches_variant(root: spack.spec.Spec) -> None:
-    # This dictionary will store object IDs rather than Specs as keys
-    # since the Spec __hash__ will change as patches are added to them
-    spec_to_patches: Dict[int, Set[spack.patch.Patch]] = {}
-    for s in root.traverse():
-        # After concretizing, assign namespaces to anything left.
-        # Note that this doesn't count as a "change". The repository
-        # configuration is constant throughout a spack run, and
-        # normalize and concretize evaluate Packages using Repo.get(),
-        # which respects precedence. So, a namespace assignment isn't
-        # changing how a package name would have been interpreted and
-        # we can do it as late as possible to allow as much
-        # compatibility across repositories as possible.
-        if s.namespace is None:
-            s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
-
-        if s.concrete:
-            continue
-
-        # Add any patches from the package to the spec.
-        node_patches = {
-            patch
-            for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items()
-            if s.satisfies(cond)
-            for patch in patch_list
-        }
-        if node_patches:
-            spec_to_patches[id(s)] = node_patches
-
-    # Also record all patches required on dependencies by depends_on(..., patch=...)
-    for dspec in root.traverse_edges(deptype=dt.ALL, cover="edges", root=False):
-        if dspec.spec.concrete:
-            continue
-
-        pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
-
-        edge_patches: List[spack.patch.Patch] = []
-        for cond, deps_by_name in pkg_deps.items():
-            if not dspec.parent.satisfies(cond):
-                continue
-
-            dependency = deps_by_name.get(dspec.spec.name)
-            if not dependency:
-                continue
-
-            for pcond, patch_list in dependency.patches.items():
-                if dspec.spec.satisfies(pcond):
-                    edge_patches.extend(patch_list)
-
-        if edge_patches:
-            spec_to_patches.setdefault(id(dspec.spec), set()).update(edge_patches)
-
-    for spec in root.traverse():
-        if id(spec) not in spec_to_patches:
-            continue
-
-        patches = list(spec_to_patches[id(spec)])
-        variant: vt.MultiValuedVariant = spec.variants.setdefault(
-            "patches", vt.MultiValuedVariant("patches", ())
-        )
-        variant.value = tuple(p.sha256 for p in patches)
-        # FIXME: Monkey patches variant to store patches order
-        ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
-        ordered_hashes.sort()
-        tty.debug(
-            f"Ordered hashes [{spec.name}]: "
-            + ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
-        )
-        setattr(
-            variant, "_patches_in_order_of_appearance", [sha256 for _, _, sha256 in ordered_hashes]
-        )
-
-
-def _ensure_external_path_if_external(spec: spack.spec.Spec) -> None:
-    if not spec.external_modules or spec.external_path:
-        return
-
-    # Get the path from the module the package can override the default
-    # (this is mostly needed for Cray)
-    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
-    package = pkg_cls(spec)
-    spec.external_path = getattr(package, "external_prefix", None) or md.path_from_modules(
-        spec.external_modules
-    )
-
-
 def _develop_specs_from_env(spec, env):
    dev_info = env.dev_specs.get(spec.name, {}) if env else {}
    if not dev_info:
@@ -4586,10 +4085,10 @@ def _check_input_and_extract_concrete_specs(specs):
    reusable = []
    for root in specs:
        for s in root.traverse():
+            if s.virtual:
+                continue
            if s.concrete:
                reusable.append(s)
-            elif spack.repo.PATH.is_virtual(s.name):
-                continue
            spack.spec.Spec.ensure_valid_variants(s)
    return reusable

@@ -4622,9 +4121,6 @@ def solve_with_stats(
        reusable_specs.extend(self.selector.reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
-
-        CONC_CACHE.flush_manifest()
-        CONC_CACHE.cleanup()
        return self.driver.solve(
            setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
        )
@@ -4694,9 +4190,6 @@ def solve_in_rounds(
            for spec in result.specs:
                reusable_specs.extend(spec.traverse())

-            CONC_CACHE.flush_manifest()
-            CONC_CACHE.cleanup()
-

 class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
    """There was an issue with the spec that was requested (i.e. a user error)."""
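The flush_manifest()/cleanup() calls removed above maintain a small manifest file whose first line records "<entry count> <total bytes>", followed by one "<entry name> <bytes>" line per cache write (see flush_manifest earlier in this diff). A rough sketch of that bookkeeping on an open text stream; the actual header parsing lives in _parse_manifest_entry, which is not shown here, so the exact line format is an assumption:

    import io
    from typing import IO, Optional, Tuple

    def read_manifest_header(stream: IO[str]) -> Tuple[Optional[int], Optional[int]]:
        # first line is assumed to be "<entry count> <total bytes>"; anything else means uninitialized
        stream.seek(0, io.SEEK_SET)
        parts = stream.readline().split()
        if len(parts) != 2 or not all(p.isdigit() for p in parts):
            return None, None
        return int(parts[0]), int(parts[1])

    def append_manifest_entries(stream: IO[str], updates) -> None:
        count, total = read_manifest_header(stream)
        count, total = count or 0, total or 0
        stream.seek(0, io.SEEK_END)
        for name, nbytes in updates:            # queued (entry name, bytes written) pairs
            stream.write(f"{name} {nbytes}\n")
            count, total = count + 1, total + nbytes
        stream.seek(0, io.SEEK_SET)
        stream.write(f"{count} {total}\n")      # rewrite the counters at the head of the file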
@@ -265,7 +265,6 @@ error(100, "Cannot select a single version for virtual '{0}'", Virtual)
 % If we select a deprecated version, mark the package as deprecated
 attr("deprecated", node(ID, Package), Version) :-
   attr("version", node(ID, Package), Version),
-  not external(node(ID, Package)),
   pkg_fact(Package, deprecated_version(Version)).

 error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)
@@ -524,16 +523,6 @@ error(10, "'{0}' is not a valid dependency for any package in the DAG", Package)
   :- attr("node", node(ID, Package)),
      not needed(node(ID, Package)).

-
-% Extensions depending on each other must all extend the same node (e.g. all Python packages
-% depending on each other must depend on the same Python interpreter)
-error(100, "{0} and {1} must depend on the same {2}", ExtensionParent, ExtensionChild, ExtendeePackage)
-  :- depends_on(ExtensionParent, ExtensionChild),
-     attr("extends", ExtensionParent, ExtendeePackage),
-     depends_on(ExtensionParent, node(X, ExtendeePackage)),
-     depends_on(ExtensionChild, node(Y, ExtendeePackage)),
-     X != Y.
-
 #defined dependency_type/2.

 %-----------------------------------------------------------------------------
@@ -597,13 +586,6 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
 attr("virtual_on_incoming_edges", ProviderNode, Virtual)
   :- attr("virtual_on_edge", _, ProviderNode, Virtual).

-% This is needed to allow requirement on virtuals,
-% when a virtual root is requested
-attr("virtual_on_incoming_edges", ProviderNode, Virtual)
-  :- attr("virtual_root", node(min_dupe_id, Virtual)),
-     attr("root", ProviderNode),
-     provider(ProviderNode, node(min_dupe_id, Virtual)).
-
 % dependencies on virtuals also imply that the virtual is a virtual node
 1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) }
   :- node_depends_on_virtual(PackageNode, Virtual).
@@ -960,14 +942,12 @@ error(100, "Cannot set variant '{0}' for package '{1}' because the variant condi
   build(node(ID, Package)).

 % at most one variant value for single-valued variants.
-error(100, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
+error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
   :- attr("node", node(ID, Package)),
      node_has_variant(node(ID, Package), Variant, _),
      variant_single_value(node(ID, Package), Variant),
-     attr("variant_value", node(ID, Package), Variant, Value1),
-     attr("variant_value", node(ID, Package), Variant, Value2),
-     Value1 < Value2,
-     build(node(ID, Package)).
+     build(node(ID, Package)),
+     2 { attr("variant_value", node(ID, Package), Variant, Value) }.

 error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
   :- attr("node", node(ID, Package)),
@@ -117,7 +117,7 @@ error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, C
   condition_holds(Cause, node(CID, TriggerPkg)).

 % At most one variant value for single-valued variants
-error(0, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
+error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
   :- attr("node", node(X, Package)),
      node_has_variant(node(X, Package), Variant, VariantID),
      variant_single_value(node(X, Package), Variant),
@@ -381,9 +381,7 @@ def __init__(
        self.all_types = dt.LINK | dt.RUN | dt.BUILD

        self._possible_dependencies: Set[str] = set()
-        self._possible_virtuals: Set[str] = {
-            x.name for x in specs if spack.repo.PATH.is_virtual(x.name)
-        }
+        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)

    def possible_dependencies(self) -> Set[str]:
        """Returns the list of possible dependencies"""
@@ -468,29 +466,16 @@ def possible_packages_facts(self, gen, fn):
        gen.newline()

        gen.h2("Packages with at multiple possible nodes (build-tools)")
-        default = spack.config.CONFIG.get("concretizer:duplicates:max_dupes:default", 2)
        for package_name in sorted(self.possible_dependencies() & build_tools):
-            max_dupes = spack.config.CONFIG.get(
-                f"concretizer:duplicates:max_dupes:{package_name}", default
-            )
-            gen.fact(fn.max_dupes(package_name, max_dupes))
-            if max_dupes > 1:
+            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

-        gen.h2("Maximum number of nodes (link-run virtuals)")
-        for package_name in sorted(self._link_run_virtuals):
+        gen.h2("Maximum number of nodes (virtual packages)")
+        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

-        gen.h2("Maximum number of nodes (other virtuals)")
-        for package_name in sorted(self.possible_virtuals() - self._link_run_virtuals):
-            max_dupes = spack.config.CONFIG.get(
-                f"concretizer:duplicates:max_dupes:{package_name}", default
-            )
-            gen.fact(fn.max_dupes(package_name, max_dupes))
-        gen.newline()
-
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
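The removed counter lines above replace a hard-coded duplicate limit with per-package lookups under the `concretizer:duplicates:max_dupes` configuration section, falling back to a configurable default. A small sketch of that lookup against a plain nested dict standing in for the Spack configuration object:

    def max_dupes_for(config: dict, package_name: str, fallback: int = 2) -> int:
        # per-package override first, then the configured default, then the hard fallback
        dupes = config.get("concretizer", {}).get("duplicates", {}).get("max_dupes", {})
        default = dupes.get("default", fallback)
        return dupes.get(package_name, default)

    # max_dupes_for({"concretizer": {"duplicates": {"max_dupes": {"cmake": 3}}}}, "cmake") == 3
    # max_dupes_for({}, "gmake") == 2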
@@ -10,7 +10,7 @@
 import spack.error
 import spack.package_base
 import spack.spec
-from spack.util.spack_yaml import get_mark_from_yaml_data
+from spack.config import get_mark_from_yaml_data


 class RequirementKind(enum.Enum):
@@ -66,29 +66,18 @@ def rules_from_package_py(self, pkg: spack.package_base.PackageBase) -> List[Req
        return rules

    def rules_from_virtual(self, virtual_str: str) -> List[RequirementRule]:
-        kind, requests = self._raw_yaml_data(virtual_str, section="require", virtual=True)
-        result = self._rules_from_requirements(virtual_str, requests, kind=kind)
-
-        kind, requests = self._raw_yaml_data(virtual_str, section="prefer", virtual=True)
-        result.extend(self._rules_from_preferences(virtual_str, preferences=requests, kind=kind))
-
-        kind, requests = self._raw_yaml_data(virtual_str, section="conflict", virtual=True)
-        result.extend(self._rules_from_conflicts(virtual_str, conflicts=requests, kind=kind))
-
-        return result
+        requirements = self.config.get("packages", {}).get(virtual_str, {}).get("require", [])
+        return self._rules_from_requirements(
+            virtual_str, requirements, kind=RequirementKind.VIRTUAL
+        )

    def rules_from_require(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
-        kind, requirements = self._raw_yaml_data(pkg.name, section="require")
+        kind, requirements = self._raw_yaml_data(pkg, section="require")
        return self._rules_from_requirements(pkg.name, requirements, kind=kind)

    def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
-        kind, preferences = self._raw_yaml_data(pkg.name, section="prefer")
-        return self._rules_from_preferences(pkg.name, preferences=preferences, kind=kind)
-
-    def _rules_from_preferences(
-        self, pkg_name: str, *, preferences, kind: RequirementKind
-    ) -> List[RequirementRule]:
        result = []
+        kind, preferences = self._raw_yaml_data(pkg, section="prefer")
        for item in preferences:
            spec, condition, message = self._parse_prefer_conflict_item(item)
            result.append(
@@ -97,7 +86,7 @@ def _rules_from_preferences(
                # require:
                # - any_of: [spec_str, "@:"]
                RequirementRule(
-                    pkg_name=pkg_name,
+                    pkg_name=pkg.name,
                    policy="any_of",
                    requirements=[spec, spack.spec.Spec("@:")],
                    kind=kind,
@@ -108,13 +97,8 @@ def _rules_from_preferences(
        return result

    def rules_from_conflict(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
-        kind, conflicts = self._raw_yaml_data(pkg.name, section="conflict")
-        return self._rules_from_conflicts(pkg.name, conflicts=conflicts, kind=kind)
-
-    def _rules_from_conflicts(
-        self, pkg_name: str, *, conflicts, kind: RequirementKind
-    ) -> List[RequirementRule]:
        result = []
+        kind, conflicts = self._raw_yaml_data(pkg, section="conflict")
        for item in conflicts:
            spec, condition, message = self._parse_prefer_conflict_item(item)
            result.append(
@@ -123,7 +107,7 @@ def _rules_from_conflicts(
                # require:
                # - one_of: [spec_str, "@:"]
                RequirementRule(
-                    pkg_name=pkg_name,
+                    pkg_name=pkg.name,
                    policy="one_of",
                    requirements=[spec, spack.spec.Spec("@:")],
                    kind=kind,
@@ -145,14 +129,10 @@ def _parse_prefer_conflict_item(self, item):
        message = item.get("message")
        return spec, condition, message

-    def _raw_yaml_data(self, pkg_name: str, *, section: str, virtual: bool = False):
+    def _raw_yaml_data(self, pkg: spack.package_base.PackageBase, *, section: str):
        config = self.config.get("packages")
-        data = config.get(pkg_name, {}).get(section, [])
+        data = config.get(pkg.name, {}).get(section, [])
        kind = RequirementKind.PACKAGE
-
-        if virtual:
-            return RequirementKind.VIRTUAL, data
-
        if not data:
            data = config.get("all", {}).get(section, [])
            kind = RequirementKind.DEFAULT
@@ -185,8 +165,7 @@ def _rules_from_requirements(

        # validate specs from YAML first, and fail with line numbers if parsing fails.
        constraints = [
-            parse_spec_from_yaml_string(constraint, named=kind == RequirementKind.VIRTUAL)
-            for constraint in constraints
+            parse_spec_from_yaml_string(constraint) for constraint in constraints
        ]
        when_str = requirement.get("when")
        when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
@@ -224,7 +203,7 @@ def reject_requirement_constraint(
            s.validate_or_raise()
        except spack.error.SpackError as e:
            tty.debug(
-                f"[{__name__}] Rejecting the default '{constraint}' requirement "
+                f"[SETUP] Rejecting the default '{constraint}' requirement "
                f"on '{pkg_name}': {str(e)}",
                level=2,
            )
@@ -232,37 +211,21 @@ def reject_requirement_constraint(
        return False


-def parse_spec_from_yaml_string(string: str, *, named: bool = False) -> spack.spec.Spec:
+def parse_spec_from_yaml_string(string: str) -> spack.spec.Spec:
    """Parse a spec from YAML and add file/line info to errors, if it's available.

    Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
    add file/line information for debugging using file/line annotations from the string.

-    Args:
+    Arguments:
        string: a string representing a ``Spec`` from config YAML.
-        named: if True, the spec must have a name
    """
    try:
-        result = spack.spec.Spec(string)
+        return spack.spec.Spec(string)
    except spack.error.SpecSyntaxError as e:
        mark = get_mark_from_yaml_data(string)
        if mark:
            msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
            raise spack.error.SpecSyntaxError(msg) from e
        raise e
-
-    if named is True and not result.name:
-        msg = f"expected a named spec, but got '{string}' instead"
-        mark = get_mark_from_yaml_data(string)
-
-        # Add a hint in case it's dependencies
-        deps = result.dependencies()
-        if len(deps) == 1:
-            msg = f"{msg}. Did you mean '{deps[0]}'?"
-
-        if mark:
-            msg = f"{mark.name}:{mark.line + 1}: {msg}"
-
-        raise spack.error.SpackError(msg)
-
-    return result
@@ -52,7 +52,6 @@
 import enum
 import io
 import itertools
-import json
 import os
 import pathlib
 import platform
@@ -97,7 +96,9 @@
 import spack.spec_parser
 import spack.store
 import spack.traverse
+import spack.util.executable
 import spack.util.hash
+import spack.util.module_cmd as md
 import spack.util.prefix
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
@@ -174,17 +175,15 @@
 #: Spec(Spec("string").format()) == Spec("string)"
 DEFAULT_FORMAT = (
     "{name}{@versions}"
-    "{compiler_flags}"
+    "{%compiler.name}{@compiler.versions}{compiler_flags}"
     "{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
-    " {%compiler.name}{@compiler.versions}"
 )

 #: Display format, which eliminates extra `@=` in the output, for readability.
 DISPLAY_FORMAT = (
     "{name}{@version}"
-    "{compiler_flags}"
+    "{%compiler.name}{@compiler.version}{compiler_flags}"
     "{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
-    " {%compiler.name}{@compiler.version}"
 )

 #: Regular expression to pull spec contents out of clearsigned signature
@@ -799,7 +798,7 @@ def update_deptypes(self, depflag: dt.DepFlag) -> bool:
        self.depflag = new
        return True

-    def update_virtuals(self, virtuals: Iterable[str]) -> bool:
+    def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
        """Update the list of provided virtuals"""
        old = self.virtuals
        self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
@@ -1109,6 +1108,28 @@ def clear(self):
        self.edges.clear()


+def _command_default_handler(spec: "Spec"):
+    """Default handler when looking for the 'command' attribute.
+
+    Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.
+
+    Parameters:
+        spec: spec that is being queried
+
+    Returns:
+        Executable: An executable of the command
+
+    Raises:
+        RuntimeError: If the command is not found
+    """
+    home = getattr(spec.package, "home")
+    path = os.path.join(home.bin, spec.name)
+
+    if fs.is_exe(path):
+        return spack.util.executable.Executable(path)
+    raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
+
+
 def _headers_default_handler(spec: "Spec"):
    """Default handler when looking for the 'headers' attribute.

@@ -1312,22 +1333,18 @@ class SpecBuildInterface(lang.ObjectWrapper):
    home = ForwardQueryToPackage("home", default_handler=None)
    headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
    libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
-    command = ForwardQueryToPackage("command", default_handler=None, _indirect=True)
+    command = ForwardQueryToPackage(
+        "command", default_handler=_command_default_handler, _indirect=True
+    )

-    def __init__(
-        self,
-        spec: "Spec",
-        name: str,
-        query_parameters: List[str],
-        _parent: "Spec",
-        is_virtual: bool,
-    ):
+    def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
        super().__init__(spec)
        # Adding new attributes goes after super() call since the ObjectWrapper
        # resets __dict__ to behave like the passed object
        original_spec = getattr(spec, "wrapped_obj", spec)
        self.wrapped_obj = original_spec
-        self.token = original_spec, name, query_parameters, _parent, is_virtual
+        self.token = original_spec, name, query_parameters, _parent
+        is_virtual = spack.repo.PATH.is_virtual(name)
        self.last_query = QueryState(
            name=name, extra_parameters=query_parameters, isvirtual=is_virtual
        )
@@ -1490,7 +1507,7 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
        self.abstract_hash = None

        # initial values for all spec hash types
-        for h in ht.HASHES:
+        for h in ht.hashes:
            setattr(self, h.attr, None)

        # cache for spec's prefix, computed lazily by prefix property
@@ -1898,12 +1915,6 @@ def package_class(self):

    @property
    def virtual(self):
-        warnings.warn(
-            "`Spec.virtual` is deprecated and will be removed in version 1.0.0. Use "
-            "`spack.repo.PATH.is_virtual(spec.name)` instead.",
-            category=spack.error.SpackAPIWarning,
-            stacklevel=2,
-        )
        return spack.repo.PATH.is_virtual(self.name)

    @property
@@ -2083,34 +2094,32 @@ def long_spec(self):
|
|||||||
def short_spec(self):
|
def short_spec(self):
|
||||||
"""Returns a version of the spec with the dependencies hashed
|
"""Returns a version of the spec with the dependencies hashed
|
||||||
instead of completely enumerated."""
|
instead of completely enumerated."""
|
||||||
return self.format(
|
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||||
"{name}{@version}{variants}{ arch=architecture}"
|
spec_format += "{variants}{ arch=architecture}{/hash:7}"
|
||||||
"{/hash:7}{%compiler.name}{@compiler.version}"
|
return self.format(spec_format)
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def cshort_spec(self):
|
def cshort_spec(self):
|
||||||
"""Returns an auto-colorized version of ``self.short_spec``."""
|
"""Returns an auto-colorized version of ``self.short_spec``."""
|
||||||
return self.cformat(
|
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||||
"{name}{@version}{variants}{ arch=architecture}"
|
spec_format += "{variants}{ arch=architecture}{/hash:7}"
|
||||||
"{/hash:7}{%compiler.name}{@compiler.version}"
|
return self.cformat(spec_format)
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def prefix(self) -> spack.util.prefix.Prefix:
|
def prefix(self):
|
||||||
if not self._concrete:
|
if not self._concrete:
|
||||||
raise spack.error.SpecError(f"Spec is not concrete: {self}")
|
raise spack.error.SpecError("Spec is not concrete: " + str(self))
|
||||||
|
|
||||||
if self._prefix is None:
|
if self._prefix is None:
|
||||||
_, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
|
upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
|
||||||
if record and record.path:
|
if record and record.path:
|
||||||
self.set_prefix(record.path)
|
self.prefix = record.path
|
||||||
else:
|
else:
|
||||||
self.set_prefix(spack.store.STORE.layout.path_for_spec(self))
|
self.prefix = spack.store.STORE.layout.path_for_spec(self)
|
||||||
assert self._prefix is not None
|
|
||||||
return self._prefix
|
return self._prefix
|
||||||
|
|
||||||
def set_prefix(self, value: str) -> None:
|
@prefix.setter
|
||||||
|
def prefix(self, value):
|
||||||
self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))
|
self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))
|
||||||
|
|
||||||
def spec_hash(self, hash):
|
def spec_hash(self, hash):
|
||||||
@@ -2124,9 +2133,7 @@ def spec_hash(self, hash):
|
|||||||
if hash.override is not None:
|
if hash.override is not None:
|
||||||
return hash.override(self)
|
return hash.override(self)
|
||||||
node_dict = self.to_node_dict(hash=hash)
|
node_dict = self.to_node_dict(hash=hash)
|
||||||
json_text = json.dumps(
|
json_text = sjson.dump(node_dict)
|
||||||
node_dict, ensure_ascii=True, indent=None, separators=(",", ":"), sort_keys=False
|
|
||||||
)
|
|
||||||
# This implements "frankenhashes", preserving the last 7 characters of the
|
# This implements "frankenhashes", preserving the last 7 characters of the
|
||||||
# original hash when splicing so that we can avoid relocation issues
|
# original hash when splicing so that we can avoid relocation issues
|
||||||
out = spack.util.hash.b32_hash(json_text)
|
out = spack.util.hash.b32_hash(json_text)
|
||||||
@@ -2173,16 +2180,30 @@ def package_hash(self):
|
|||||||
def dag_hash(self, length=None):
|
def dag_hash(self, length=None):
|
||||||
"""This is Spack's default hash, used to identify installations.
|
"""This is Spack's default hash, used to identify installations.
|
||||||
|
|
||||||
|
Same as the full hash (includes package hash and build/link/run deps).
|
||||||
|
Tells us when package files and any dependencies have changes.
|
||||||
|
|
||||||
NOTE: Versions of Spack prior to 0.18 only included link and run deps.
|
NOTE: Versions of Spack prior to 0.18 only included link and run deps.
|
||||||
NOTE: Versions of Spack prior to 1.0 only did not include test deps.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return self._cached_hash(ht.dag_hash, length)
|
return self._cached_hash(ht.dag_hash, length)
|
||||||
|
|
||||||
|
def process_hash(self, length=None):
|
||||||
|
"""Hash used to transfer specs among processes.
|
||||||
|
|
||||||
|
This hash includes build and test dependencies and is only used to
|
||||||
|
serialize a spec and pass it around among processes.
|
||||||
|
"""
|
||||||
|
return self._cached_hash(ht.process_hash, length)
|
||||||
|
|
||||||
def dag_hash_bit_prefix(self, bits):
|
def dag_hash_bit_prefix(self, bits):
|
||||||
"""Get the first <bits> bits of the DAG hash as an integer type."""
|
"""Get the first <bits> bits of the DAG hash as an integer type."""
|
||||||
return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
|
return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
|
||||||
|
|
||||||
|
def process_hash_bit_prefix(self, bits):
|
||||||
|
"""Get the first <bits> bits of the DAG hash as an integer type."""
|
||||||
|
return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)
|
||||||
|
|
||||||
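As a side note, the *_bit_prefix helpers above reduce a base32 hash to its leading bits as an integer (this is what __hash__ relies on further down). A self-contained sketch of that idea, not Spack's actual spack.util.hash implementation; the example hash is made up:

import base64


def b32_prefix_bits(b32_hash: str, bits: int) -> int:
    """Decode a lowercase base32 string and keep only its leading `bits` bits."""
    raw = base64.b32decode(b32_hash.upper())
    as_int = int.from_bytes(raw, "big")
    return as_int >> (len(raw) * 8 - bits)


# A Spack dag hash is 32 base32 characters (160 bits); keep the first 64.
example = "mcrzmfrkcmdtcnjdaggp5rt3np3mbcr2"  # hypothetical hash, not a real spec
print(hex(b32_prefix_bits(example, 64)))
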
def _lookup_hash(self):
|
def _lookup_hash(self):
|
||||||
"""Lookup just one spec with an abstract hash, returning a spec from the the environment,
|
"""Lookup just one spec with an abstract hash, returning a spec from the the environment,
|
||||||
store, or finally, binary caches."""
|
store, or finally, binary caches."""
|
||||||
@@ -2673,7 +2694,7 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
|
|||||||
return name, depflag
|
return name, depflag
|
||||||
|
|
||||||
def spec_and_dependency_types(
|
def spec_and_dependency_types(
|
||||||
s: Union[Spec, Tuple[Spec, str]],
|
s: Union[Spec, Tuple[Spec, str]]
|
||||||
) -> Tuple[Spec, dt.DepFlag]:
|
) -> Tuple[Spec, dt.DepFlag]:
|
||||||
"""Given a non-string key in the literal, extracts the spec
|
"""Given a non-string key in the literal, extracts the spec
|
||||||
and its dependency types.
|
and its dependency types.
|
||||||
@@ -2702,7 +2723,7 @@ def spec_and_dependency_types(
|
|||||||
return spec_builder(spec_dict)
|
return spec_builder(spec_dict)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_dict(data) -> "Spec":
|
def from_dict(data):
|
||||||
"""Construct a spec from JSON/YAML.
|
"""Construct a spec from JSON/YAML.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -2725,7 +2746,7 @@ def from_dict(data) -> "Spec":
|
|||||||
return spec
|
return spec
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_yaml(stream) -> "Spec":
|
def from_yaml(stream):
|
||||||
"""Construct a spec from YAML.
|
"""Construct a spec from YAML.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -2735,7 +2756,7 @@ def from_yaml(stream) -> "Spec":
|
|||||||
return Spec.from_dict(data)
|
return Spec.from_dict(data)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_json(stream) -> "Spec":
|
def from_json(stream):
|
||||||
"""Construct a spec from JSON.
|
"""Construct a spec from JSON.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -2745,7 +2766,7 @@ def from_json(stream) -> "Spec":
|
|||||||
data = sjson.load(stream)
|
data = sjson.load(stream)
|
||||||
return Spec.from_dict(data)
|
return Spec.from_dict(data)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise sjson.SpackJSONError("error parsing JSON spec:", e) from e
|
raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def extract_json_from_clearsig(data):
|
def extract_json_from_clearsig(data):
|
||||||
@@ -2793,6 +2814,24 @@ def from_detection(
|
|||||||
s.extra_attributes = extra_attributes
|
s.extra_attributes = extra_attributes
|
||||||
return s
|
return s
|
||||||
|
|
||||||
|
def validate_detection(self):
|
||||||
|
"""Validate the detection of an external spec.
|
||||||
|
|
||||||
|
This method is used as part of Spack's detection protocol, and is
|
||||||
|
not meant for client code use.
|
||||||
|
"""
|
||||||
|
# Assert that _extra_attributes is a Mapping and not None,
|
||||||
|
# which likely means the spec was created with Spec.from_detection
|
||||||
|
msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
|
||||||
|
self
|
||||||
|
)
|
||||||
|
assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
|
||||||
|
|
||||||
|
# Validate the spec calling a package specific method
|
||||||
|
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
|
||||||
|
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
|
||||||
|
validate_fn(self, self.extra_attributes)
|
||||||
|
|
||||||
def _patches_assigned(self):
|
def _patches_assigned(self):
|
||||||
"""Whether patches have been assigned to this spec by the concretizer."""
|
"""Whether patches have been assigned to this spec by the concretizer."""
|
||||||
# FIXME: _patches_in_order_of_appearance is attached after concretization
|
# FIXME: _patches_in_order_of_appearance is attached after concretization
|
||||||
@@ -2809,6 +2848,94 @@ def _patches_assigned(self):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def inject_patches_variant(root):
|
||||||
|
# This dictionary will store object IDs rather than Specs as keys
|
||||||
|
# since the Spec __hash__ will change as patches are added to them
|
||||||
|
spec_to_patches = {}
|
||||||
|
for s in root.traverse():
|
||||||
|
# After concretizing, assign namespaces to anything left.
|
||||||
|
# Note that this doesn't count as a "change". The repository
|
||||||
|
# configuration is constant throughout a spack run, and
|
||||||
|
# normalize and concretize evaluate Packages using Repo.get(),
|
||||||
|
# which respects precedence. So, a namespace assignment isn't
|
||||||
|
# changing how a package name would have been interpreted and
|
||||||
|
# we can do it as late as possible to allow as much
|
||||||
|
# compatibility across repositories as possible.
|
||||||
|
if s.namespace is None:
|
||||||
|
s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
|
||||||
|
|
||||||
|
if s.concrete:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Add any patches from the package to the spec.
|
||||||
|
patches = set()
|
||||||
|
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
|
||||||
|
if s.satisfies(cond):
|
||||||
|
for patch in patch_list:
|
||||||
|
patches.add(patch)
|
||||||
|
if patches:
|
||||||
|
spec_to_patches[id(s)] = patches
|
||||||
|
|
||||||
|
# Also record all patches required on dependencies by
|
||||||
|
# depends_on(..., patch=...)
|
||||||
|
for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
|
||||||
|
if dspec.spec.concrete:
|
||||||
|
continue
|
||||||
|
|
||||||
|
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
|
||||||
|
|
||||||
|
patches = []
|
||||||
|
for cond, deps_by_name in pkg_deps.items():
|
||||||
|
if not dspec.parent.satisfies(cond):
|
||||||
|
continue
|
||||||
|
|
||||||
|
dependency = deps_by_name.get(dspec.spec.name)
|
||||||
|
if not dependency:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for pcond, patch_list in dependency.patches.items():
|
||||||
|
if dspec.spec.satisfies(pcond):
|
||||||
|
patches.extend(patch_list)
|
||||||
|
|
||||||
|
if patches:
|
||||||
|
all_patches = spec_to_patches.setdefault(id(dspec.spec), set())
|
||||||
|
for patch in patches:
|
||||||
|
all_patches.add(patch)
|
||||||
|
|
||||||
|
for spec in root.traverse():
|
||||||
|
if id(spec) not in spec_to_patches:
|
||||||
|
continue
|
||||||
|
|
||||||
|
patches = list(lang.dedupe(spec_to_patches[id(spec)]))
|
||||||
|
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
|
||||||
|
mvar.value = tuple(p.sha256 for p in patches)
|
||||||
|
# FIXME: Monkey patches mvar to store patches order
|
||||||
|
full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
|
||||||
|
ordered_hashes = sorted(full_order_keys)
|
||||||
|
tty.debug(
|
||||||
|
"Ordered hashes [{0}]: ".format(spec.name)
|
||||||
|
+ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
|
||||||
|
)
|
||||||
|
mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
|
||||||
|
|
||||||
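For reference, the ordering trick above can be illustrated with a small standalone snippet: the "patches" variant stores the patch sha256s, while the application order is kept separately, sorted by (ordering key, sha256). Names and hashes below are invented:

from collections import namedtuple

Patch = namedtuple("Patch", ["ordering_key", "sha256"])

patches = [
    Patch(("pkg-b", 0), "bbbb"),
    Patch(("pkg-a", 1), "a111"),
    Patch(("pkg-a", 0), "a000"),
]

# Value stored on the "patches" variant: just the hashes, in appearance order.
variant_value = tuple(p.sha256 for p in patches)

# Separate ordering, analogous to mvar._patches_in_order_of_appearance.
ordered = [sha for _, sha in sorted((p.ordering_key, p.sha256) for p in patches)]

print(variant_value)  # ('bbbb', 'a111', 'a000')
print(ordered)        # ['a000', 'a111', 'bbbb']
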
|
@staticmethod
|
||||||
|
def ensure_external_path_if_external(external_spec):
|
||||||
|
if external_spec.external_modules and not external_spec.external_path:
|
||||||
|
compiler = spack.compilers.compiler_for_spec(
|
||||||
|
external_spec.compiler, external_spec.architecture
|
||||||
|
)
|
||||||
|
for mod in compiler.modules:
|
||||||
|
md.load_module(mod)
|
||||||
|
|
||||||
|
# Get the path from the module the package can override the default
|
||||||
|
# (this is mostly needed for Cray)
|
||||||
|
pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
|
||||||
|
package = pkg_cls(external_spec)
|
||||||
|
external_spec.external_path = getattr(
|
||||||
|
package, "external_prefix", md.path_from_modules(external_spec.external_modules)
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def ensure_no_deprecated(root):
|
def ensure_no_deprecated(root):
|
||||||
"""Raise if a deprecated spec is in the dag.
|
"""Raise if a deprecated spec is in the dag.
|
||||||
@@ -2963,7 +3090,7 @@ def validate_or_raise(self):
|
|||||||
# FIXME: raise just the first one encountered
|
# FIXME: raise just the first one encountered
|
||||||
for spec in self.traverse():
|
for spec in self.traverse():
|
||||||
# raise an UnknownPackageError if the spec's package isn't real.
|
# raise an UnknownPackageError if the spec's package isn't real.
|
||||||
if spec.name and not spack.repo.PATH.is_virtual(spec.name):
|
if (not spec.virtual) and spec.name:
|
||||||
spack.repo.PATH.get_pkg_class(spec.fullname)
|
spack.repo.PATH.get_pkg_class(spec.fullname)
|
||||||
|
|
||||||
# validate compiler in addition to the package name.
|
# validate compiler in addition to the package name.
|
||||||
@@ -2972,7 +3099,7 @@ def validate_or_raise(self):
|
|||||||
raise UnsupportedCompilerError(spec.compiler.name)
|
raise UnsupportedCompilerError(spec.compiler.name)
|
||||||
|
|
||||||
# Ensure correctness of variants (if the spec is not virtual)
|
# Ensure correctness of variants (if the spec is not virtual)
|
||||||
if not spack.repo.PATH.is_virtual(spec.name):
|
if not spec.virtual:
|
||||||
Spec.ensure_valid_variants(spec)
|
Spec.ensure_valid_variants(spec)
|
||||||
substitute_abstract_variants(spec)
|
substitute_abstract_variants(spec)
|
||||||
|
|
||||||
@@ -3207,9 +3334,7 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
|||||||
|
|
||||||
# If the names are different, we need to consider virtuals
|
# If the names are different, we need to consider virtuals
|
||||||
if self.name != other.name and self.name and other.name:
|
if self.name != other.name and self.name and other.name:
|
||||||
self_virtual = spack.repo.PATH.is_virtual(self.name)
|
if self.virtual and other.virtual:
|
||||||
other_virtual = spack.repo.PATH.is_virtual(other.name)
|
|
||||||
if self_virtual and other_virtual:
|
|
||||||
# Two virtual specs intersect only if there are providers for both
|
# Two virtual specs intersect only if there are providers for both
|
||||||
lhs = spack.repo.PATH.providers_for(str(self))
|
lhs = spack.repo.PATH.providers_for(str(self))
|
||||||
rhs = spack.repo.PATH.providers_for(str(other))
|
rhs = spack.repo.PATH.providers_for(str(other))
|
||||||
@@ -3217,8 +3342,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
|||||||
return bool(intersection)
|
return bool(intersection)
|
||||||
|
|
||||||
# A provider can satisfy a virtual dependency.
|
# A provider can satisfy a virtual dependency.
|
||||||
elif self_virtual or other_virtual:
|
elif self.virtual or other.virtual:
|
||||||
virtual_spec, non_virtual_spec = (self, other) if self_virtual else (other, self)
|
virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
|
||||||
try:
|
try:
|
||||||
# Here we might get an abstract spec
|
# Here we might get an abstract spec
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
|
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
|
||||||
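A toy illustration (not Spack code) of the two virtual-handling branches above: two virtuals intersect only if some package provides both, and a concrete package intersects a virtual it provides. The provider table is hypothetical:

PROVIDERS = {"mpi": {"mpich", "openmpi"}, "blas": {"openblas"}}


def is_virtual(name: str) -> bool:
    return name in PROVIDERS


def names_intersect(lhs: str, rhs: str) -> bool:
    if lhs == rhs:
        return True
    if is_virtual(lhs) and is_virtual(rhs):
        # Two virtual specs intersect only if there is a provider for both.
        return bool(PROVIDERS[lhs] & PROVIDERS[rhs])
    if is_virtual(lhs) or is_virtual(rhs):
        virtual, other = (lhs, rhs) if is_virtual(lhs) else (rhs, lhs)
        return other in PROVIDERS[virtual]
    return False


print(names_intersect("mpich", "mpi"))  # True: a provider can satisfy a virtual
print(names_intersect("mpi", "blas"))   # False: nothing provides both
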
@@ -3288,20 +3413,12 @@ def _intersects_dependencies(self, other):
|
|||||||
# These two loops handle cases where there is an overly restrictive
|
# These two loops handle cases where there is an overly restrictive
|
||||||
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
|
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
|
||||||
# compatible with mpich2)
|
# compatible with mpich2)
|
||||||
for spec in self.traverse():
|
for spec in self.virtual_dependencies():
|
||||||
if (
|
if spec.name in other_index and not other_index.providers_for(spec):
|
||||||
spack.repo.PATH.is_virtual(spec.name)
|
|
||||||
and spec.name in other_index
|
|
||||||
and not other_index.providers_for(spec)
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
for spec in other.traverse():
|
for spec in other.virtual_dependencies():
|
||||||
if (
|
if spec.name in self_index and not self_index.providers_for(spec):
|
||||||
spack.repo.PATH.is_virtual(spec.name)
|
|
||||||
and spec.name in self_index
|
|
||||||
and not self_index.providers_for(spec)
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
@@ -3331,9 +3448,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
|||||||
# If the names are different, we need to consider virtuals
|
# If the names are different, we need to consider virtuals
|
||||||
if self.name != other.name and self.name and other.name:
|
if self.name != other.name and self.name and other.name:
|
||||||
# A concrete provider can satisfy a virtual dependency.
|
# A concrete provider can satisfy a virtual dependency.
|
||||||
if not spack.repo.PATH.is_virtual(self.name) and spack.repo.PATH.is_virtual(
|
if not self.virtual and other.virtual:
|
||||||
other.name
|
|
||||||
):
|
|
||||||
try:
|
try:
|
||||||
# Here we might get an abstract spec
|
# Here we might get an abstract spec
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
|
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
|
||||||
@@ -3401,7 +3516,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
|||||||
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
|
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
|
||||||
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
|
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
|
||||||
# If we are checking for ^mpi we need to verify if there is any edge
|
# If we are checking for ^mpi we need to verify if there is any edge
|
||||||
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
|
if rhs_edge.spec.virtual:
|
||||||
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
|
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
|
||||||
|
|
||||||
if not rhs_edge.virtuals:
|
if not rhs_edge.virtuals:
|
||||||
@@ -3445,6 +3560,10 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
|||||||
for rhs in other.traverse(root=False)
|
for rhs in other.traverse(root=False)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def virtual_dependencies(self):
|
||||||
|
"""Return list of any virtual deps in this spec."""
|
||||||
|
return [spec for spec in self.traverse() if spec.virtual]
|
||||||
|
|
||||||
@property # type: ignore[misc] # decorated prop not supported in mypy
|
@property # type: ignore[misc] # decorated prop not supported in mypy
|
||||||
def patches(self):
|
def patches(self):
|
||||||
"""Return patch objects for any patch sha256 sums on this Spec.
|
"""Return patch objects for any patch sha256 sums on this Spec.
|
||||||
@@ -3549,11 +3668,11 @@ def _dup(self, other: "Spec", deps: Union[bool, dt.DepTypes, dt.DepFlag] = True)
|
|||||||
|
|
||||||
if self._concrete:
|
if self._concrete:
|
||||||
self._dunder_hash = other._dunder_hash
|
self._dunder_hash = other._dunder_hash
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
setattr(self, h.attr, getattr(other, h.attr, None))
|
setattr(self, h.attr, getattr(other, h.attr, None))
|
||||||
else:
|
else:
|
||||||
self._dunder_hash = None
|
self._dunder_hash = None
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
setattr(self, h.attr, None)
|
setattr(self, h.attr, None)
|
||||||
|
|
||||||
return changed
|
return changed
|
||||||
@@ -3634,23 +3753,30 @@ def __getitem__(self, name: str):
|
|||||||
csv = query_parameters.pop().strip()
|
csv = query_parameters.pop().strip()
|
||||||
query_parameters = re.split(r"\s*,\s*", csv)
|
query_parameters = re.split(r"\s*,\s*", csv)
|
||||||
|
|
||||||
# Consider all direct dependencies and transitive runtime dependencies
|
order = lambda: itertools.chain(
|
||||||
order = itertools.chain(
|
self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
|
||||||
self.edges_to_dependencies(depflag=dt.ALL),
|
self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
|
||||||
self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
|
self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
|
||||||
|
# and prefer matches closest to the root.
|
||||||
try:
|
try:
|
||||||
edge = next((e for e in order if e.spec.name == name or name in e.virtuals))
|
child: Spec = next(
|
||||||
except StopIteration as e:
|
e.spec
|
||||||
raise KeyError(f"No spec with name {name} in {self}") from e
|
for e in itertools.chain(
|
||||||
|
(e for e in order() if e.spec.name == name or name in e.virtuals),
|
||||||
|
# for historical reasons
|
||||||
|
(e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except StopIteration:
|
||||||
|
raise KeyError(f"No spec with name {name} in {self}")
|
||||||
|
|
||||||
if self._concrete:
|
if self._concrete:
|
||||||
return SpecBuildInterface(
|
return SpecBuildInterface(child, name, query_parameters, _parent=self)
|
||||||
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
|
|
||||||
)
|
|
||||||
|
|
||||||
return edge.spec
|
return child
|
||||||
|
|
||||||
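Hedged usage sketch for the lookup above; it assumes a working Spack checkout with its builtin repository and configuration available, and the package names are only examples:

import spack.concretize

root = spack.concretize.concretize_one("hdf5+mpi")

# Index by package name or by a virtual it provides; link/run dependencies and
# direct build dependencies are preferred over transitive ones.
mpi = root["mpi"]
print(mpi.name, mpi.dag_hash(7))

# A name that is not in the DAG raises KeyError rather than returning None.
try:
    root["not-a-dependency"]
except KeyError as err:
    print(err)
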
def __contains__(self, spec):
|
def __contains__(self, spec):
|
||||||
"""True if this spec or some dependency satisfies the spec.
|
"""True if this spec or some dependency satisfies the spec.
|
||||||
@@ -3666,11 +3792,8 @@ def __contains__(self, spec):
         # if anonymous or same name, we only have to look at the root
         if not spec.name or spec.name == self.name:
             return self.satisfies(spec)
-        try:
-            dep = self[spec.name]
-        except KeyError:
-            return False
-        return dep.satisfies(spec)
+        else:
+            return any(s.satisfies(spec) for s in self.traverse(root=False))

     def eq_dag(self, other, deptypes=True, vs=None, vo=None):
         """True if the full dependency DAGs of specs are equal."""
@@ -3745,6 +3868,16 @@ def _cmp_iter(self):
|
|||||||
# serialized before the hash change and one after, are considered different.
|
# serialized before the hash change and one after, are considered different.
|
||||||
yield self.dag_hash() if self.concrete else None
|
yield self.dag_hash() if self.concrete else None
|
||||||
|
|
||||||
|
# This needs to be in _cmp_iter so that no specs with different process hashes
|
||||||
|
# are considered the same by `__hash__` or `__eq__`.
|
||||||
|
#
|
||||||
|
# TODO: We should eventually unify the `_cmp_*` methods with `to_node_dict` so
|
||||||
|
# TODO: there aren't two sources of truth, but this needs some thought, since
|
||||||
|
# TODO: they exist for speed. We should benchmark whether it's really worth
|
||||||
|
# TODO: having two types of hashing now that we use `json` instead of `yaml` for
|
||||||
|
# TODO: spec hashing.
|
||||||
|
yield self.process_hash() if self.concrete else None
|
||||||
|
|
||||||
def deps():
|
def deps():
|
||||||
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
|
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
|
||||||
yield dep.spec.name
|
yield dep.spec.name
|
||||||
@@ -4398,7 +4531,7 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
|
|||||||
"""
|
"""
|
||||||
Clears all cached hashes in a Spec, while preserving other properties.
|
Clears all cached hashes in a Spec, while preserving other properties.
|
||||||
"""
|
"""
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
if h.attr not in ignore:
|
if h.attr not in ignore:
|
||||||
if hasattr(self, h.attr):
|
if hasattr(self, h.attr):
|
||||||
setattr(self, h.attr, None)
|
setattr(self, h.attr, None)
|
||||||
@@ -4407,12 +4540,18 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
|
|||||||
setattr(self, attr, None)
|
setattr(self, attr, None)
|
||||||
|
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
# If the spec is concrete, we leverage the dag hash and just use a 64-bit prefix of it.
|
# If the spec is concrete, we leverage the process hash and just use
|
||||||
# The dag hash has the advantage that it's computed once per concrete spec, and it's saved
|
# a 64-bit prefix of it. The process hash has the advantage that it's
|
||||||
# -- so if we read concrete specs we don't need to recompute the whole hash.
|
# computed once per concrete spec, and it's saved -- so if we read
|
||||||
|
# concrete specs we don't need to recompute the whole hash. This is
|
||||||
|
# good for large, unchanging specs.
|
||||||
|
#
|
||||||
|
# We use the process hash instead of the DAG hash here because the DAG
|
||||||
|
# hash includes the package hash, which can cause infinite recursion,
|
||||||
|
# and which isn't defined unless the spec has a known package.
|
||||||
if self.concrete:
|
if self.concrete:
|
||||||
if not self._dunder_hash:
|
if not self._dunder_hash:
|
||||||
self._dunder_hash = self.dag_hash_bit_prefix(64)
|
self._dunder_hash = self.process_hash_bit_prefix(64)
|
||||||
return self._dunder_hash
|
return self._dunder_hash
|
||||||
|
|
||||||
# This is the normal hash for lazy_lexicographic_ordering. It's
|
# This is the normal hash for lazy_lexicographic_ordering. It's
|
||||||
@@ -4421,7 +4560,7 @@ def __hash__(self):
|
|||||||
return hash(lang.tuplify(self._cmp_iter))
|
return hash(lang.tuplify(self._cmp_iter))
|
||||||
|
|
||||||
def __reduce__(self):
|
def __reduce__(self):
|
||||||
return Spec.from_dict, (self.to_dict(hash=ht.dag_hash),)
|
return Spec.from_dict, (self.to_dict(hash=ht.process_hash),)
|
||||||
|
|
||||||
def attach_git_version_lookup(self):
|
def attach_git_version_lookup(self):
|
||||||
# Add a git lookup method for GitVersions
|
# Add a git lookup method for GitVersions
|
||||||
@@ -4564,6 +4703,17 @@ def constrain(self, other: "VariantMap") -> bool:
|
|||||||
|
|
||||||
return changed
|
return changed
|
||||||
|
|
||||||
|
@property
|
||||||
|
def concrete(self):
|
||||||
|
"""Returns True if the spec is concrete in terms of variants.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True or False
|
||||||
|
"""
|
||||||
|
return self.spec._concrete or all(
|
||||||
|
v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
|
||||||
|
)
|
||||||
|
|
||||||
def copy(self) -> "VariantMap":
|
def copy(self) -> "VariantMap":
|
||||||
clone = VariantMap(self.spec)
|
clone = VariantMap(self.spec)
|
||||||
for name, variant in self.items():
|
for name, variant in self.items():
|
||||||
@@ -4690,51 +4840,31 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
|
|||||||
return merged_spec
|
return merged_spec
|
||||||
|
|
||||||
|
|
||||||
def reconstruct_virtuals_on_edges(spec: Spec) -> None:
|
def reconstruct_virtuals_on_edges(spec):
|
||||||
"""Reconstruct virtuals on edges. Used to read from old DB and reindex."""
|
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.
|
||||||
virtuals_needed: Dict[str, Set[str]] = {}
|
|
||||||
virtuals_provided: Dict[str, Set[str]] = {}
|
Args:
|
||||||
for edge in spec.traverse_edges(cover="edges", root=False):
|
spec: spec on which we want to reconstruct virtuals
|
||||||
parent_key = edge.parent.dag_hash()
|
"""
|
||||||
if parent_key not in virtuals_needed:
|
# Collect all possible virtuals
|
||||||
# Construct which virtuals are needed by parent
|
possible_virtuals = set()
|
||||||
virtuals_needed[parent_key] = set()
|
for node in spec.traverse():
|
||||||
try:
|
try:
|
||||||
parent_pkg = edge.parent.package
|
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
warnings.warn(
|
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
|
||||||
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
|
|
||||||
)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
virtuals_needed[parent_key].update(
|
# Assume all incoming edges to provider are marked with virtuals=
|
||||||
name
|
for vspec in possible_virtuals:
|
||||||
for name, when_deps in parent_pkg.dependencies_by_name(when=True).items()
|
|
||||||
if spack.repo.PATH.is_virtual(name)
|
|
||||||
and any(edge.parent.satisfies(x) for x in when_deps)
|
|
||||||
)
|
|
||||||
|
|
||||||
if not virtuals_needed[parent_key]:
|
|
||||||
continue
|
|
||||||
|
|
||||||
child_key = edge.spec.dag_hash()
|
|
||||||
if child_key not in virtuals_provided:
|
|
||||||
virtuals_provided[child_key] = set()
|
|
||||||
try:
|
try:
|
||||||
child_pkg = edge.spec.package
|
provider = spec[vspec]
|
||||||
except Exception as e:
|
except KeyError:
|
||||||
warnings.warn(
|
# Virtual not in the DAG
|
||||||
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
virtuals_provided[child_key].update(x.name for x in child_pkg.virtuals_provided)
|
|
||||||
|
|
||||||
if not virtuals_provided[child_key]:
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
virtuals_to_add = virtuals_needed[parent_key] & virtuals_provided[child_key]
|
for edge in provider.edges_from_dependents():
|
||||||
if virtuals_to_add:
|
edge.update_virtuals([vspec])
|
||||||
edge.update_virtuals(virtuals_to_add)
|
|
||||||
|
|
||||||
|
|
||||||
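A toy sketch of the needed/provided intersection used by one of the two reconstruction implementations above: an edge is annotated with exactly the virtuals the parent needs and the child provides. Package metadata is faked with plain dicts:

needed_by_parent = {"mpileaks": {"mpi"}}
provided_by_child = {"mpich": {"mpi"}, "zlib": set()}

edges = [("mpileaks", "mpich"), ("mpileaks", "zlib")]
for parent, child in edges:
    virtuals = needed_by_parent.get(parent, set()) & provided_by_child.get(child, set())
    if virtuals:
        print(f"{parent} -> {child}: virtuals={sorted(virtuals)}")
# Only the mpileaks -> mpich edge is annotated, with virtuals=['mpi'].
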
class SpecfileReaderBase:
|
class SpecfileReaderBase:
|
||||||
@@ -4743,7 +4873,7 @@ def from_node_dict(cls, node):
|
|||||||
spec = Spec()
|
spec = Spec()
|
||||||
|
|
||||||
name, node = cls.name_and_data(node)
|
name, node = cls.name_and_data(node)
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
setattr(spec, h.attr, node.get(h.name, None))
|
setattr(spec, h.attr, node.get(h.name, None))
|
||||||
|
|
||||||
spec.name = name
|
spec.name = name
|
||||||
@@ -4926,7 +5056,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
|||||||
"""
|
"""
|
||||||
for dep_name, elt in deps.items():
|
for dep_name, elt in deps.items():
|
||||||
if isinstance(elt, dict):
|
if isinstance(elt, dict):
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
if h.name in elt:
|
if h.name in elt:
|
||||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||||
hash_type = h.name
|
hash_type = h.name
|
||||||
@@ -4969,7 +5099,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
|||||||
dep_name = dep["name"]
|
dep_name = dep["name"]
|
||||||
if isinstance(elt, dict):
|
if isinstance(elt, dict):
|
||||||
# new format: elements of dependency spec are keyed.
|
# new format: elements of dependency spec are keyed.
|
||||||
for h in ht.HASHES:
|
for h in ht.hashes:
|
||||||
if h.name in elt:
|
if h.name in elt:
|
||||||
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
|
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
|
||||||
break
|
break
|
||||||
@@ -5079,13 +5209,6 @@ def get_host_environment() -> Dict[str, Any]:
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def eval_conditional(string):
|
|
||||||
"""Evaluate conditional definitions using restricted variable scope."""
|
|
||||||
valid_variables = get_host_environment()
|
|
||||||
valid_variables.update({"re": re, "env": os.environ})
|
|
||||||
return eval(string, valid_variables)
|
|
||||||
|
|
||||||
|
|
||||||
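For context, eval_conditional above evaluates a conditional expression against a small, explicit variable scope. A self-contained sketch of the same pattern (the host variables here are invented stand-ins for get_host_environment(), and builtins are stripped only for illustration):

import os
import re

valid_variables = {
    "platform": "linux",
    "target": "x86_64",
    "re": re,
    "env": os.environ,
}

condition = 'platform == "linux" and re.match(r"x86", target) is not None'
print(eval(condition, {"__builtins__": {}}, valid_variables))  # True
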
class SpecParseError(spack.error.SpecError):
|
class SpecParseError(spack.error.SpecError):
|
||||||
"""Wrapper for ParseError for when we're parsing specs."""
|
"""Wrapper for ParseError for when we're parsing specs."""
|
||||||
|
|
||||||
@@ -5244,10 +5367,8 @@ def __init__(self, spec):
|
|||||||
|
|
||||||
class AmbiguousHashError(spack.error.SpecError):
|
class AmbiguousHashError(spack.error.SpecError):
|
||||||
def __init__(self, msg, *specs):
|
def __init__(self, msg, *specs):
|
||||||
spec_fmt = (
|
spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
|
||||||
"{namespace}.{name}{@version}{compiler_flags}{variants}"
|
spec_fmt += "{variants}{ arch=architecture}{/hash:7}"
|
||||||
"{ arch=architecture}{/hash:7}{%compiler}"
|
|
||||||
)
|
|
||||||
specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
|
specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
|
||||||
super().__init__(msg + specs_str)
|
super().__init__(msg + specs_str)
|
||||||
|
|
||||||
|
|||||||
@@ -60,17 +60,13 @@
|
|||||||
import pathlib
|
import pathlib
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
from typing import Iterator, List, Optional
|
||||||
import warnings
|
|
||||||
from typing import Iterator, List, Optional, Tuple
|
|
||||||
|
|
||||||
from llnl.util.tty import color
|
from llnl.util.tty import color
|
||||||
|
|
||||||
import spack.deptypes
|
import spack.deptypes
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.paths
|
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.spack_yaml
|
|
||||||
import spack.version
|
import spack.version
|
||||||
from spack.tokenize import Token, TokenBase, Tokenizer
|
from spack.tokenize import Token, TokenBase, Tokenizer
|
||||||
|
|
||||||
@@ -208,32 +204,6 @@ def __init__(self, tokens: List[Token], text: str):
|
|||||||
super().__init__(message)
|
super().__init__(message)
|
||||||
|
|
||||||
|
|
||||||
def _warn_about_variant_after_compiler(literal_str: str, issues: List[str]):
|
|
||||||
"""Issue a warning if variant or other token is preceded by a compiler token. The warning is
|
|
||||||
only issued if it's actionable: either we know the config file it originates from, or we have
|
|
||||||
a call site that's not internal to Spack."""
|
|
||||||
ignore = [spack.paths.lib_path, spack.paths.bin_path]
|
|
||||||
mark = spack.util.spack_yaml.get_mark_from_yaml_data(literal_str)
|
|
||||||
issue_str = ", ".join(issues)
|
|
||||||
error = f"{issue_str} in `{literal_str}`"
|
|
||||||
|
|
||||||
# warning from config file
|
|
||||||
if mark:
|
|
||||||
warnings.warn(f"{mark.name}:{mark.line + 1}: {error}")
|
|
||||||
return
|
|
||||||
|
|
||||||
# warning from hopefully package.py
|
|
||||||
for frame in reversed(traceback.extract_stack()):
|
|
||||||
if frame.lineno and not any(frame.filename.startswith(path) for path in ignore):
|
|
||||||
warnings.warn_explicit(
|
|
||||||
error,
|
|
||||||
category=spack.error.SpackAPIWarning,
|
|
||||||
filename=frame.filename,
|
|
||||||
lineno=frame.lineno,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
class SpecParser:
|
class SpecParser:
|
||||||
"""Parse text into specs"""
|
"""Parse text into specs"""
|
||||||
|
|
||||||
@@ -272,31 +242,26 @@ def add_dependency(dep, **edge_properties):
|
|||||||
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e
|
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e
|
||||||
|
|
||||||
initial_spec = initial_spec or spack.spec.Spec()
|
initial_spec = initial_spec or spack.spec.Spec()
|
||||||
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
|
root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
|
||||||
while True:
|
while True:
|
||||||
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
|
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
|
||||||
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
|
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
|
||||||
edge_properties.setdefault("depflag", 0)
|
edge_properties.setdefault("depflag", 0)
|
||||||
edge_properties.setdefault("virtuals", ())
|
edge_properties.setdefault("virtuals", ())
|
||||||
dependency, warnings = self._parse_node(root_spec)
|
dependency = self._parse_node(root_spec)
|
||||||
parser_warnings.extend(warnings)
|
|
||||||
add_dependency(dependency, **edge_properties)
|
add_dependency(dependency, **edge_properties)
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.DEPENDENCY):
|
elif self.ctx.accept(SpecTokens.DEPENDENCY):
|
||||||
dependency, warnings = self._parse_node(root_spec)
|
dependency = self._parse_node(root_spec)
|
||||||
parser_warnings.extend(warnings)
|
|
||||||
add_dependency(dependency, depflag=0, virtuals=())
|
add_dependency(dependency, depflag=0, virtuals=())
|
||||||
|
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
if parser_warnings:
|
|
||||||
_warn_about_variant_after_compiler(self.literal_str, parser_warnings)
|
|
||||||
|
|
||||||
return root_spec
|
return root_spec
|
||||||
|
|
||||||
def _parse_node(self, root_spec):
|
def _parse_node(self, root_spec):
|
||||||
dependency, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse()
|
dependency = SpecNodeParser(self.ctx, self.literal_str).parse()
|
||||||
if dependency is None:
|
if dependency is None:
|
||||||
msg = (
|
msg = (
|
||||||
"the dependency sigil and any optional edge attributes must be followed by a "
|
"the dependency sigil and any optional edge attributes must be followed by a "
|
||||||
@@ -305,7 +270,7 @@ def _parse_node(self, root_spec):
|
|||||||
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
||||||
if root_spec.concrete:
|
if root_spec.concrete:
|
||||||
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
|
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
|
||||||
return dependency, parser_warnings
|
return dependency
|
||||||
|
|
||||||
def all_specs(self) -> List["spack.spec.Spec"]:
|
def all_specs(self) -> List["spack.spec.Spec"]:
|
||||||
"""Return all the specs that remain to be parsed"""
|
"""Return all the specs that remain to be parsed"""
|
||||||
@@ -325,7 +290,7 @@ def __init__(self, ctx, literal_str):
|
|||||||
|
|
||||||
def parse(
|
def parse(
|
||||||
self, initial_spec: Optional["spack.spec.Spec"] = None
|
self, initial_spec: Optional["spack.spec.Spec"] = None
|
||||||
) -> Tuple["spack.spec.Spec", List[str]]:
|
) -> Optional["spack.spec.Spec"]:
|
||||||
"""Parse a single spec node from a stream of tokens
|
"""Parse a single spec node from a stream of tokens
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -334,15 +299,12 @@ def parse(
|
|||||||
Return
|
Return
|
||||||
The object passed as argument
|
The object passed as argument
|
||||||
"""
|
"""
|
||||||
parser_warnings: List[str] = []
|
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
|
||||||
last_compiler = None
|
return initial_spec
|
||||||
|
|
||||||
if initial_spec is None:
|
if initial_spec is None:
|
||||||
initial_spec = spack.spec.Spec()
|
initial_spec = spack.spec.Spec()
|
||||||
|
|
||||||
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
|
|
||||||
return initial_spec, parser_warnings
|
|
||||||
|
|
||||||
# If we start with a package name we have a named spec, we cannot
|
# If we start with a package name we have a named spec, we cannot
|
||||||
# accept another package name afterwards in a node
|
# accept another package name afterwards in a node
|
||||||
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
|
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
|
||||||
@@ -356,7 +318,7 @@ def parse(
|
|||||||
initial_spec.namespace = namespace
|
initial_spec.namespace = namespace
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.FILENAME):
|
elif self.ctx.accept(SpecTokens.FILENAME):
|
||||||
return FileParser(self.ctx).parse(initial_spec), parser_warnings
|
return FileParser(self.ctx).parse(initial_spec)
|
||||||
|
|
||||||
def raise_parsing_error(string: str, cause: Optional[Exception] = None):
|
def raise_parsing_error(string: str, cause: Optional[Exception] = None):
|
||||||
"""Raise a spec parsing error with token context."""
|
"""Raise a spec parsing error with token context."""
|
||||||
@@ -369,12 +331,6 @@ def add_flag(name: str, value: str, propagate: bool):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise_parsing_error(str(e), e)
|
raise_parsing_error(str(e), e)
|
||||||
|
|
||||||
def warn_if_after_compiler(token: str):
|
|
||||||
"""Register a warning for %compiler followed by +variant that will in the future apply
|
|
||||||
to the compiler instead of the current root."""
|
|
||||||
if last_compiler:
|
|
||||||
parser_warnings.append(f"`{token}` should go before `{last_compiler}`")
|
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
if self.ctx.accept(SpecTokens.COMPILER):
|
if self.ctx.accept(SpecTokens.COMPILER):
|
||||||
if self.has_compiler:
|
if self.has_compiler:
|
||||||
@@ -383,7 +339,6 @@ def warn_if_after_compiler(token: str):
|
|||||||
compiler_name = self.ctx.current_token.value[1:]
|
compiler_name = self.ctx.current_token.value[1:]
|
||||||
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
|
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
|
||||||
self.has_compiler = True
|
self.has_compiler = True
|
||||||
last_compiler = self.ctx.current_token.value
|
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
|
elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
|
||||||
if self.has_compiler:
|
if self.has_compiler:
|
||||||
@@ -394,7 +349,6 @@ def warn_if_after_compiler(token: str):
|
|||||||
compiler_name.strip(), compiler_version
|
compiler_name.strip(), compiler_version
|
||||||
)
|
)
|
||||||
self.has_compiler = True
|
self.has_compiler = True
|
||||||
last_compiler = self.ctx.current_token.value
|
|
||||||
|
|
||||||
elif (
|
elif (
|
||||||
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
|
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
|
||||||
@@ -409,17 +363,14 @@ def warn_if_after_compiler(token: str):
|
|||||||
)
|
)
|
||||||
initial_spec.attach_git_version_lookup()
|
initial_spec.attach_git_version_lookup()
|
||||||
self.has_version = True
|
self.has_version = True
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
|
elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
|
||||||
variant_value = self.ctx.current_token.value[0] == "+"
|
variant_value = self.ctx.current_token.value[0] == "+"
|
||||||
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
|
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
|
elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
|
||||||
variant_value = self.ctx.current_token.value[0:2] == "++"
|
variant_value = self.ctx.current_token.value[0:2] == "++"
|
||||||
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
|
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
|
elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
|
||||||
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
||||||
@@ -427,7 +378,6 @@ def warn_if_after_compiler(token: str):
|
|||||||
|
|
||||||
name, _, value = match.groups()
|
name, _, value = match.groups()
|
||||||
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
|
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
|
elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
|
||||||
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
||||||
@@ -435,19 +385,17 @@ def warn_if_after_compiler(token: str):
|
|||||||
|
|
||||||
name, _, value = match.groups()
|
name, _, value = match.groups()
|
||||||
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
|
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
elif self.ctx.expect(SpecTokens.DAG_HASH):
|
elif self.ctx.expect(SpecTokens.DAG_HASH):
|
||||||
if initial_spec.abstract_hash:
|
if initial_spec.abstract_hash:
|
||||||
break
|
break
|
||||||
self.ctx.accept(SpecTokens.DAG_HASH)
|
self.ctx.accept(SpecTokens.DAG_HASH)
|
||||||
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
|
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
|
||||||
warn_if_after_compiler(self.ctx.current_token.value)
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
return initial_spec, parser_warnings
|
return initial_spec
|
||||||
|
|
||||||
|
|
||||||
class FileParser:
|
class FileParser:
|
||||||
@@ -537,18 +485,23 @@ def parse_one_or_raise(
|
|||||||
text (str): text to be parsed
|
text (str): text to be parsed
|
||||||
initial_spec: spec object into which the text is parsed. If None, a new one is created.
|
initial_spec: spec object into which the text is parsed. If None, a new one is created.
|
||||||
"""
|
"""
|
||||||
parser = SpecParser(text)
|
stripped_text = text.strip()
|
||||||
|
parser = SpecParser(stripped_text)
|
||||||
result = parser.next_spec(initial_spec)
|
result = parser.next_spec(initial_spec)
|
||||||
next_token = parser.ctx.next_token
|
last_token = parser.ctx.current_token
|
||||||
|
|
||||||
if next_token:
|
if last_token is not None and last_token.end != len(stripped_text):
|
||||||
message = f"expected a single spec, but got more:\n{text}"
|
message = "a single spec was requested, but parsed more than one:"
|
||||||
underline = f"\n{' ' * next_token.start}{'^' * len(next_token.value)}"
|
message += f"\n{text}"
|
||||||
|
if last_token is not None:
|
||||||
|
underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}"
|
||||||
message += color.colorize(f"@*r{{{underline}}}")
|
message += color.colorize(f"@*r{{{underline}}}")
|
||||||
raise ValueError(message)
|
raise ValueError(message)
|
||||||
|
|
||||||
if result is None:
|
if result is None:
|
||||||
raise ValueError("expected a single spec, but got none")
|
message = "a single spec was requested, but none was parsed:"
|
||||||
|
message += f"\n{text}"
|
||||||
|
raise ValueError(message)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
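Hedged usage sketch for the single-spec entry point above; the import path is an assumption about where the parser module lives in this tree:

from spack.spec_parser import parse_one_or_raise  # assumed module path

print(parse_one_or_raise("zlib@1.3 +shared"))  # exactly one spec: returned

try:
    parse_one_or_raise("zlib@1.3 libpng")  # trailing second spec: rejected
except ValueError as err:
    print(err)
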
|
|||||||
@@ -129,7 +129,7 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
 )
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     spec = Spec(
-        f"pkg-a foobar=bar target={root_target_range} %gcc@10 ^pkg-b target={dep_target_range}"
+        f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
     )
     with spack.concretize.disable_compiler_existence_check():
         spec = spack.concretize.concretize_one(spec)
|
|||||||
@@ -200,11 +200,7 @@ def dummy_prefix(tmpdir):
 @pytest.mark.requires_executables(*required_executables)
 @pytest.mark.maybeslow
 @pytest.mark.usefixtures(
-    "default_config",
-    "cache_directory",
-    "install_dir_default_layout",
-    "temporary_mirror",
-    "mutable_mock_env_path",
+    "default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
 )
 def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
     """
|
|||||||
@pytest.mark.maybeslow
|
@pytest.mark.maybeslow
|
||||||
@pytest.mark.nomockstage
|
@pytest.mark.nomockstage
|
||||||
@pytest.mark.usefixtures(
|
@pytest.mark.usefixtures(
|
||||||
"default_config",
|
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
|
||||||
"cache_directory",
|
|
||||||
"install_dir_default_layout",
|
|
||||||
"temporary_mirror",
|
|
||||||
"mutable_mock_env_path",
|
|
||||||
)
|
)
|
||||||
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
|
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
|
||||||
"""
|
"""
|
||||||
@@ -577,6 +569,7 @@ def test_FetchCacheError_only_accepts_lists_of_errors():
|
|||||||
def test_FetchCacheError_pretty_printing_multiple():
|
def test_FetchCacheError_pretty_printing_multiple():
|
||||||
e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
|
e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
|
||||||
str_e = str(e)
|
str_e = str(e)
|
||||||
|
print("'" + str_e + "'")
|
||||||
assert "Multiple errors" in str_e
|
assert "Multiple errors" in str_e
|
||||||
assert "Error 1: RuntimeError: Oops!" in str_e
|
assert "Error 1: RuntimeError: Oops!" in str_e
|
||||||
assert "Error 2: TypeError: Trouble!" in str_e
|
assert "Error 2: TypeError: Trouble!" in str_e
|
||||||
@@ -1140,7 +1133,7 @@ def test_get_valid_spec_file_no_json(tmp_path, filename):
|
|||||||
|
|
||||||
def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys):
|
def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys):
|
||||||
layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
|
layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
|
||||||
spec = Spec("gmake@4.4.1 arch=linux-ubuntu23.04-zen2 %gcc@13.1.0")
|
spec = Spec("gmake@4.4.1%gcc@13.1.0 arch=linux-ubuntu23.04-zen2")
|
||||||
spec._mark_concrete()
|
spec._mark_concrete()
|
||||||
spec_dict = spec.to_dict()
|
spec_dict = spec.to_dict()
|
||||||
spec_dict["buildcache_layout_version"] = layout_version
|
spec_dict["buildcache_layout_version"] = layout_version
|
||||||
|
|||||||
@@ -388,7 +388,7 @@ def test_wrapper_variables(
|
|||||||
root = spack.concretize.concretize_one("dt-diamond")
|
root = spack.concretize.concretize_one("dt-diamond")
|
||||||
|
|
||||||
for s in root.traverse():
|
for s in root.traverse():
|
||||||
s.set_prefix(f"/{s.name}-prefix/")
|
s.prefix = "/{0}-prefix/".format(s.name)
|
||||||
|
|
||||||
dep_pkg = root["dt-diamond-left"].package
|
dep_pkg = root["dt-diamond-left"].package
|
||||||
dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]
|
dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]
|
||||||
@@ -396,7 +396,7 @@ def test_wrapper_variables(
|
|||||||
dep_libs = LibraryList(dep_lib_paths)
|
dep_libs = LibraryList(dep_lib_paths)
|
||||||
|
|
||||||
dep2_pkg = root["dt-diamond-right"].package
|
dep2_pkg = root["dt-diamond-right"].package
|
||||||
dep2_pkg.spec.set_prefix(str(installation_dir_with_headers))
|
dep2_pkg.spec.prefix = str(installation_dir_with_headers)
|
||||||
|
|
||||||
setattr(dep_pkg, "libs", dep_libs)
|
setattr(dep_pkg, "libs", dep_libs)
|
||||||
try:
|
try:
|
||||||
@@ -542,7 +542,7 @@ def test_build_jobs_sequential_is_sequential():
|
|||||||
spack.config.determine_number_of_jobs(
|
spack.config.determine_number_of_jobs(
|
||||||
parallel=False,
|
parallel=False,
|
||||||
max_cpus=8,
|
max_cpus=8,
|
||||||
config=spack.config.create_from(
|
config=spack.config.Configuration(
|
||||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
|
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
|
||||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
|
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
|
||||||
),
|
),
|
||||||
@@ -556,7 +556,7 @@ def test_build_jobs_command_line_overrides():
|
|||||||
spack.config.determine_number_of_jobs(
|
spack.config.determine_number_of_jobs(
|
||||||
parallel=True,
|
parallel=True,
|
||||||
max_cpus=1,
|
max_cpus=1,
|
||||||
config=spack.config.create_from(
|
config=spack.config.Configuration(
|
||||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
||||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
|
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
|
||||||
),
|
),
|
||||||
@@ -567,7 +567,7 @@ def test_build_jobs_command_line_overrides():
|
|||||||
spack.config.determine_number_of_jobs(
|
spack.config.determine_number_of_jobs(
|
||||||
parallel=True,
|
parallel=True,
|
||||||
max_cpus=100,
|
max_cpus=100,
|
||||||
config=spack.config.create_from(
|
config=spack.config.Configuration(
|
||||||
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
|
||||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
|
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
|
||||||
),
|
),
|
||||||
@@ -581,7 +581,7 @@ def test_build_jobs_defaults():
|
|||||||
spack.config.determine_number_of_jobs(
|
spack.config.determine_number_of_jobs(
|
||||||
parallel=True,
|
parallel=True,
|
||||||
max_cpus=10,
|
max_cpus=10,
|
||||||
config=spack.config.create_from(
|
config=spack.config.Configuration(
|
||||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
|
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
@@ -591,7 +591,7 @@ def test_build_jobs_defaults():
|
|||||||
spack.config.determine_number_of_jobs(
|
spack.config.determine_number_of_jobs(
|
||||||
parallel=True,
|
parallel=True,
|
||||||
max_cpus=10,
|
max_cpus=10,
|
||||||
config=spack.config.create_from(
|
config=spack.config.Configuration(
|
||||||
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
|
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -403,8 +403,8 @@ def test_autoreconf_search_path_args_multiple(default_mock_concretization, tmpdi
|
|||||||
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
|
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
|
||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
build_dep_one.prefix = str(tmpdir.join("fst"))
|
||||||
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
build_dep_two.prefix = str(tmpdir.join("snd"))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
||||||
"-I",
|
"-I",
|
||||||
aclocal_fst,
|
aclocal_fst,
|
||||||
@@ -422,8 +422,8 @@ def test_autoreconf_search_path_args_skip_automake(default_mock_concretization,
|
|||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.name = "automake"
|
build_dep_one.name = "automake"
|
||||||
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
build_dep_one.prefix = str(tmpdir.join("fst"))
|
||||||
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
build_dep_two.prefix = str(tmpdir.join("snd"))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
|
||||||
|
|
||||||
|
|
||||||
@@ -434,7 +434,7 @@ def test_autoreconf_search_path_args_external_order(default_mock_concretization,
|
|||||||
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.external_path = str(tmpdir.join("fst"))
|
build_dep_one.external_path = str(tmpdir.join("fst"))
|
||||||
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
build_dep_two.prefix = str(tmpdir.join("snd"))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
|
||||||
"-I",
|
"-I",
|
||||||
aclocal_snd,
|
aclocal_snd,
|
||||||
@@ -447,8 +447,8 @@ def test_autoreconf_search_path_skip_nonexisting(default_mock_concretization, tm
|
|||||||
"""Skip -I flags for non-existing directories"""
|
"""Skip -I flags for non-existing directories"""
|
||||||
spec = default_mock_concretization("dttop")
|
spec = default_mock_concretization("dttop")
|
||||||
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
|
||||||
build_dep_one.set_prefix(str(tmpdir.join("fst")))
|
build_dep_one.prefix = str(tmpdir.join("fst"))
|
||||||
build_dep_two.set_prefix(str(tmpdir.join("snd")))
|
build_dep_two.prefix = str(tmpdir.join("snd"))
|
||||||
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []
|
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -210,6 +210,7 @@ def check_args_contents(cc, args, must_contain, must_not_contain):
|
|||||||
"""
|
"""
|
||||||
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
||||||
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
||||||
|
print(cc_modified_args)
|
||||||
for a in must_contain:
|
for a in must_contain:
|
||||||
assert a in cc_modified_args
|
assert a in cc_modified_args
|
||||||
for a in must_not_contain:
|
for a in must_not_contain:
|
||||||
|
|||||||
@@ -18,7 +18,6 @@
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
- from spack.version import Version

pytestmark = [pytest.mark.usefixtures("mock_packages")]

@@ -31,43 +30,6 @@ def repro_dir(tmp_path):
yield result

- def test_get_added_versions_new_checksum(mock_git_package_changes):
- repo_path, filename, commits = mock_git_package_changes
- checksum_versions = {
- "3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
- "a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
- "6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
- "86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
- }
- with fs.working_dir(str(repo_path)):
- added_versions = ci.get_added_versions(
- checksum_versions, filename, from_ref=commits[-1], to_ref=commits[-2]
- )
- assert len(added_versions) == 1
- assert added_versions[0] == Version("2.1.5")

- def test_get_added_versions_new_commit(mock_git_package_changes):
- repo_path, filename, commits = mock_git_package_changes
- checksum_versions = {
- "74253725f884e2424a0dd8ae3f69896d5377f325": Version("2.1.6"),
- "3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
- "a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
- "6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
- "86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
- }
- with fs.working_dir(str(repo_path)):
- added_versions = ci.get_added_versions(
- checksum_versions, filename, from_ref=commits[2], to_ref=commits[1]
- )
- assert len(added_versions) == 1
- assert added_versions[0] == Version("2.1.6")

def test_pipeline_dag(config, tmpdir):
r"""Test creation, pruning, and traversal of PipelineDAG using the
following package dependency graph:

@@ -385,6 +347,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
for key, val in expectations.items():
affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
affected_pkg_names = set([s.name for s in affected_specs])
+ print(f"{key}: {affected_pkg_names}")
assert affected_pkg_names == val
@@ -12,7 +12,7 @@
build_env = SpackCommand("build-env")

- @pytest.mark.parametrize("pkg", [("pkg-c",), ("pkg-c", "--")])
+ @pytest.mark.parametrize("pkg", [("zlib",), ("zlib", "--")])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_it_just_runs(pkg):
build_env(*pkg)

@@ -38,7 +38,7 @@ def test_build_env_requires_a_spec(args):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(shell_as, shell, tmpdir):
with tmpdir.as_cwd():
- build_env("--dump", _out_file, "pkg-c")
+ build_env("--dump", _out_file, "zlib")
with open(_out_file, encoding="utf-8") as f:
if shell == "pwsh":
assert any(line.startswith("$Env:PATH") for line in f.readlines())

@@ -51,7 +51,7 @@ def test_dump(shell_as, shell, tmpdir):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_pickle(tmpdir):
with tmpdir.as_cwd():
- build_env("--pickle", _out_file, "pkg-c")
+ build_env("--pickle", _out_file, "zlib")
environment = pickle.load(open(_out_file, "rb"))
assert isinstance(environment, dict)
assert "PATH" in environment
@@ -148,7 +148,7 @@ def test_update_key_index(
s = spack.concretize.concretize_one("libdwarf")

# Install a package
- install("--fake", s.name)
+ install(s.name)

# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache

@@ -178,7 +178,7 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
s = spack.concretize.concretize_one("libdwarf")

# Install and generate build cache index
- PackageInstaller([s.package], fake=True, explicit=True).install()
+ PackageInstaller([s.package], explicit=True).install()

metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")

@@ -214,11 +214,13 @@ def verify_mirror_contents():
if in_env_pkg in p:
found_pkg = True

- assert found_pkg, f"Expected to find {in_env_pkg} in {dest_mirror_dir}"
+ if not found_pkg:
+ print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
+ assert False

# Install a package and put it in the buildcache
s = spack.concretize.concretize_one(out_env_pkg)
- install("--fake", s.name)
+ install(s.name)
buildcache("push", "-u", "-f", src_mirror_url, s.name)

env("create", "test")
@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
- import pathlib

import pytest

@@ -23,15 +22,7 @@
@pytest.fixture
- def no_add(monkeypatch):
+ def can_fetch_versions(monkeypatch):
- def add_versions_to_pkg(pkg, version_lines, open_in_editor):
- raise AssertionError("Should not be called")

- monkeypatch.setattr(spack.cmd.checksum, "add_versions_to_pkg", add_versions_to_pkg)

- @pytest.fixture
- def can_fetch_versions(monkeypatch, no_add):
"""Fake successful version detection."""

def fetch_remote_versions(pkg, concurrency):

@@ -54,7 +45,7 @@ def url_exists(url, curl=None):
@pytest.fixture
- def cannot_fetch_versions(monkeypatch, no_add):
+ def cannot_fetch_versions(monkeypatch):
"""Fake unsuccessful version detection."""

def fetch_remote_versions(pkg, concurrency):

@@ -97,6 +88,7 @@ def test_checksum_args(arguments, expected):
(["--batch", "preferred-test"], "version of preferred-test"),
(["--latest", "preferred-test"], "Found 1 version"),
(["--preferred", "preferred-test"], "Found 1 version"),
+ (["--add-to-package", "preferred-test"], "Added 0 new versions to"),
(["--verify", "preferred-test"], "Verified 1 of 1"),
(["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
],

@@ -279,12 +271,15 @@ def test_checksum_interactive_unrecognized_command():
assert interactive_version_filter(v.copy(), input=input) == v

- def test_checksum_versions(mock_packages, can_fetch_versions, monkeypatch):
+ def test_checksum_versions(mock_packages, can_fetch_versions):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
versions = [str(v) for v in pkg_cls.versions]
output = spack_checksum("zlib", *versions)
assert "Found 3 versions" in output
assert "version(" in output
+ output = spack_checksum("--add-to-package", "zlib", *versions)
+ assert "Found 3 versions" in output
+ assert "Added 0 new versions to" in output

def test_checksum_missing_version(mock_packages, cannot_fetch_versions):

@@ -292,6 +287,7 @@ def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
assert "Could not find any remote versions" in output
output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False)
assert "Could not find any remote versions" in output
+ assert "Added 1 new versions to" not in output

def test_checksum_deprecated_version(mock_packages, can_fetch_versions):

@@ -301,6 +297,8 @@ def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
"--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
)
assert "Version 1.1.0 is deprecated" in output
+ # TODO alecbcs: broken assertion.
+ # assert "Added 0 new versions to" not in output

def test_checksum_url(mock_packages, config):

@@ -339,52 +337,3 @@ def test_checksum_manual_download_fails(mock_packages, monkeypatch):
monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", error)
with pytest.raises(ManualDownloadRequiredError, match=error):
spack_checksum(name, *versions)

- def test_upate_package_contents(tmp_path: pathlib.Path):
- """Test that the package.py file is updated with the new versions."""
- pkg_path = tmp_path / "package.py"
- pkg_path.write_text(
- """\
- from spack.package import *

- class Zlib(Package):
- homepage = "http://zlib.net"
- url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"

- version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
- version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
- version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
- variant("pic", default=True, description="test")

- def install(self, spec, prefix):
- make("install")
- """
- )
- version_lines = """\
- version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
- version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
- version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
- """
- # two new versions are added
- assert spack.cmd.checksum.add_versions_to_pkg(str(pkg_path), version_lines) == 2
- assert (
- pkg_path.read_text()
- == """\
- from spack.package import *

- class Zlib(Package):
- homepage = "http://zlib.net"
- url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"

- version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
- version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
- version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
- version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
- version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
- variant("pic", default=True, description="test")

- def install(self, spec, prefix):
- make("install")
- """
- )
@@ -22,17 +22,12 @@
import spack.hash_types as ht
import spack.main
import spack.paths as spack_paths
- import spack.repo
- import spack.spec
- import spack.stage
import spack.util.spack_yaml as syaml
- import spack.version
from spack.ci import gitlab as gitlab_generator
from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
from spack.ci.generator_registry import generator
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.database import INDEX_JSON_FILE
- from spack.error import SpackError
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec

@@ -175,9 +170,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
url: https://my.fake.cdash
project: Not used
site: Nothing
- """,
+ """
- "--artifacts-root",
- str(tmp_path / "my_artifacts_root"),
)
yaml_contents = syaml.load(outputfile.read_text())

@@ -199,7 +192,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
assert "variables" in yaml_contents
assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
- assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "my_artifacts_root"
+ assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "jobs_scratch_dir"

def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index):

@@ -871,7 +864,7 @@ def test_push_to_build_cache(
logs_dir = scratch / "logs_dir"
logs_dir.mkdir()
ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
- assert "spack-build-out.txt.gz" in os.listdir(logs_dir)
+ assert "spack-build-out.txt" in os.listdir(logs_dir)

dl_dir = scratch / "download_dir"
buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))

@@ -1069,7 +1062,7 @@ def test_ci_rebuild_index(
with open(tmp_path / "spec.json", "w", encoding="utf-8") as f:
f.write(concrete_spec.to_json(hash=ht.dag_hash))

- install_cmd("--fake", "--add", "-f", str(tmp_path / "spec.json"))
+ install_cmd("--add", "-f", str(tmp_path / "spec.json"))
buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
ci_cmd("rebuild-index")

@@ -1329,15 +1322,13 @@ def test_ci_reproduce(
env.concretize()
env.write()

- def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
- with working_dir(tmp_path), ev.Environment(".") as env:
- if not os.path.exists(repro_dir):
repro_dir.mkdir()

job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))
- artifacts_root = repro_dir / "jobs_scratch_dir"
+ artifacts_root = repro_dir / "scratch_dir"
pipeline_path = artifacts_root / "pipeline.yml"

ci_cmd(

@@ -1365,14 +1356,10 @@ def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")

with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
- if merge_commit_test:
+ f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n")
- f.write(
- f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n"
- )
- else:
- f.write(f"\ncommit {last_two_git_commits[1]}\n\n")

- return "jobs_scratch_dir"
+ def fake_download_and_extract_artifacts(url, work_dir):
+ pass

monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts)
rep_out = ci_cmd(

@@ -1388,64 +1375,6 @@ def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
# Make sure we tell the user where it is when not in interactive mode
assert f"$ {repro_dir}/start.sh" in rep_out

- # Ensure the correct commits are used
- assert f"checkout_commit: {last_two_git_commits[0]}" in rep_out
- assert f"merge_commit: {last_two_git_commits[1]}" in rep_out

- # Test re-running in dirty working dir
- with pytest.raises(SpackError, match=f"{repro_dir}"):
- rep_out = ci_cmd(
- "reproduce-build",
- "https://example.com/api/v1/projects/1/jobs/2/artifacts",
- "--working-dir",
- str(repro_dir),
- output=str,
- )

- # Cleanup between tests
- shutil.rmtree(repro_dir)

- # Test --use-local-head
- rep_out = ci_cmd(
- "reproduce-build",
- "https://example.com/api/v1/projects/1/jobs/2/artifacts",
- "--use-local-head",
- "--working-dir",
- str(repro_dir),
- output=str,
- )

- # Make sure we are checkout out the HEAD commit without a merge commit
- assert "checkout_commit: HEAD" in rep_out
- assert "merge_commit: None" in rep_out

- # Test the case where the spack_info.txt is not a merge commit
- monkeypatch.setattr(
- ci,
- "download_and_extract_artifacts",
- lambda url, wd: fake_download_and_extract_artifacts(url, wd, False),
- )

- # Cleanup between tests
- shutil.rmtree(repro_dir)

- rep_out = ci_cmd(
- "reproduce-build",
- "https://example.com/api/v1/projects/1/jobs/2/artifacts",
- "--working-dir",
- str(repro_dir),
- output=str,
- )
- # Make sure the script was generated
- assert (repro_dir / "start.sh").exists()

- # Make sure we tell the user where it is when not in interactive mode
- assert f"$ {repro_dir}/start.sh" in rep_out

- # Ensure the correct commit is used (different than HEAD)
- assert f"checkout_commit: {last_two_git_commits[1]}" in rep_out
- assert "merge_commit: None" in rep_out

@pytest.mark.parametrize(
"url_in,url_out",

@@ -1845,216 +1774,3 @@ def test_ci_generate_alternate_target(
assert pipeline_doc.startswith("unittestpipeline")
assert "externaltest" in pipeline_doc

- @pytest.fixture
- def fetch_versions_match(monkeypatch):
- """Fake successful checksums returned from downloaded tarballs."""
- def get_checksums_for_versions(url_by_version, package_name, **kwargs):
- pkg_cls = spack.repo.PATH.get_pkg_class(package_name)
- return {v: pkg_cls.versions[v]["sha256"] for v in url_by_version}
- monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)

- @pytest.fixture
- def fetch_versions_invalid(monkeypatch):
- """Fake successful checksums returned from downloaded tarballs."""
- def get_checksums_for_versions(url_by_version, package_name, **kwargs):
- return {
- v: "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
- for v in url_by_version
- }
- monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)

- @pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
- def test_ci_validate_standard_versions_valid(capfd, mock_packages, fetch_versions_match, versions):
- spec = spack.spec.Spec("diff-test")
- pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- version_list = [spack.version.Version(v) for v in versions]
- assert spack.cmd.ci.validate_standard_versions(pkg, version_list)
- out, err = capfd.readouterr()
- for version in versions:
- assert f"Validated diff-test@{version}" in out

- @pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
- def test_ci_validate_standard_versions_invalid(
- capfd, mock_packages, fetch_versions_invalid, versions
- ):
- spec = spack.spec.Spec("diff-test")
- pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- version_list = [spack.version.Version(v) for v in versions]
- assert spack.cmd.ci.validate_standard_versions(pkg, version_list) is False
- out, err = capfd.readouterr()
- for version in versions:
- assert f"Invalid checksum found diff-test@{version}" in err

- @pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6)]])
- def test_ci_validate_git_versions_valid(
- capfd, monkeypatch, mock_packages, mock_git_version_info, versions
- ):
- spec = spack.spec.Spec("diff-test")
- pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- version_list = [spack.version.Version(v) for v, _ in versions]
- repo_path, filename, commits = mock_git_version_info
- version_commit_dict = {
- spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
- }
- pkg_class = spec.package_class
- monkeypatch.setattr(pkg_class, "git", repo_path)
- monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
- assert spack.cmd.ci.validate_git_versions(pkg, version_list)
- out, err = capfd.readouterr()
- for version in version_list:
- assert f"Validated diff-test@{version}" in out

- @pytest.mark.parametrize("versions", [[("1.0", -3)], [("1.1", -5), ("2.0", -5)]])
- def test_ci_validate_git_versions_bad_tag(
- capfd, monkeypatch, mock_packages, mock_git_version_info, versions
- ):
- spec = spack.spec.Spec("diff-test")
- pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- version_list = [spack.version.Version(v) for v, _ in versions]
- repo_path, filename, commits = mock_git_version_info
- version_commit_dict = {
- spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
- }
- pkg_class = spec.package_class
- monkeypatch.setattr(pkg_class, "git", repo_path)
- monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
- assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False
- out, err = capfd.readouterr()
- for version in version_list:
- assert f"Mismatched tag <-> commit found for diff-test@{version}" in err

- @pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6), ("3.0", -6)]])
- def test_ci_validate_git_versions_invalid(
- capfd, monkeypatch, mock_packages, mock_git_version_info, versions
- ):
- spec = spack.spec.Spec("diff-test")
- pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- version_list = [spack.version.Version(v) for v, _ in versions]
- repo_path, filename, commits = mock_git_version_info
- version_commit_dict = {
- spack.version.Version(v): {
- "tag": f"v{v}",
- "commit": "abcdefabcdefabcdefabcdefabcdefabcdefabc",
- }
- for v, c in versions
- }
- pkg_class = spec.package_class
- monkeypatch.setattr(pkg_class, "git", repo_path)
- monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
- assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False
- out, err = capfd.readouterr()
- for version in version_list:
- assert f"Invalid commit for diff-test@{version}" in err

- @pytest.fixture
- def verify_standard_versions_valid(monkeypatch):
- def validate_standard_versions(pkg, versions):
- for version in versions:
- print(f"Validated {pkg.name}@{version}")
- return True
- monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)

- @pytest.fixture
- def verify_git_versions_valid(monkeypatch):
- def validate_git_versions(pkg, versions):
- for version in versions:
- print(f"Validated {pkg.name}@{version}")
- return True
- monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)

- @pytest.fixture
- def verify_standard_versions_invalid(monkeypatch):
- def validate_standard_versions(pkg, versions):
- for version in versions:
- print(f"Invalid checksum found {pkg.name}@{version}")
- return False
- monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)

- @pytest.fixture
- def verify_git_versions_invalid(monkeypatch):
- def validate_git_versions(pkg, versions):
- for version in versions:
- print(f"Invalid commit for {pkg.name}@{version}")
- return False
- monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)

- def test_ci_verify_versions_valid(
- monkeypatch,
- mock_packages,
- mock_git_package_changes,
- verify_standard_versions_valid,
- verify_git_versions_valid,
- ):
- repo_path, _, commits = mock_git_package_changes
- monkeypatch.setattr(spack.paths, "prefix", repo_path)
- out = ci_cmd("verify-versions", commits[-1], commits[-3])
- assert "Validated diff-test@2.1.5" in out
- assert "Validated diff-test@2.1.6" in out

- def test_ci_verify_versions_standard_invalid(
- monkeypatch,
- mock_packages,
- mock_git_package_changes,
- verify_standard_versions_invalid,
- verify_git_versions_invalid,
- ):
- repo_path, _, commits = mock_git_package_changes
- monkeypatch.setattr(spack.paths, "prefix", repo_path)
- out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
- assert "Invalid checksum found diff-test@2.1.5" in out
- assert "Invalid commit for diff-test@2.1.6" in out

- def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
- repo_path, _, commits = mock_git_package_changes
- monkeypatch.setattr(spack.paths, "prefix", repo_path)
- pkg_class = spack.spec.Spec("diff-test").package_class
- monkeypatch.setattr(pkg_class, "manual_download", True)
- out = ci_cmd("verify-versions", commits[-1], commits[-2])
- assert "Skipping manual download package: diff-test" in out
@@ -5,7 +5,6 @@
import filecmp
import os
import shutil
- import textwrap

import pytest

@@ -260,25 +259,15 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
def test_updated_completion_scripts(shell, tmpdir):
"""Make sure our shell tab completion scripts remain up-to-date."""

- width = 72
+ msg = (
- lines = textwrap.wrap(
"It looks like Spack's command-line interface has been modified. "
- "If differences are more than your global 'include:' scopes, please "
+ "Please update Spack's shell tab completion scripts by running:\n\n"
- "update Spack's shell tab completion scripts by running:",
+ " spack commands --update-completion\n\n"
- width,
+ "and adding the changed files to your pull request."
)
- lines.append("\n spack commands --update-completion\n")
- lines.extend(
- textwrap.wrap(
- "and adding the changed files (minus your global 'include:' scopes) "
- "to your pull request.",
- width,
- )
- )
- msg = "\n".join(lines)

header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
- script = f"spack-completion.{shell}"
+ script = "spack-completion.{0}".format(shell)
old_script = os.path.join(spack.paths.share_path, script)
new_script = str(tmpdir.join(script))
@@ -213,7 +213,7 @@ def test_config_add_update_dict(mutable_empty_config):

def test_config_with_c_argument(mutable_empty_config):
# I don't know how to add a spack argument to a Spack Command, so we test this way
- config_file = "config:install_tree:root:/path/to/config.yaml"
+ config_file = "config:install_root:root:/path/to/config.yaml"
parser = spack.main.make_argument_parser()
args = parser.parse_args(["-c", config_file])
assert config_file in args.config_vars

@@ -221,7 +221,7 @@ def test_config_with_c_argument(mutable_empty_config):
# Add the path to the config
config("add", args.config_vars[0], scope="command_line")
output = config("get", "config")
- assert "config:\n install_tree:\n root: /path/to/config.yaml" in output
+ assert "config:\n install_root:\n root: /path/to/config.yaml" in output

def test_config_add_ordered_dict(mutable_empty_config):

@@ -335,7 +335,7 @@ def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):

def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
- config("add", "packages:all:require:['%gcc']")
+ config("add", "packages:all:compiler:[gcc]")

# contents to add to file
contents = """spack:

@@ -357,7 +357,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
expected = """packages:
all:
target: [x86_64]
- require: ['%gcc']
+ compiler: [gcc]
"""

assert expected == output

@@ -606,6 +606,7 @@ def test_config_prefer_upstream(
packages = syaml.load(open(cfg_file, encoding="utf-8"))["packages"]

# Make sure only the non-default variants are set.
+ assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
assert packages["dependency-install"] == {"version": ["2.0"]}
# Ensure that neither variant gets listed for hdf5, since they conflict
@@ -2,16 +2,52 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+ import os
import platform

+ import pytest

import spack
import spack.platforms
import spack.spec
+ from spack.database import INDEX_JSON_FILE
from spack.main import SpackCommand
+ from spack.util.executable import which

debug = SpackCommand("debug")

+ @pytest.mark.db
+ def test_create_db_tarball(tmpdir, database):
+ with tmpdir.as_cwd():
+ debug("create-db-tarball")

+ # get the first non-dotfile to avoid coverage files in the directory
+ files = os.listdir(os.getcwd())
+ tarball_name = next(
+ f for f in files if not f.startswith(".") and not f.startswith("tests")
+ )

+ # debug command made an archive
+ assert os.path.exists(tarball_name)

+ # print contents of archive
+ tar = which("tar")
+ contents = tar("tzf", tarball_name, output=str)

+ # DB file is included
+ assert INDEX_JSON_FILE in contents

+ # specfiles from all installs are included
+ for spec in database.query():
+ # externals won't have a specfile
+ if spec.external:
+ continue

+ spec_suffix = "%s/.spack/spec.json" % spec.dag_hash()
+ assert spec_suffix in contents

def test_report():
out = debug("report")
host_platform = spack.platforms.host()
@@ -15,21 +15,18 @@
deprecate = SpackCommand("deprecate")
find = SpackCommand("find")

- # Unit tests should not be affected by the user's managed environments
- pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")

def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
- install("--fake", "libelf@0.8.13")
+ install("libelf@0.8.13")
- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

- all_installed = spack.store.STORE.db.query("libelf")
+ all_installed = spack.store.STORE.db.query()
assert len(all_installed) == 2

deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

- non_deprecated = spack.store.STORE.db.query("libelf")
+ non_deprecated = spack.store.STORE.db.query()
- all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
+ all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert all_available == all_installed
assert non_deprecated == spack.store.STORE.db.query("libelf@0.8.13")

@@ -42,24 +39,24 @@ def test_deprecate_fails_no_such_package(mock_packages, mock_archive, mock_fetch
output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.10' matches no installed packages" in output

- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.13' matches no installed packages" in output

- def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery, monkeypatch):
+ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery):
- """Tests that the -i option allows us to deprecate in favor of a spec
+ """Tests that the ```-i`` option allows us to deprecate in favor of a spec
- that is not yet installed.
+ that is not yet installed."""
- """
+ install("libelf@0.8.10")
- install("--fake", "libelf@0.8.10")
- to_deprecate = spack.store.STORE.db.query("libelf")
+ to_deprecate = spack.store.STORE.db.query()
assert len(to_deprecate) == 1

deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")

- non_deprecated = spack.store.STORE.db.query("libelf")
+ non_deprecated = spack.store.STORE.db.query()
- deprecated = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.DEPRECATED)
+ deprecated = spack.store.STORE.db.query(installed=InstallRecordStatus.DEPRECATED)
assert deprecated == to_deprecate
assert len(non_deprecated) == 1
assert non_deprecated[0].satisfies("libelf@0.8.13")

@@ -67,8 +64,8 @@ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mock

def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Test that the deprecate command deprecates all dependencies properly."""
- install("--fake", "libdwarf@20130729 ^libelf@0.8.13")
+ install("libdwarf@20130729 ^libelf@0.8.13")
- install("--fake", "libdwarf@20130207 ^libelf@0.8.10")
+ install("libdwarf@20130207 ^libelf@0.8.10")

new_spec = spack.concretize.concretize_one("libdwarf@20130729^libelf@0.8.13")
old_spec = spack.concretize.concretize_one("libdwarf@20130207^libelf@0.8.10")

@@ -84,14 +81,14 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
assert all_available == all_installed
assert sorted(all_available) == sorted(deprecated + non_deprecated)

- assert sorted(non_deprecated) == sorted(new_spec.traverse())
+ assert sorted(non_deprecated) == sorted(list(new_spec.traverse()))
- assert sorted(deprecated) == sorted([old_spec, old_spec["libelf"]])
+ assert sorted(deprecated) == sorted(list(old_spec.traverse()))

def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can still uninstall deprecated packages."""
- install("--fake", "libelf@0.8.13")
+ install("libelf@0.8.13")
- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

@@ -107,9 +104,9 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_m

def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can re-deprecate a spec to change its deprecator."""
- install("--fake", "libelf@0.8.13")
+ install("libelf@0.8.13")
- install("--fake", "libelf@0.8.12")
+ install("libelf@0.8.12")
- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")

@@ -120,8 +117,8 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i

deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

- non_deprecated = spack.store.STORE.db.query("libelf")
+ non_deprecated = spack.store.STORE.db.query()
- all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
+ all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 2
assert len(all_available) == 3

@@ -132,9 +129,9 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that when a deprecator spec is deprecated, its deprecatee specs
are updated to point to the new deprecator."""
- install("--fake", "libelf@0.8.13")
+ install("libelf@0.8.13")
- install("--fake", "libelf@0.8.12")
+ install("libelf@0.8.12")
- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

first_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")
second_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.12")

@@ -147,8 +144,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m

deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")

- non_deprecated = spack.store.STORE.db.query("libelf")
+ non_deprecated = spack.store.STORE.db.query()
- all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
+ all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 1
assert len(all_available) == 3

@@ -161,8 +158,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the concretizer throws an error if we concretize to a
deprecated spec"""
- install("--fake", "libelf@0.8.13")
+ install("libelf@0.8.13")
- install("--fake", "libelf@0.8.10")
+ install("libelf@0.8.10")

deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
@@ -127,15 +127,16 @@ def test_dev_build_before_until(tmpdir, install_mockery):
assert not_installed in out

- def _print_spack_short_spec(*args):
+ def print_spack_cc(*args):
- print(f"SPACK_SHORT_SPEC={os.environ['SPACK_SHORT_SPEC']}")
+ # Eat arguments and print environment variable to test
+ print(os.environ.get("CC", ""))

def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
- monkeypatch.setattr(os, "execvp", _print_spack_short_spec)
+ monkeypatch.setattr(os, "execvp", print_spack_cc)
with tmpdir.as_cwd():
output = dev_build("-b", "edit", "--drop-in", "sh", "dev-build-test-install@0.0.0")
- assert "SPACK_SHORT_SPEC=dev-build-test-install@0.0.0" in output
+ assert os.path.join("lib", "spack", "env") in output

def test_dev_build_fails_already_installed(tmpdir, install_mockery):
@@ -194,7 +194,7 @@ def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):

def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
- install_cmd("--fake", "mpileaks")
+ install_cmd("mpileaks")

# Only one version of mpileaks will work
diff_cmd("mpileaks", "mpileaks")
Some files were not shown because too many files have changed in this diff.