Merge branch 'develop' into packages/mfem-4.8
commit 1825557241
.gitattributes (vendored) | 3
@@ -1,4 +1,3 @@
 *.py diff=python
 *.lp linguist-language=Prolog
-lib/spack/external/* linguist-vendored
 *.bat text eol=crlf
.github/workflows/bootstrap.yml (vendored) | 2
@@ -26,7 +26,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
-            cmake bison bison-devel libstdc++-static
+            cmake bison bison-devel libstdc++-static gawk
       - name: Setup OpenSUSE
         if: ${{ matrix.image == 'opensuse/leap:latest' }}
         run: |
.github/workflows/prechecks.yml (vendored) | 39
@@ -25,14 +25,16 @@ jobs:
       with:
         python-version: '3.13'
         cache: 'pip'
         cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
     - name: Install Python Packages
       run: |
         pip install --upgrade pip setuptools
         pip install -r .github/workflows/requirements/style/requirements.txt
     - name: vermin (Spack's Core)
-      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+      run: |
+        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
-      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+      run: |
+        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

   # Run style checks on the files that have been changed
   style:
@@ -40,23 +42,20 @@ jobs:
     steps:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
-        fetch-depth: 0
+        fetch-depth: 2
     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: '3.13'
         cache: 'pip'
         cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools
         pip install -r .github/workflows/requirements/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/bin/setup_git.sh
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
         bin/spack style --base HEAD^1
         bin/spack license verify
+        pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

   audit:
     uses: ./.github/workflows/audit.yaml
@@ -103,21 +102,3 @@ jobs:
         spack -d bootstrap now --dev
         spack -d style -t black
         spack unit-test -V
-
-  # Further style checks from pylint
-  pylint:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: '3.13'
-        cache: 'pip'
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pylint
-    - name: Pylint (Spack Core)
-      run: |
-        pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
@@ -1,7 +1,8 @@
 black==25.1.0
-clingo==5.7.1
+clingo==5.8.0
 flake8==7.2.0
 isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20250304
+types-six==1.17.0.20250403
 vermin==1.6.0
+pylint==3.3.6
README.md | 34
@@ -46,18 +46,42 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.
 
-To install spack and your first package, make sure you have Python & Git.
+Installation
+----------------
+
+To install spack, first make sure you have Python & Git.
 Then:
 
-    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
-    $ cd spack/bin
-    $ ./spack install zlib
+```bash
+git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+```
+
+<details>
+<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
+<br>
+
+> [!TIP]
+> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
+>
+> `--depth=2` prunes the git history to reduce the size of the Spack installation.
+
+</details>
+
+```bash
+# For bash/zsh/sh
+. spack/share/spack/setup-env.sh
+
+# For tcsh/csh
+source spack/share/spack/setup-env.csh
+
+# For fish
+. spack/share/spack/setup-env.fish
+```
+
+```bash
+# Now you're ready to install a package!
+spack install zlib-ng
+```
 
 Documentation
 ----------------
@@ -90,10 +90,9 @@ config:
   misc_cache: $user_cache_path/cache
 
 
-  # Timeout in seconds used for downloading sources etc. This only applies
-  # to the connection phase and can be increased for slow connections or
-  # servers. 0 means no timeout.
-  connect_timeout: 10
+  # Abort downloads after this many seconds if no data is received.
+  # Setting this to 0 will disable the timeout.
+  connect_timeout: 30
 
 
   # If this is false, tools like curl that use SSL will not verify
@@ -25,6 +25,8 @@ packages:
     glu: [apple-glu]
     unwind: [apple-libunwind]
     uuid: [apple-libuuid]
+  apple-clang:
+    buildable: false
   apple-gl:
     buildable: false
   externals:
@@ -72,6 +72,8 @@ packages:
     permissions:
       read: world
       write: user
+  cce:
+    buildable: false
   cray-fftw:
     buildable: false
   cray-libsci:
@@ -86,13 +88,23 @@ packages:
     buildable: false
   essl:
     buildable: false
   fj:
     buildable: false
   fujitsu-mpi:
     buildable: false
   fujitsu-ssl2:
     buildable: false
   glibc:
     buildable: false
   hpcx-mpi:
     buildable: false
   iconv:
     prefer: [libiconv]
   mpt:
     buildable: false
   musl:
     buildable: false
   spectrum-mpi:
     buildable: false
   xl:
     buildable: false
@@ -23,3 +23,5 @@ packages:
   mpi:
     require:
     - one_of: [msmpi]
+  msvc:
+    buildable: false
@@ -1291,55 +1291,61 @@ based on site policies.
 Variants
 ^^^^^^^^
 
-Variants are named options associated with a particular package. They are
-optional, as each package must provide default values for each variant it
-makes available. Variants can be specified using
-a flexible parameter syntax ``name=<value>``. For example,
-``spack install mercury debug=True`` will install mercury built with debug
-flags. The names of particular variants available for a package depend on
+Variants are named options associated with a particular package and are
+typically used to enable or disable certain features at build time. They
+are optional, as each package must provide default values for each variant
+it makes available.
+
+The names of variants available for a particular package depend on
 what was provided by the package author. ``spack info <package>`` will
 provide information on what build variants are available.
 
-For compatibility with earlier versions, variants which happen to be
-boolean in nature can be specified by a syntax that represents turning
-options on and off. For example, in the previous spec we could have
-supplied ``mercury +debug`` with the same effect of enabling the debug
-compile time option for the libelf package.
+There are different types of variants:
 
-Depending on the package a variant may have any default value. For
-``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
-with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
-you can turn it off by either adding ``-name`` or ``~name`` to the spec.
+1. Boolean variants. Typically used to enable or disable a feature at
+   compile time. For example, a package might have a ``debug`` variant that
+   can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
+2. Single-valued variants. Often used to set defaults. For example, a package
+   might have a ``compression`` variant that determines the default
+   compression algorithm, which users could set to ``compression=gzip`` or
+   ``compression=zstd``.
+3. Multi-valued variants. A package might have a ``fabrics`` variant that
+   determines which network fabrics to support. Users could set this to
+   ``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
+   interfaces. The values are separated by commas.
 
-There are two syntaxes here because, depending on context, ``~`` and
-``-`` may mean different things. In most shells, the following will
-result in the shell performing home directory substitution:
+The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
+fabrics, but other fabrics may be enabled as well. If the intent is to
+enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
+syntax should be used with the ``:=`` operator.
 
-.. code-block:: sh
+.. note::
 
-   mpileaks ~debug # shell may try to substitute this!
-   mpileaks~debug  # use this instead
+   In certain shells, the ``~`` character is expanded to the home
+   directory. To avoid these issues, avoid whitespace between the package
+   name and the variant:
 
-If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded. In this situation, you would want to write ``libelf
--debug``. However, ``-`` can be ambiguous when included after a
-package name without spaces:
+   .. code-block:: sh
 
-.. code-block:: sh
+      mpileaks ~debug # shell may try to substitute this!
+      mpileaks~debug  # use this instead
 
-   mpileaks-debug # wrong!
-   mpileaks -debug # right
+   Alternatively, you can use the ``-`` character to disable a variant,
+   but be aware that this requires a space between the package name and
+   the variant:
 
-Spack allows the ``-`` character to be part of package names, so the
-above will be interpreted as a request for the ``mpileaks-debug``
-package, not a request for ``mpileaks`` built without ``debug``
-options. In this scenario, you should write ``mpileaks~debug`` to
-avoid ambiguity.
+   .. code-block:: sh
 
-When spack normalizes specs, it prints them out with no spaces boolean
-variants using the backwards compatibility syntax and uses only ``~``
-for disabled boolean variants. The ``-`` and spaces on the command
-line are provided for convenience and legibility.
+      mpileaks-debug  # wrong: refers to a package named "mpileaks-debug"
+      mpileaks -debug # right: refers to a package named mpileaks with debug disabled
+
+   As a last resort, ``debug=False`` can also be used to disable a boolean variant.
 
 
 """""""""""""""""""""""""""""""""""
 Variant propagation to dependencies
 """""""""""""""""""""""""""""""""""
 
 Spack allows variants to propagate their value to the package's
 dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
@@ -91,7 +91,7 @@ there are any other variables you need to set, you can do this in the
 
 .. code-block:: python
 
-   def setup_build_environment(self, env):
+   def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.set("PREFIX", prefix)
        env.set("BLASLIB", spec["blas"].libs.ld_flags)
 
@@ -225,8 +225,10 @@ def setup(sphinx):
     ("py:class", "llnl.util.lang.T"),
     ("py:class", "llnl.util.lang.KT"),
     ("py:class", "llnl.util.lang.VT"),
+    ("py:class", "llnl.util.lang.ClassPropertyType"),
     ("py:obj", "llnl.util.lang.KT"),
     ("py:obj", "llnl.util.lang.VT"),
+    ("py:obj", "llnl.util.lang.ClassPropertyType"),
 ]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -46,6 +46,12 @@ Each Spack configuration file is nested under a top-level section
 corresponding to its name. So, ``config.yaml`` starts with ``config:``,
 ``mirrors.yaml`` starts with ``mirrors:``, etc.
 
+.. tip::
+
+   Validation and autocompletion of Spack config files can be enabled in
+   your editor with the YAML language server. See `spack/schemas
+   <https://github.com/spack/schemas>`_ for more information.
+
 .. _configuration-scopes:
 
 --------------------
lib/spack/docs/env_vars_yaml.rst (new file) | 34
@@ -0,0 +1,34 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _env-vars-yaml:
+
+=============================================
+Environment Variable Settings (env_vars.yaml)
+=============================================
+
+Spack allows you to include shell environment variable modifications
+for a spack environment by including an ``env_vars.yaml``. Environment
+variables can be modified by setting, unsetting, appending, and prepending
+variables in the shell environment.
+The changes to the shell environment will take effect when the spack
+environment is activated.
+
+For example:
+
+.. code-block:: yaml
+
+   env_vars:
+     set:
+       ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+     unset:
+       ENVAR_TO_UNSET_IN_ENV_LOAD:
+     prepend_path:
+       PATH_LIST: "path/to/prepend"
+     append_path:
+       PATH_LIST: "path/to/append"
+     remove_path:
+       PATH_LIST: "path/to/remove"
@@ -1000,6 +1000,28 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.
 
+-------------------------------
+Modifying Environment Variables
+-------------------------------
+
+Spack Environments can modify the active shell's environment variables when
+activated. The environment can be configured to set, unset, prepend, or append
+variables using the ``env_vars`` section in ``spack.yaml`` or through an
+included config scope file:
+
+.. code-block:: yaml
+
+   spack:
+     env_vars:
+       set:
+         ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+       unset:
+         ENVAR_TO_UNSET_IN_ENV_LOAD:
+       prepend_path:
+         PATH_LIST: "path/to/prepend"
+       append_path:
+         PATH_LIST: "path/to/append"
+       remove_path:
+         PATH_LIST: "path/to/remove"
+
 -----------------
 Environment Views
@@ -75,6 +75,7 @@ or refer to the full manual below.
    packages_yaml
    build_settings
    environments
+   env_vars_yaml
    containers
    mirrors
    module_file_support
@@ -128,7 +128,7 @@ depend on the spec:
 
 .. code-block:: python
 
-   def setup_run_environment(self, env):
+   def setup_run_environment(self, env: EnvironmentModifications) -> None:
        if self.spec.satisfies("+foo"):
            env.set("FOO", "bar")
 
@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this
 
 .. code-block:: python
 
-   def setup_dependent_run_environment(self, env, dependent_spec):
+   def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
        if dependent_spec.package.extends(self.spec):
            env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
 
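The docs hunks above all pin the same typed hook signatures. A minimal sketch of a package using them, assuming (as the docs examples imply) that the usual ``from spack.package import *`` preamble brings ``EnvironmentModifications`` into scope; the package itself is invented:

```python
# Sketch only: a hypothetical package using the typed environment hooks.
from spack.package import *  # standard package.py preamble (assumed to export the names used below)


class Mylib(Package):  # hypothetical package
    """Illustrates the typed run-environment hook from the docs above."""

    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        # Modifications are recorded here and replayed when the package is
        # loaded (e.g. via `spack load` or a generated module file).
        env.set("MYLIB_ROOT", self.prefix)
        env.prepend_path("PATH", self.prefix.bin)
```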
@@ -5,9 +5,9 @@ sphinx-rtd-theme==3.0.2
 python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
-urllib3==2.3.0
+urllib3==2.4.0
 pytest==8.3.5
 isort==6.0.1
 black==25.1.0
-flake8==7.1.2
+flake8==7.2.0
 mypy==1.11.1
lib/spack/external/__init__.py (vendored) | 13
@@ -11,6 +11,7 @@
 * Homepage: https://altgraph.readthedocs.io/en/latest/index.html
 * Usage: dependency of macholib
 * Version: 0.17.3
+* License: MIT
 
 archspec
 --------
@@ -18,6 +19,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
 * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
+* License: Apache-2.0 or MIT
 
 astunparse
 ----------------
@@ -25,6 +27,7 @@
 * Homepage: https://github.com/simonpercivall/astunparse
 * Usage: Unparsing Python ASTs for package hashes in Spack
 * Version: 1.6.3 (plus modifications)
+* License: PSF-2.0
 * Note: This is in ``spack.util.unparse`` because it's very heavily
   modified, and we want to track coverage for it.
   Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -41,6 +44,7 @@
 * Homepage: https://github.com/python-attrs/attrs
 * Usage: Needed by jsonschema.
 * Version: 22.1.0
+* License: MIT
 
 ctest_log_parser
 ----------------
@@ -48,6 +52,7 @@
 * Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
 * Usage: Functions to parse build logs and extract error messages.
 * Version: Unversioned
+* License: BSD-3-Clause
 * Note: This is a homemade port of Kitware's CTest build handler.
 
 distro
@@ -56,6 +61,7 @@
 * Homepage: https://pypi.python.org/pypi/distro
 * Usage: Provides a more stable linux distribution detection.
 * Version: 1.8.0
+* License: Apache-2.0
 
 jinja2
 ------
@@ -63,6 +69,7 @@
 * Homepage: https://pypi.python.org/pypi/Jinja2
 * Usage: A modern and designer-friendly templating language for Python.
 * Version: 3.0.3 (last version supporting Python 3.6)
+* License: BSD-3-Clause
 
 jsonschema
 ----------
@@ -70,6 +77,7 @@
 * Homepage: https://pypi.python.org/pypi/jsonschema
 * Usage: An implementation of JSON Schema for Python.
 * Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
+* License: MIT
 * Note: We don't include tests or benchmarks; just what Spack needs.
 
 macholib
@@ -78,6 +86,7 @@
 * Homepage: https://macholib.readthedocs.io/en/latest/index.html#
 * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
 * Version: 1.16.2
+* License: MIT
 
 markupsafe
 ----------
@@ -85,6 +94,7 @@
 * Homepage: https://pypi.python.org/pypi/MarkupSafe
 * Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
 * Version: 2.0.1 (last version supporting Python 3.6)
+* License: BSD-3-Clause
 
 pyrsistent
 ----------
@@ -92,6 +102,7 @@
 * Homepage: http://github.com/tobgu/pyrsistent/
 * Usage: Needed by `jsonschema`
 * Version: 0.18.0
+* License: MIT
 
 ruamel.yaml
 ------
@@ -101,6 +112,7 @@
   actively maintained and has more features, including round-tripping
   comments read from config files.
 * Version: 0.17.21
+* License: MIT
 
 six
 ---
@@ -108,5 +120,6 @@
 * Homepage: https://pypi.python.org/pypi/six
 * Usage: Python 2 and 3 compatibility utilities.
 * Version: 1.16.0
+* License: MIT
 
 """
@@ -764,7 +764,7 @@ def copy_tree(
 
     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)
 
     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -15,7 +15,19 @@
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+)
 
 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -1047,19 +1059,28 @@ def __exit__(self, exc_type, exc_value, tb):
         return True
 
 
-class classproperty:
+ClassPropertyType = TypeVar("ClassPropertyType")
+
+
+class classproperty(Generic[ClassPropertyType]):
     """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and take an instance (could be None) and
-    an owner (i.e. the class that originated the instance)
+    the evaluation is injected at creation time and takes an owner (i.e., the class that
+    originated the instance).
     """
 
-    def __init__(self, callback):
+    def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
         self.callback = callback
 
-    def __get__(self, instance, owner):
+    def __get__(self, instance, owner) -> ClassPropertyType:
         return self.callback(owner)
 
 
+#: A type alias that represents either a classproperty descriptor or a constant value of the same
+#: type. This allows derived classes to override a computed class-level property with a constant
+#: value while retaining type compatibility.
+ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
+
+
 class DeprecatedProperty:
     """Data descriptor to error or warn when a deprecated property is accessed.
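For context, a minimal self-contained sketch of the pattern the new ``ClassPropertyType``/``ClassProperty`` machinery enables (``Base`` and ``Child`` are hypothetical; ``classproperty`` and ``ClassProperty`` are the names added above):

```python
# Minimal sketch of the classproperty/ClassProperty pattern from this hunk.
from typing import Optional

from llnl.util.lang import ClassProperty, classproperty


def _default_homepage(cls) -> Optional[str]:
    # Computed from another class attribute, evaluated on attribute access.
    return f"https://example.org/{cls.name}" if cls.name else None


class Base:
    name: Optional[str] = None
    # Annotated as ClassProperty so subclasses may override with a constant.
    homepage: ClassProperty[Optional[str]] = classproperty(_default_homepage)


class Child(Base):
    name = "demo"
    # Overriding the descriptor with a plain string still type-checks,
    # because ClassProperty is a Union of the value type and the descriptor.
    homepage = "https://example.org/custom"


print(Base.homepage)   # None (computed: Base.name is None)
print(Child.homepage)  # "https://example.org/custom" (constant override)
```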
@@ -7,7 +7,7 @@
     "llvm": "clang",
     "intel-oneapi-compilers": "oneapi",
     "llvm-amdgpu": "rocmcc",
-    "intel-oneapi-compiler-classic": "intel",
+    "intel-oneapi-compilers-classic": "intel",
     "acfl": "arm",
 }
 
@@ -15,6 +15,6 @@
     "clang": "llvm",
     "oneapi": "intel-oneapi-compilers",
     "rocmcc": "llvm-amdgpu",
-    "intel": "intel-oneapi-compiler-classic",
+    "intel": "intel-oneapi-compilers-classic",
     "arm": "acfl",
 }
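A quick sanity sketch of why this one-character rename matters: the two dictionaries are meant to be inverses, so the package name has to round-trip (dictionary names are invented; contents abbreviated to the entries visible in the hunks):

```python
# The two mappings in this file translate package names to compiler names
# and back; the misspelled "intel-oneapi-compiler-classic" broke the round-trip.
package_to_compiler = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}
compiler_to_package = {v: k for k, v in package_to_compiler.items()}

# With the fix, looking up "intel" yields a real package name again.
assert compiler_to_package["intel"] == "intel-oneapi-compilers-classic"
```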
@@ -36,9 +36,11 @@
 import multiprocessing
 import os
 import re
+import signal
 import sys
 import traceback
 import types
+import warnings
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
@@ -572,12 +574,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
     module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
     module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
-    # TODO: johnwparent: add package or builder support to define these build tools
-    # for now there is no entrypoint for builders to define these on their
-    # own
 
     if sys.platform == "win32":
-        module.nmake = Executable("nmake")
-        module.msbuild = Executable("msbuild")
+        module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
+        module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
         # analog to configure for win32
         module.cscript = Executable("cscript")
 
@@ -1189,11 +1189,9 @@ def _setup_pkg_and_run(
         if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
-                "The '{}' package cannot find an attribute while trying to {}. "
-                "This might be due to a change in Spack's package format "
-                "to support multiple build-systems for a single package. You can fix this "
-                "by updating the {} recipe, and you can also report the issue as a bug. "
-                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
+                "The '{}' package cannot find an attribute while trying to {}. You can fix this "
+                "by updating the {} recipe, and you can also report the issue as a build-error or "
+                "a bug at https://github.com/spack/spack/issues"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
             error_msg = "{}\n\n{}".format(str(e), error_msg)
@@ -1218,15 +1216,45 @@ def _setup_pkg_and_run(
         input_pipe.close()
 
 
-def start_build_process(pkg, function, kwargs):
+class BuildProcess:
+    def __init__(self, *, target, args) -> None:
+        self.p = multiprocessing.Process(target=target, args=args)
+
+    def start(self) -> None:
+        self.p.start()
+
+    def is_alive(self) -> bool:
+        return self.p.is_alive()
+
+    def join(self, *, timeout: Optional[int] = None):
+        self.p.join(timeout=timeout)
+
+    def terminate(self):
+        # Opportunity for graceful termination
+        self.p.terminate()
+        self.p.join(timeout=1)
+
+        # If the process didn't gracefully terminate, forcefully kill
+        if self.p.is_alive():
+            # TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
+            assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
+            os.kill(self.p.pid, signal.SIGKILL)
+        self.p.join()
+
+    @property
+    def exitcode(self):
+        return self.p.exitcode
+
+
+def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
     """Create a child process to do part of a spack build.
 
     Args:
 
         pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
-        function (typing.Callable): argless function to run in the child
-            process.
+        function (typing.Callable): argless function to run in the child process.
+        timeout: maximum time allowed to finish the execution of function
 
     Usage::
 
@@ -1254,14 +1282,14 @@ def child_fun():
     # Forward sys.stdin when appropriate, to allow toggling verbosity
     if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
         input_fd = Connection(os.dup(sys.stdin.fileno()))
-    mflags = os.environ.get("MAKEFLAGS", False)
-    if mflags:
+    mflags = os.environ.get("MAKEFLAGS")
+    if mflags is not None:
         m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
         if m:
             jobserver_fd1 = Connection(int(m.group(1)))
             jobserver_fd2 = Connection(int(m.group(2)))
 
-    p = multiprocessing.Process(
+    p = BuildProcess(
         target=_setup_pkg_and_run,
         args=(
             serialized_pkg,
@@ -1295,14 +1323,17 @@ def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
         return f"{typ} {abs(p.exitcode)}"
 
+    p.join(timeout=timeout)
+    if p.is_alive():
+        warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
+        p.terminate()
+        p.join()
 
     try:
         child_result = read_pipe.recv()
     except EOFError:
         p.join()
         raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
 
-    p.join()
-
     # If returns a StopPhase, raise it
     if isinstance(child_result, spack.error.StopPhase):
         # do not print
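The escalation logic in ``BuildProcess.terminate`` can be exercised standalone. A POSIX-only sketch of the same join-with-timeout, terminate, then SIGKILL pattern (all names are local to the sketch; only the pattern mirrors the diff):

```python
# Standalone sketch of the timeout pattern start_build_process() gains here.
import multiprocessing
import os
import signal
import time


def slow_task():
    time.sleep(60)  # stand-in for a hung build phase


if __name__ == "__main__":
    p = multiprocessing.Process(target=slow_task)
    p.start()
    p.join(timeout=2)          # give the child 2 seconds
    if p.is_alive():
        p.terminate()          # graceful SIGTERM first
        p.join(timeout=1)
        if p.is_alive():       # still running: force-kill, as BuildProcess.terminate does
            os.kill(p.pid, signal.SIGKILL)
            p.join()
    print("exitcode:", p.exitcode)  # negative value means killed by a signal
```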
@@ -16,6 +16,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when
@@ -846,7 +847,9 @@ def _remove_libtool_archives(self) -> None:
         with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
             f.write("\n".join(libtool_files))
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         if self.spec.platform == "darwin" and macos_version() >= Version("11"):
             # Many configure files rely on matching '10.*' for macOS version
             # detection and fail to add flags if it shows as version 11.
@@ -8,6 +8,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -86,7 +87,9 @@ def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("CARGO_HOME", self.stage.path)
 
     def build(
@@ -47,6 +47,11 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Relative path to compiler wrappers
     compiler_wrapper_link_paths: Dict[str, str] = {}
 
+    #: Optimization flags
+    opt_flags: Sequence[str] = []
+    #: Flags for generating debug information
+    debug_flags: Sequence[str] = []
+
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
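A hedged sketch of how a compiler package might fill in the two new class attributes (``MyCompiler`` and its flag values are invented, GCC/Clang-style flags are just an example, and the import path is assumed from the file being patched here):

```python
# Hypothetical compiler package using the new CompilerPackage attributes.
from typing import Sequence

from spack.build_systems.compiler import CompilerPackage  # assumed module path


class MyCompiler(CompilerPackage):
    """Invented compiler with conventional flag sets."""

    #: Optimization flags
    opt_flags: Sequence[str] = ["-O0", "-O1", "-O2", "-O3", "-Os"]
    #: Flags for generating debug information
    debug_flags: Sequence[str] = ["-g", "-gline-tables-only"]
```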
@@ -8,6 +8,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -68,7 +69,9 @@ class GoBuilder(BuilderWithDefaults):
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
         env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
@@ -23,6 +23,7 @@
 
 import spack.error
 import spack.phase_callbacks
+import spack.spec
 from spack.build_environment import dso_suffix
 from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
@@ -1016,7 +1017,7 @@ def libs(self):
         debug_print(result)
         return result
 
-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: EnvironmentModifications) -> None:
         """Adds environment variables to the generated module file.
 
         These environment variables come from running:
@@ -1049,7 +1050,9 @@ def setup_run_environment(self, env):
             env.set("F77", self.prefix.bin.ifort)
             env.set("F90", self.prefix.bin.ifort)
 
-    def setup_dependent_build_environment(self, env, dependent_spec):
+    def setup_dependent_build_environment(
+        self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
+    ) -> None:
         # NB: This function is overwritten by 'mpi' provider packages:
         #
         # var/spack/repos/builtin/packages/intel-mpi/package.py
@@ -1061,7 +1064,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
         # Handle everything in a callback version.
         self._setup_dependent_env_callback(env, dependent_spec)
 
-    def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
+    def _setup_dependent_env_callback(
+        self,
+        env: EnvironmentModifications,
+        dependent_spec: spack.spec.Spec,
+        compilers_of_client={},
+    ) -> None:
         # Expected to be called from a client's
         # setup_dependent_build_environment(),
         # with args extended to convey the client's compilers as needed.
@@ -8,6 +8,7 @@
 import spack.builder
 import spack.package_base
 import spack.spec
+import spack.util.environment
 import spack.util.executable
 import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
@@ -114,5 +115,7 @@ def install(
     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
@@ -4,6 +4,7 @@
 import spack.builder
 import spack.package_base
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
@@ -57,7 +58,9 @@ def install(
             "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
         )
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         # octave does not like those environment variables to be set:
         env.unset("CC")
         env.unset("CXX")
@@ -106,8 +106,8 @@ def install_component(self, installer_path):
 
         bash = Executable("bash")
 
-        # Installer writes files in ~/intel set HOME so it goes to prefix
-        bash.add_default_env("HOME", self.prefix)
+        # Installer writes files in ~/intel set HOME so it goes to staging directory
+        bash.add_default_env("HOME", join_path(self.stage.path, "home"))
         # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
         bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))
 
@@ -132,7 +132,7 @@ def install_component(self, installer_path):
         if not isdir(install_dir):
             raise RuntimeError("install failed to directory: {0}".format(install_dir))
 
-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: EnvironmentModifications) -> None:
         """Adds environment variables to the generated module file.
 
         These environment variables come from running:
@@ -13,9 +13,9 @@
 import archspec
 
 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import HeaderList, LibraryList, join_path
+from llnl.util.lang import ClassProperty, classproperty, match_predicate
 
 import spack.builder
 import spack.config
@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
             ext_map = view.extensions_layout.extension_map(self.extendee_spec)
             namespaces = set(x.package.py_namespace for x in ext_map.values())
             namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
-            find_namespace = lang.match_predicate(namespace_re)
+            find_namespace = match_predicate(namespace_re)
             if self.py_namespace in namespaces:
                 conflicts = list(x for x in conflicts if not find_namespace(x))
 
@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
                 spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
             )
             if self.py_namespace in remaining_namespaces:
-                namespace_init = lang.match_predicate(
+                namespace_init = match_predicate(
                     r"site-packages/{0}/__init__.py".format(self.py_namespace)
                 )
                 ignore_namespace = True
@@ -324,6 +324,27 @@ def get_external_python_for_prefix(self):
     raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
 
 
+def _homepage(cls: "PythonPackage") -> Optional[str]:
+    """Get the homepage from PyPI if available."""
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/project/{name}/"
+    return None
+
+
+def _url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
+    return None
+
+
+def _list_url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/simple/{name}/"
+    return None
+
+
 class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""
 
@@ -351,25 +372,9 @@ class PythonPackage(PythonExtension):
 
     py_namespace: Optional[str] = None
 
-    @lang.classproperty
-    def homepage(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/project/{name}/"
-        return None
-
-    @lang.classproperty
-    def url(cls) -> Optional[str]:
-        if cls.pypi:
-            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
-        return None
-
-    @lang.classproperty
-    def list_url(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/simple/{name}/"
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
 
     @property
     def python_spec(self) -> Spec:
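A sketch of what the refactored classproperties compute for a hypothetical ``PythonPackage`` with ``pypi = "flake8/flake8-7.2.0.tar.gz"`` (the value is invented for illustration; the URL templates are the ones in the diff):

```python
# Demonstrates the URL templates from _homepage/_url/_list_url above.
pypi = "flake8/flake8-7.2.0.tar.gz"
name = pypi.split("/")[0]

homepage = f"https://pypi.org/project/{name}/"
url = f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}"
list_url = f"https://pypi.org/simple/{name}/"

print(homepage)  # https://pypi.org/project/flake8/
print(url)       # https://files.pythonhosted.org/packages/source/f/flake8/flake8-7.2.0.tar.gz
print(list_url)  # https://pypi.org/simple/flake8/
```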
@@ -3,8 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from typing import Optional, Tuple
 
-import llnl.util.lang as lang
 from llnl.util.filesystem import mkdirp
+from llnl.util.lang import ClassProperty, classproperty
 
 from spack.directives import extends
 
@@ -54,6 +54,32 @@ def install(self, pkg, spec, prefix):
         pkg.module.R(*args)
 
 
+def _homepage(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/package={cls.cran}"
+    elif cls.bioc:
+        return f"https://bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
+def _url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+    return None
+
+
+def _list_url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+    return None
+
+
+def _git(cls: "RPackage") -> Optional[str]:
+    if cls.bioc:
+        return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
 class RPackage(Package):
     """Specialized class for packages that are built using R.
 
@@ -77,24 +103,7 @@ class RPackage(Package):
 
     extends("r")
 
-    @lang.classproperty
-    def homepage(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/package={cls.cran}"
-        elif cls.bioc:
-            return f"https://bioconductor.org/packages/{cls.bioc}"
-
-    @lang.classproperty
-    def url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
-
-    @lang.classproperty
-    def list_url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
-
-    @lang.classproperty
-    def git(cls):
-        if cls.bioc:
-            return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
+    git: ClassProperty[Optional[str]] = classproperty(_git)
@@ -5,8 +5,8 @@
 from typing import Optional, Tuple
 
 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
+from llnl.util.lang import ClassProperty, classproperty
 
 import spack.builder
 import spack.spec
@@ -19,6 +19,12 @@
 from spack.util.executable import Executable, ProcessError
 
 
+def _homepage(cls: "RacketPackage") -> Optional[str]:
+    if cls.racket_name:
+        return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
+    return None
+
+
 class RacketPackage(PackageBase):
     """Specialized class for packages that are built using Racket's
     `raco pkg install` and `raco setup` commands.
@@ -37,13 +43,7 @@ class RacketPackage(PackageBase):
     extends("racket", when="build_system=racket")
 
     racket_name: Optional[str] = None
     parallel = True
 
-    @lang.classproperty
-    def homepage(cls):
-        if cls.racket_name:
-            return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
 
 
 @spack.builder.builder("racket")
@@ -185,10 +185,16 @@ def __init__(self, pkg):
         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a
         # catch-all (*args, **kwargs)
-        def setup_build_environment(self, env):
+        def setup_build_environment(
+            self, env: spack.util.environment.EnvironmentModifications
+        ) -> None:
             return self.pkg_with_dispatcher.setup_build_environment(env)
 
-        def setup_dependent_build_environment(self, env, dependent_spec):
+        def setup_dependent_build_environment(
+            self,
+            env: spack.util.environment.EnvironmentModifications,
+            dependent_spec: spack.spec.Spec,
+        ) -> None:
             return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
 
     return Adapter(pkg)
@@ -402,7 +408,7 @@ def fixup_install(self):
         # do something after the package is installed
         pass
 
-    def setup_build_environment(self, env):
+    def setup_build_environment(self, env: EnvironmentModifications) -> None:
         env.set("MY_ENV_VAR", "my_value")
 
 class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
@@ -14,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set, Union
+from typing import Callable, Dict, List, Optional, Set, Union
 from urllib.request import Request
 
 import llnl.path
@@ -24,6 +24,7 @@
 
 import spack
 import spack.binary_distribution as bindist
+import spack.builder
 import spack.config as cfg
 import spack.environment as ev
 import spack.error
@@ -613,32 +614,40 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
     job_spec, and attempts to copy the files into the directory given
     by job_log_dir.
 
-    Args:
+    Parameters:
         job_spec: spec associated with spack install log
         job_log_dir: path into which build log should be copied
     """
     tty.debug(f"job spec: {job_spec}")
 
-    try:
-        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-    except spack.error.SpackError as e:
-        tty.error(f"Cannot copy logs: {str(e)}")
+    if not job_spec.concrete:
+        tty.warn("Cannot copy artifacts for non-concrete specs")
         return
 
-    # Get the package's archived files
-    archive_files = []
-    archive_root = package_metadata_root / "archived-files"
-    if archive_root.is_dir():
-        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
-    else:
-        msg = "Cannot copy package archived files: archived-files must be a directory"
-        tty.warn(msg)
+    package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    if not os.path.isdir(package_metadata_root):
+        # Fallback to using the stage directory
+        job_pkg = job_spec.package
+        package_metadata_root = pathlib.Path(job_pkg.stage.path)
+        archive_files = spack.builder.create(job_pkg).archive_files
+        tty.warn("Package not installed, falling back to use stage dir")
+        tty.debug(f"stage dir: {package_metadata_root}")
+    else:
+        # Get the package's archived files
+        archive_files = []
+        archive_root = package_metadata_root / "archived-files"
+        if os.path.isdir(archive_root):
+            archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
+        else:
+            tty.debug(f"No archived files detected at {archive_root}")
 
     # Try zipped and unzipped versions of the build log
     build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
+    build_log = package_metadata_root / "spack-build-out.txt"
     build_env_mods = package_metadata_root / "spack-build-env.txt"
 
-    for f in [build_log_zipped, build_env_mods, *archive_files]:
-        copy_files_to_artifacts(str(f), job_log_dir)
+    for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)
 
 
 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -651,11 +660,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     """
     tty.debug(f"test stage: {test_stage}")
     if not os.path.exists(test_stage):
-        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
-        tty.error(msg)
+        tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
        return
 
-    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
+    copy_files_to_artifacts(
+        os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
+    )
 
 
 def download_and_extract_artifacts(url, work_dir) -> str:
@@ -1294,35 +1304,34 @@ def display_broken_spec_messages(base_url, hashes):
     tty.msg(msg)
 
 
-def run_standalone_tests(**kwargs):
+def run_standalone_tests(
+    *,
+    cdash: Optional[CDashHandler] = None,
+    fail_fast: bool = False,
+    log_file: Optional[str] = None,
+    job_spec: Optional[spack.spec.Spec] = None,
+    repro_dir: Optional[str] = None,
+    timeout: Optional[int] = None,
+):
     """Run stand-alone tests on the current spec.
 
-    Arguments:
-        kwargs (dict): dictionary of arguments used to run the tests
-
-    List of recognized keys:
-
-    * "cdash" (CDashHandler): (optional) cdash handler instance
-    * "fail_fast" (bool): (optional) terminate tests after the first failure
-    * "log_file" (str): (optional) test log file name if NOT CDash reporting
-    * "job_spec" (Spec): spec that was built
-    * "repro_dir" (str): reproduction directory
+    Args:
+        cdash: cdash handler instance
+        fail_fast: terminate tests after the first failure
+        log_file: test log file name if NOT CDash reporting
+        job_spec: spec that was built
+        repro_dir: reproduction directory
+        timeout: maximum time (in seconds) that tests are allowed to run
     """
-    cdash = kwargs.get("cdash")
-    fail_fast = kwargs.get("fail_fast")
-    log_file = kwargs.get("log_file")
-
     if cdash and log_file:
         tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None
 
     # Error out but do NOT terminate if there are missing required arguments.
-    job_spec = kwargs.get("job_spec")
     if not job_spec:
         tty.error("Job spec is required to run stand-alone tests")
         return
 
-    repro_dir = kwargs.get("repro_dir")
     if not repro_dir:
         tty.error("Reproduction directory is required for stand-alone tests")
         return
@@ -1331,6 +1340,9 @@ def run_standalone_tests(**kwargs):
     if fail_fast:
         test_args.append("--fail-fast")
 
+    if timeout is not None:
+        test_args.extend(["--timeout", str(timeout)])
+
     if cdash:
         test_args.extend(cdash.args())
     else:
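With the ``**kwargs`` interface gone, call sites must spell out the keyword-only parameters. A sketch of the refactored call (the reproduction path is invented; the parameter names come straight from the diff):

```python
# Sketch: calling the refactored run_standalone_tests.
import spack.ci


def rerun_tests(job_spec) -> None:
    # `job_spec` stands in for a concrete spec obtained elsewhere.
    spack.ci.run_standalone_tests(
        job_spec=job_spec,       # spec that was built (required)
        repro_dir="/tmp/repro",  # hypothetical reproduction directory (required)
        fail_fast=True,          # stop after the first failing test
        timeout=600,             # forwarded to the test command as --timeout 600
    )
```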
@@ -2,9 +2,13 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import copy
+import errno
 import glob
+import gzip
 import json
 import os
 import re
+import shutil
 import sys
 import time
 from collections import deque
@@ -25,6 +29,7 @@
 import spack.mirrors.mirror
 import spack.schema
 import spack.spec
+import spack.util.compression as compression
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
@@ -40,22 +45,67 @@
 _urlopen = web_util.urlopen
 
 
-def copy_files_to_artifacts(src, artifacts_dir):
+def copy_gzipped(glob_or_path: str, dest: str) -> None:
+    """Copy all of the files in the source glob/path to the destination.
+
+    Args:
+        glob_or_path: path to file to test
+        dest: destination path to copy to
+    """
+
+    files = glob.glob(glob_or_path)
+    if not files:
+        raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
+    if len(files) > 1 and not os.path.isdir(dest):
+        raise ValueError(
+            "'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
+        )
+
+    def is_gzipped(path):
+        with open(path, "rb") as fd:
+            return compression.GZipFileType().matches_magic(fd)
+
+    for src in files:
+        if is_gzipped(src):
+            fs.copy(src, dest)
+        else:
+            # Compress and copy in one step
+            src_name = os.path.basename(src)
+            if os.path.isdir(dest):
+                zipped = os.path.join(dest, f"{src_name}.gz")
+            elif not dest.endswith(".gz"):
+                zipped = f"{dest}.gz"
+            else:
+                zipped = dest
+
+            with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
+                shutil.copyfileobj(fin, fout)
+
+
+def copy_files_to_artifacts(
+    src: str, artifacts_dir: str, *, compress_artifacts: bool = False
+) -> None:
     """
     Copy file(s) to the given artifacts directory
 
-    Parameters:
+    Args:
         src (str): the glob-friendly path expression for the file(s) to copy
         artifacts_dir (str): the destination directory
+        compress_artifacts (bool): option to compress copied artifacts using Gzip
     """
     try:
-        fs.copy(src, artifacts_dir)
+        if compress_artifacts:
+            copy_gzipped(src, artifacts_dir)
+        else:
+            fs.copy(src, artifacts_dir)
     except Exception as err:
-        msg = (
-            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
-            f"exception: {str(err)}"
+        tty.warn(
+            (
+                f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
+                f"exception: {str(err)}"
+            )
         )
-        tty.warn(msg)
 
 
 def win_quote(quote_str: str) -> str:
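The copy-or-compress behavior ``copy_gzipped()`` adds can be shown standalone: already-gzipped sources are copied as-is, everything else is gzip-compressed on the way. This sketch uses the gzip magic number directly instead of Spack's compression helper; paths are invented for illustration:

```python
import gzip
import os
import shutil


def copy_gzipped(src: str, dest_dir: str) -> None:
    # Detect gzip by its two-byte magic number instead of trusting the name.
    with open(src, "rb") as fd:
        already_gzipped = fd.read(2) == b"\x1f\x8b"
    if already_gzipped:
        shutil.copy(src, dest_dir)  # keep the existing .gz as-is
    else:
        target = os.path.join(dest_dir, os.path.basename(src) + ".gz")
        with open(src, "rb") as fin, gzip.open(target, "wb") as fout:
            shutil.copyfileobj(fin, fout)  # compress and copy in one step
```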
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
         all_headers (bool): show headers even when arch/compiler aren't defined
         status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``
-
+        specfile_format (bool): if True, prefix each spec with its specfile format version
     """
 
     def get_arg(name, default=None):
@@ -458,6 +458,7 @@ def get_arg(name, default=None):
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
     status_fn = get_arg("status_fn", None)
+    specfile_format = get_arg("specfile_format", False)
 
     decorator = get_arg("decorator", None)
     if decorator is None:
@@ -479,6 +480,9 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + vfmt + ffmt
 
+    if specfile_format:
+        format_string = "[{specfile_version}] " + format_string
+
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
@@ -76,9 +76,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=False,
help="regenerate buildcache index after building package(s)",
)
push.add_argument(
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
)
push.add_argument(
"--only",
default="package,dependencies",

@@ -192,28 +189,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=lambda: spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
# Unfortunately there are 3 ways to do the same thing here:
check_specs = check.add_mutually_exclusive_group()
check_specs.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)
check_specs.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
)

arguments.add_common_arguments(check, ["specs"])

check.set_defaults(func=check_fn)

# Download tarball and specfile
download = subparsers.add_parser("download", help=download_fn.__doc__)
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
download_spec_or_specfile.add_argument(
"-s", "--spec", help="download built tarball for spec from mirror"
)
download_spec_or_specfile.add_argument(
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
)
download.add_argument("-s", "--spec", help="download built tarball for spec from mirror")
download.add_argument(
"-p",
"--path",

@@ -223,28 +206,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
)
download.set_defaults(func=download_fn)

# Get buildcache name
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
)
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)

# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
)
savespecfile.add_argument(
"-s",
"--specs",

@@ -380,14 +345,8 @@ def _specs_to_be_packaged(

def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)

if args.specs or args.spec_file:
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
if args.specs:
roots = _matching_specs(spack.cmd.parse_specs(args.specs))
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

@@ -529,22 +488,7 @@ def check_fn(args: argparse.Namespace):
this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
if args.spec_file:
specs_arg = (
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
)
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
f"Use `spack buildcache check {specs_arg}` instead."
)
elif args.spec:
specs_arg = args.spec
tty.warn(
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
f"Use `spack buildcache check {specs_arg}` instead."
)
else:
specs_arg = args.specs
specs_arg = args.specs

if specs_arg:
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))

@@ -578,13 +522,7 @@ def download_fn(args):
code indicates that the command failed to download at least one of the required buildcache
components
"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
specs = _matching_specs(spack.cmd.parse_specs(args.spec))

if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache")

@@ -593,15 +531,6 @@ def download_fn(args):
sys.exit(1)


def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))


def save_specfile_fn(args):
"""get full spec for dependencies and write them to files in the specified output directory

@@ -609,13 +538,7 @@ def save_specfile_fn(args):
successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero
"""
if args.root_specfile:
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)

specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
specs = spack.cmd.parse_specs(args.root_spec)

if len(specs) != 1:
tty.die("a single spec argument is required to save specfile")
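The `os.path.sep` check in `check_fn` above exists so that a bare filename passed positionally is still recognized as a specfile path rather than a package name. A self-contained sketch of that normalization (the helper name is ours, not Spack's):

```python
# Hypothetical helper reproducing the normalization in check_fn: a bare
# filename gains a leading "./" so the spec parser treats it as a path,
# not as a package name.
import os


def as_specfile_arg(spec_file: str) -> str:
    return spec_file if os.path.sep in spec_file else os.path.join(".", spec_file)


assert as_specfile_arg("foo.spec.json") == os.path.join(".", "foo.spec.json")
assert as_specfile_arg(os.path.join("subdir", "foo.spec.json")).count(os.path.sep) == 1
```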
@@ -160,6 +160,12 @@ def setup_parser(subparser):
default=False,
help="stop stand-alone tests after the first failure",
)
rebuild.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)
rebuild.set_defaults(func=ci_rebuild)
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])

@@ -447,7 +453,7 @@ def ci_rebuild(args):

# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--only=package"]
root_install_args = install_args + ["--keep-stage", "--only=package"]

if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.

@@ -487,6 +493,9 @@ def ci_rebuild(args):
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Clear the stage directory
spack.stage.purge()

# If the installation succeeded and we're running stand-alone tests for
# the package, run them and copy the output. Failures of any kind should
# *not* terminate the build process or preclude creating the build cache.

@@ -521,6 +530,7 @@ def ci_rebuild(args):
fail_fast=args.fail_fast,
log_file=log_file,
repro_dir=repro_dir,
timeout=args.timeout,
)

except Exception as err:
@@ -63,7 +63,7 @@ def setup_parser(subparser):
)

# List
list_parser = sp.add_parser("list", help="list available compilers")
list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
list_parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
)

@@ -216,5 +216,6 @@ def compiler(parser, args):
"rm": compiler_remove,
"info": compiler_info,
"list": compiler_list,
"ls": compiler_list,
}
action[args.compiler_command](args)
@@ -102,7 +102,7 @@ def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Sp
)
else:
# look up the maximum version so infinity versions are preferred for develop
version = max(spec.package_class.versions.keys())
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
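Why `max()` here prefers `develop`: Spack's infinity versions compare greater than any numeric release, as the comment above notes. A small illustrative check (exact comparison semantics assumed, not shown in this diff):

```python
# Hedged sketch: infinity versions such as "develop" sort above numeric
# releases, so max() picks them when a develop-like version exists.
import spack.version

versions = [spack.version.Version("1.2.0"), spack.version.Version("develop")]
assert max(versions) == spack.version.Version("develop")
```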
@@ -62,7 +62,7 @@ def setup_parser(subparser):
"package Spack knows how to find."
)

sp.add_parser("list", help="list detectable packages, by repository and name")
sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")

read_cray_manifest = sp.add_parser(
"read-cray-manifest",

@@ -259,6 +259,7 @@ def external(parser, args):
action = {
"find": external_find,
"list": external_list,
"ls": external_list,
"read-cray-manifest": external_read_cray_manifest,
}
action[args.external_command](args)
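Both this hunk and the `spack compiler` one above wire the new `ls` alias through the same dispatch-table pattern: the parser alias and a duplicate dictionary key point at one handler. A self-contained reproduction (the handler body is a stand-in):

```python
# Minimal reproduction of the list/ls aliasing: both keys map to the
# same handler, so the alias needs no extra logic at dispatch time.
def external_list(args) -> None:
    print("listing detectable packages...")  # stand-in body


action = {"list": external_list, "ls": external_list}
action["ls"](None)  # identical to action["list"](None)
```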
@@ -51,6 +51,12 @@ def setup_parser(subparser):
"-I", "--install-status", action="store_true", help="show install status of packages"
)

subparser.add_argument(
"--specfile-format",
action="store_true",
help="show the specfile format for installed deps",
)

subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
)

@@ -280,6 +286,7 @@ def root_decorator(spec, string):
show_flags=True,
decorator=root_decorator,
variants=True,
specfile_format=args.specfile_format,
)

print()

@@ -301,6 +308,7 @@ def root_decorator(spec, string):
namespace=True,
show_flags=True,
variants=True,
specfile_format=args.specfile_format,
)
print()

@@ -390,7 +398,12 @@ def find(parser, args):
if args.show_concretized:
display_results += concretized_but_not_installed
cmd.display_specs(
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
display_results,
args,
decorator=decorator,
all_headers=True,
status_fn=status_fn,
specfile_format=args.specfile_format,
)

# print number of installed packages last (as the list may be long)
@@ -136,20 +136,7 @@ def solve(parser, args):
setup_only = set(show) == {"asp"}
unify = spack.config.get("concretizer:unify")
allow_deprecated = spack.config.get("config:deprecated", False)
if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
if unify == "when_possible":
for idx, result in enumerate(
solver.solve_in_rounds(
specs,

@@ -166,3 +153,29 @@ def solve(parser, args):
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)
elif unify:
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for spec in specs:
tty.msg("SOLVING SPEC:", spec)
result = solver.solve(
[spec],
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
@@ -65,6 +65,12 @@ def setup_parser(subparser):
run_parser.add_argument(
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
)
run_parser.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)

cd_group = run_parser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ["clean", "dirty"])

@@ -176,7 +182,7 @@ def test_run(args):
for spec in specs:
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
tty.warn(f"No {explicit_str}installed packages match spec {spec}")

# TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process

@@ -192,7 +198,7 @@ def test_run(args):
# test_stage_dir
test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
test_suite.ensure_stage()
tty.msg("Spack test %s" % test_suite.name)
tty.msg(f"Spack test {test_suite.name}")

# Set up reporter
setattr(args, "package", [s.format() for s in test_suite.specs])

@@ -204,6 +210,7 @@ def test_run(args):
dirty=args.dirty,
fail_first=args.fail_first,
externals=args.externals,
timeout=args.timeout,
)
@@ -18,6 +18,10 @@ class Languages(enum.Enum):


class CompilerAdaptor:
"""Provides access to compiler attributes via `Package.compiler`. Useful for
packages which do not yet access compiler properties via `self.spec[language]`.
"""

def __init__(
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
) -> None:

@@ -79,6 +83,14 @@ def implicit_rpaths(self) -> List[str]:
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
return result

@property
def opt_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.opt_flags

@property
def debug_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.debug_flags

@property
def openmp_flag(self) -> str:
return next(iter(self.compilers.values())).package.openmp_flag

@@ -140,7 +152,7 @@ def c17_flag(self) -> str:
@property
def c23_flag(self) -> str:
return self.compilers[Languages.C].package.standard_flag(
language=Languages.C.value, standard="17"
language=Languages.C.value, standard="23"
)

@property

@@ -190,6 +202,10 @@ def f77(self):
self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
return self.compilers[Languages.FORTRAN].package.fortran

@property
def stdcxx_libs(self):
return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)


class DeprecatedCompiler(lang.DeprecatedProperty):
def __init__(self) -> None:
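The new flag properties above all delegate to the package of the first compiler in the mapping via `next(iter(...))`. A condensed, runnable sketch of that delegation pattern (types simplified; the fake package is ours):

```python
# Hedged sketch of the delegation: next(iter(...)) picks the first
# language's compiler, and the property forwards to its package.
from typing import Dict, List


class FakeCompilerPackage:
    opt_flags: List[str] = ["-O", "-O0", "-O1", "-O2", "-O3"]


class MiniAdaptor:
    def __init__(self, compilers: Dict[str, FakeCompilerPackage]) -> None:
        self.compilers = compilers

    @property
    def opt_flags(self) -> List[str]:
        return next(iter(self.compilers.values())).opt_flags


assert MiniAdaptor({"c": FakeCompilerPackage()}).opt_flags[-1] == "-O3"
```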
@@ -149,12 +149,12 @@ def _getfqdn():
return socket.getfqdn()


def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = {
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
vn.Version("7"): spack.spec.SpecfileV4,
vn.Version("8"): spack.spec.SpecfileV5,
vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1,
vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3,
vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4,
vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5,
}
return reader_cls[version]

@@ -824,7 +824,7 @@ def check(cond, msg):
db = fdata["database"]
check("version" in db, "no 'version' in JSON DB.")

self.db_version = vn.Version(db["version"])
self.db_version = vn.StandardVersion.from_string(db["version"])
if self.db_version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
elif self.db_version < _DB_VERSION:
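After this change the lookup table is keyed by `StandardVersion`, so callers must build keys the same way. A hedged usage sketch (the import path and key equality semantics are assumed from this hunk, not verified against the full source):

```python
# Hedged sketch: looking up the specfile reader for format version 8.
# Assumes StandardVersion instances built from equal strings compare
# equal, matching the dictionary keys above.
import spack.spec
import spack.version as vn
from spack.database import reader  # import path assumed

reader_cls = reader(vn.StandardVersion.from_string("8"))
assert reader_cls is spack.spec.SpecfileV5
```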
@@ -20,7 +20,7 @@
import sys
from typing import Dict, List, Optional, Set, Tuple, Union

import llnl.util.tty
from llnl.util import tty

import spack.config
import spack.error

@@ -93,14 +93,13 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details
msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
tty.warn(f"Constructed spec for {spec.name} does not have a string representation")
return False

try:
spack.spec.Spec(str(spec))
except spack.error.SpackError:
llnl.util.tty.warn(
tty.warn(
"Constructed spec has a string representation but the string"
" representation does not evaluate to a valid spec: {0}".format(str(spec))
)

@@ -109,20 +108,24 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True


def path_to_dict(search_paths: List[str]):
def path_to_dict(search_paths: List[str]) -> Dict[str, str]:
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib = {}
path_to_lib: Dict[str, str] = {}
# Reverse order of search directories so that a lib in the first
# entry overrides later entries
for search_path in reversed(search_paths):
try:
with os.scandir(search_path) as entries:
path_to_lib.update(
{entry.path: entry.name for entry in entries if entry.is_file()}
)
dir_iter = os.scandir(search_path)
except OSError as e:
msg = f"cannot scan '{search_path}' for external software: {str(e)}"
llnl.util.tty.debug(msg)
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
continue
with dir_iter as entries:
for entry in entries:
try:
if entry.is_file():
path_to_lib[entry.path] = entry.name
except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}")

return path_to_lib
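The point of the rewritten loop: the per-entry `try` means a single unreadable entry (e.g. a broken symlink raising `OSError` from `is_file()`) no longer aborts the whole directory scan. A hedged usage sketch (directory names are placeholders and the import path is assumed, since file headers are not shown in this diff):

```python
# Hedged usage sketch of the rewritten scanner.
from spack.detection.path import path_to_dict  # import path assumed

files = path_to_dict(["/usr/local/bin", "/usr/bin"])
# files maps each regular file's full path to its basename, e.g.
# {"/usr/bin/gcc": "gcc", ...}; unreadable paths are logged at debug
# level and skipped instead of raising.
```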
@@ -610,7 +610,7 @@ def _execute_patch(
return _execute_patch


def conditional(*values: List[Any], when: Optional[WhenType] = None):
def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
"""Conditional values that can be used in variant declarations."""
# _make_when_spec returns None when the condition is statically false.
when = _make_when_spec(when)
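A typical call site the tightened `Union[str, bool]` signature is meant to cover, as used inside a package recipe's variant declaration. This is an illustrative fragment, not from this diff; the variant name and values are invented, and it only runs in a package-class context where the `variant` and `conditional` directives are in scope:

```python
# Hedged sketch (hypothetical package recipe fragment): "docs" is only
# a valid choice for this variant once the spec is at version 2.0+.
variant(
    "build_target",
    default="lib",
    values=("lib", conditional("docs", when="@2.0:")),
    description="what to build (illustrative variant for this sketch)",
)
```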
@@ -144,7 +144,6 @@ class Foo(Package):
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names

if isinstance(dicts, str):
dicts = (dicts,)
@@ -31,7 +31,6 @@
import spack.repo
import spack.schema.env
import spack.spec
import spack.spec_list
import spack.store
import spack.user_environment as uenv
import spack.util.environment

@@ -44,10 +43,10 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables

from ..enums import ConfigScopePriority
from .list import SpecList, SpecListError, SpecListParser

SpecPair = spack.concretize.SpecPair

@@ -932,8 +931,10 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self.new_specs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {}

#: Parser for spec lists
self._spec_lists_parser = SpecListParser()
#: Specs from "spack.yaml"
self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
self.spec_lists: Dict[str, SpecList] = {}
#: User specs from the last concretization
self.concretized_user_specs: List[Spec] = []
#: Roots associated with the last concretization, in order

@@ -1001,26 +1002,6 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)

def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True
assert len(entry) == 1

if when:
for name, spec_list in entry.items():
if name == "when":
continue
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs

def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.

@@ -1082,21 +1063,24 @@ def _process_concrete_includes(self):

def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
self.views = {}
self._sync_speclists()
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()

for item in spack.config.get("definitions", []):
self._process_definition(item)
def _sync_speclists(self):
self.spec_lists = {}
self.spec_lists.update(
self._spec_lists_parser.parse_definitions(
data=spack.config.CONFIG.get("definitions", [])
)
)

env_configuration = self.manifest[TOP_LEVEL_KEY]
spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs(
name=user_speclist_name, yaml_list=spec_list
)
self.spec_lists[user_speclist_name] = user_specs

self._process_view(spack.config.get("view", True))
self._process_concrete_includes()

def all_concretized_user_specs(self) -> List[Spec]:
"""Returns all of the concretized user specs of the environment and

@@ -1167,9 +1151,7 @@ def clear(self, re_read=False):
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
and needs to be maintained when re-reading an existing environment.
"""
self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()

self.spec_lists = {}
self._dev_specs = {}
self.concretized_order = []  # roots of last concretize, in order
self.concretized_user_specs = []  # user specs from last concretize

@@ -1276,22 +1258,6 @@ def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)

def update_stale_references(self, from_list=None):
"""Iterate over spec lists updating references."""
if not from_list:
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)

# spec_lists is an OrderedDict to ensure lists read from the manifest
# are maintained in order, hence, all list entries after the modified
# list may refer to the modified list requiring stale references to be
# updated.
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1 :], index + 1
):
new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
speclist.update_reference(new_reference)

def add(self, user_spec, list_name=user_speclist_name):
"""Add a single user_spec (non-concretized) to the Environment

@@ -1311,18 +1277,17 @@ def add(self, user_spec, list_name=user_speclist_name):
elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
virtuals = spack.repo.PATH.provider_index.providers.keys()
if spec.name not in virtuals:
msg = "no such package: %s" % spec.name
raise SpackEnvironmentError(msg)
raise SpackEnvironmentError(f"no such package: {spec.name}")

list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list
if not existing:
list_to_change.add(str(spec))
self.update_stale_references(list_name)
if list_name == user_speclist_name:
self.manifest.add_user_spec(str(user_spec))
else:
self.manifest.add_definition(str(user_spec), list_name=list_name)
self._sync_speclists()

return bool(not existing)

@@ -1366,18 +1331,17 @@ def change_existing_spec(
"There are no specs named {0} in {1}".format(match_spec.name, list_name)
)
elif len(matches) > 1 and not allow_changing_multiple_specs:
raise ValueError("{0} matches multiple specs".format(str(match_spec)))
raise ValueError(f"{str(match_spec)} matches multiple specs")

for idx, spec in matches:
override_spec = Spec.override(spec, change_spec)
self.spec_lists[list_name].replace(idx, str(override_spec))
if list_name == user_speclist_name:
self.manifest.override_user_spec(str(override_spec), idx=idx)
else:
self.manifest.override_definition(
str(spec), override=str(override_spec), list_name=list_name
)
self.update_stale_references(from_list=list_name)
self._sync_speclists()

def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""

@@ -1405,22 +1369,17 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")

old_specs = set(self.user_specs)
new_specs = set()

# Remove specs from the appropriate spec list
for spec in matches:
if spec not in list_to_change:
continue
try:
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError as e:
# define new specs list
new_specs = set(self.user_specs)
except SpecListError as e:
msg = str(e)
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
else:
if list_name == user_speclist_name:

@@ -1428,7 +1387,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
else:
self.manifest.remove_definition(str(spec), list_name=list_name)

# If force, update stale concretized specs
# Recompute "definitions" and user specs
self._sync_speclists()
new_specs = set(self.user_specs)

# If 'force', update stale concretized specs
for spec in old_specs - new_specs:
if force and spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec)

@@ -1642,23 +1605,6 @@ def _concretize_separately(self, tests=False):

# Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict())

# Re-attach information on test dependencies
if tests:
# This is slow, but the information on test dependency is lost
# after unification or when reading from a lockfile.
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
)

return concretized_specs

@property

@@ -2827,6 +2773,8 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
item[list_name].append(user_spec)
break

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def remove_definition(self, user_spec: str, list_name: str) -> None:

@@ -2853,6 +2801,8 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:

@@ -2878,6 +2828,8 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2,36 +2,24 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import List
from typing import Any, Dict, List, NamedTuple, Optional, Union

import spack.spec
import spack.util.spack_yaml
import spack.variant
from spack.error import SpackError
from spack.spec import Spec


class SpecList:
def __init__(self, name="specs", yaml_list=None, reference=None):
# Normalize input arguments
yaml_list = yaml_list or []
reference = reference or {}

def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
self.name = name
self._reference = reference  # TODO: Do we need defensive copy here?

# Validate yaml_list before assigning
if not all(isinstance(s, str) or isinstance(s, (list, dict)) for s in yaml_list):
raise ValueError(
"yaml_list can contain only valid YAML types! Found:\n %s"
% [type(s) for s in yaml_list]
)
self.yaml_list = yaml_list[:]

self.yaml_list = yaml_list[:] if yaml_list is not None else []
# Expansions can be expensive to compute and difficult to keep updated
# We cache results and invalidate when self.yaml_list changes
self._expanded_list = None
self.specs_as_yaml_list = expanded_list or []
self._constraints = None
self._specs = None
self._specs: Optional[List[Spec]] = None

@property
def is_matrix(self):

@@ -40,12 +28,6 @@ def is_matrix(self):
return True
return False

@property
def specs_as_yaml_list(self):
if self._expanded_list is None:
self._expanded_list = self._expand_references(self.yaml_list)
return self._expanded_list

@property
def specs_as_constraints(self):
if self._constraints is None:

@@ -62,7 +44,7 @@ def specs_as_constraints(self):
@property
def specs(self) -> List[Spec]:
if self._specs is None:
specs = []
specs: List[Spec] = []
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:

@@ -74,12 +56,13 @@ def specs(self) -> List[Spec]:

return self._specs

def add(self, spec):
self.yaml_list.append(str(spec))
def add(self, spec: Spec):
spec_str = str(spec)
self.yaml_list.append(spec_str)

# expanded list can be updated without invalidation
if self._expanded_list is not None:
self._expanded_list.append(str(spec))
if self.specs_as_yaml_list is not None:
self.specs_as_yaml_list.append(spec_str)

# Invalidate cache variables when we change the list
self._constraints = None

@@ -101,83 +84,18 @@ def remove(self, spec):
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
self.specs_as_yaml_list.remove(item)

# invalidate cache variables when we change the list
self._expanded_list = None
self._constraints = None
self._specs = None

def replace(self, idx: int, spec: str):
"""Replace the existing spec at the index with the new one.

Args:
idx: index of the spec to replace in the speclist
spec: new spec
"""
self.yaml_list[idx] = spec

# invalidate cache variables when we change the list
self._expanded_list = None
self._constraints = None
self._specs = None

def extend(self, other, copy_reference=True):
def extend(self, other: "SpecList", copy_reference=True) -> None:
self.yaml_list.extend(other.yaml_list)
self._expanded_list = None
self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
self._constraints = None
self._specs = None

if copy_reference:
self._reference = other._reference

def update_reference(self, reference):
self._reference = reference
self._expanded_list = None
self._constraints = None
self._specs = None

def _parse_reference(self, name):
sigil = ""
name = name[1:]

# Parse specs as constraints
if name.startswith("^") or name.startswith("%"):
sigil = name[0]
name = name[1:]

# Make sure the reference is valid
if name not in self._reference:
msg = f"SpecList '{self.name}' refers to named list '{name}'"
msg += " which does not appear in its reference dict."
raise UndefinedReferenceError(msg)

return (name, sigil)

def _expand_references(self, yaml):
if isinstance(yaml, list):
ret = []

for item in yaml:
# if it's a reference, expand it
if isinstance(item, str) and item.startswith("$"):
# replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item)

referent = [
_sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
]
ret.extend(referent)
else:
# else just recurse
ret.append(self._expand_references(item))
return ret
elif isinstance(yaml, dict):
# There can't be expansions in dicts
return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
else:
# Strings are just returned
return yaml

def __len__(self):
return len(self.specs)

@@ -251,6 +169,111 @@ def _sigilify(item, sigil):
return sigil + item


class Definition(NamedTuple):
name: str
yaml_list: List[Union[str, Dict]]
when: Optional[str]


class SpecListParser:
"""Parse definitions and user specs from data in environments"""

def __init__(self):
self.definitions: Dict[str, SpecList] = {}

def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
definitions_from_yaml: Dict[str, List[Definition]] = {}
for item in data:
value = self._parse_yaml_definition(item)
definitions_from_yaml.setdefault(value.name, []).append(value)

self.definitions = {}
self._build_definitions(definitions_from_yaml)
return self.definitions

def parse_user_specs(self, *, name, yaml_list) -> SpecList:
definition = Definition(name=name, yaml_list=yaml_list, when=None)
return self._speclist_from_definitions(name, [definition])

def _parse_yaml_definition(self, yaml_entry) -> Definition:
when_string = yaml_entry.get("when")

if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
attributes = ", ".join(x for x in yaml_entry if x != "when")
error_msg = f"definition must have a single attribute, got many: {attributes}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

for name, yaml_list in yaml_entry.items():
if name == "when":
continue
return Definition(name=name, yaml_list=yaml_list, when=when_string)

# If we are here, it means only "when" is in the entry
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
error_msg = "definition must have a single attribute, got none"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
for name, definitions in definitions_from_yaml.items():
self.definitions[name] = self._speclist_from_definitions(name, definitions)

def _speclist_from_definitions(self, name, definitions) -> SpecList:
combined_yaml_list = []
for def_part in definitions:
if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
continue
combined_yaml_list.extend(def_part.yaml_list)
expanded_list = self._expand_yaml_list(combined_yaml_list)
return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)

def _expand_yaml_list(self, raw_yaml_list):
result = []
for item in raw_yaml_list:
if isinstance(item, str) and item.startswith("$"):
result.extend(self._expand_reference(item))
continue

value = item
if isinstance(item, dict):
value = self._expand_yaml_matrix(item)
result.append(value)
return result

def _expand_reference(self, item: str):
sigil, name = "", item[1:]
if name.startswith("^") or name.startswith("%"):
sigil, name = name[0], name[1:]

if name not in self.definitions:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
error_msg = f"trying to expand the name '{name}', which is not defined yet"
raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")

value = self.definitions[name].specs_as_yaml_list
if not sigil:
return value
return [_sigilify(x, sigil) for x in value]

def _expand_yaml_matrix(self, matrix_yaml):
extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
if extra_attributes:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

if "matrix" not in matrix_yaml:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = "matrix is missing the 'matrix' attribute"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

# Assume data has been validated against the YAML schema
result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
if "exclude" in matrix_yaml:
result["exclude"] = matrix_yaml["exclude"]
return result


class SpecListError(SpackError):
"""Error class for all errors related to SpecList objects."""
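A hedged end-to-end example of the new parser, using data shaped like the `definitions` section of a `spack.yaml`. The module path follows the `from .list import ...` hunk in the environment changes above; the `when` expression and the expected output order are illustrative:

```python
# Hedged sketch: definitions are parsed first, then user specs may
# expand "$name" references against them.
from spack.environment.list import SpecListParser  # path per the import hunk

parser = SpecListParser()
parser.parse_definitions(
    data=[
        {"compilers": ["gcc", "clang"]},
        {"mpis": ["mpich"], "when": 'platform == "linux"'},  # illustrative condition
    ]
)
user_specs = parser.parse_user_specs(name="specs", yaml_list=["zlib", "$compilers"])
print(user_specs.specs_as_yaml_list)  # expected: ["zlib", "gcc", "clang"]
```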
@@ -49,10 +49,23 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
cmds += 'set "SPACK_ENV=%s"\n' % env.path
if view:
cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
if prompt:
old_prompt = os.environ.get("SPACK_OLD_PROMPT")
if not old_prompt:
old_prompt = os.environ.get("PROMPT")
cmds += f'set "SPACK_OLD_PROMPT={old_prompt}"\n'
cmds += f'set "PROMPT={prompt} $P$G"\n'
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
if view:
cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
if prompt:
cmds += (
"function global:prompt { $pth = $(Convert-Path $(Get-Location))"
' | Split-Path -leaf; if(!"$Env:SPACK_OLD_PROMPT") '
'{$Env:SPACK_OLD_PROMPT="[spack] PS $pth>"}; '
'"%s PS $pth>"}\n' % prompt
)
else:
bash_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=True)
zsh_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=False, zsh=True)

@@ -107,10 +120,19 @@ def deactivate_header(shell):
cmds += 'set "SPACK_ENV="\n'
cmds += 'set "SPACK_ENV_VIEW="\n'
# TODO: despacktivate
# TODO: prompt
old_prompt = os.environ.get("SPACK_OLD_PROMPT")
if old_prompt:
cmds += f'set "PROMPT={old_prompt}"\n'
cmds += 'set "SPACK_OLD_PROMPT="\n'
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
cmds += (
"function global:prompt { $pth = $(Convert-Path $(Get-Location))"
' | Split-Path -leaf; $spack_prompt = "[spack] $pth >"; '
'if("$Env:SPACK_OLD_PROMPT") {$spack_prompt=$Env:SPACK_OLD_PROMPT};'
" $spack_prompt}\n"
)
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
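The batch branch above saves the pre-activation prompt once and restores it on deactivate. A condensed, self-contained sketch of that round trip (the function names are ours, not Spack's; it mirrors the hunk, including writing the literal `None` if neither variable is set):

```python
# Hedged reproduction of the save/restore logic for cmd.exe prompts.
import os


def activate_cmds(prompt: str) -> str:
    old = os.environ.get("SPACK_OLD_PROMPT") or os.environ.get("PROMPT")
    return f'set "SPACK_OLD_PROMPT={old}"\n' f'set "PROMPT={prompt} $P$G"\n'


def deactivate_cmds() -> str:
    old = os.environ.get("SPACK_OLD_PROMPT")
    if not old:
        return ""
    return f'set "PROMPT={old}"\n' 'set "SPACK_OLD_PROMPT="\n'


print(activate_cmds("[my-env]"), deactivate_cmds(), sep="")
```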
@@ -27,11 +27,14 @@
import os
import re
import shutil
import sys
import time
import urllib.error
import urllib.parse
import urllib.request
import urllib.response
from pathlib import PurePath
from typing import List, Optional
from typing import Callable, List, Mapping, Optional

import llnl.url
import llnl.util

@@ -219,6 +222,114 @@ def mirror_id(self):
"""BundlePackages don't have a mirror id."""


def _format_speed(total_bytes: int, elapsed: float) -> str:
"""Return a human-readable average download speed string."""
elapsed = 1 if elapsed <= 0 else elapsed  # avoid divide by zero
speed = total_bytes / elapsed
if speed >= 1e9:
return f"{speed / 1e9:6.1f} GB/s"
elif speed >= 1e6:
return f"{speed / 1e6:6.1f} MB/s"
elif speed >= 1e3:
return f"{speed / 1e3:6.1f} KB/s"
return f"{speed:6.1f} B/s"


def _format_bytes(total_bytes: int) -> str:
"""Return a human-readable total bytes string."""
if total_bytes >= 1e9:
return f"{total_bytes / 1e9:7.2f} GB"
elif total_bytes >= 1e6:
return f"{total_bytes / 1e6:7.2f} MB"
elif total_bytes >= 1e3:
return f"{total_bytes / 1e3:7.2f} KB"
return f"{total_bytes:7.2f} B"


class FetchProgress:
#: Characters to rotate in the spinner.
spinner = ["|", "/", "-", "\\"]

def __init__(
self,
total_bytes: Optional[int] = None,
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> None:
"""Initialize a FetchProgress instance.

Args:
total_bytes: Total number of bytes to download, if known.
enabled: Whether to print progress information.
get_time: Function to get the current time."""
#: Number of bytes downloaded so far.
self.current_bytes = 0
#: Delta time between progress prints
self.delta = 0.1
#: Whether to print progress information.
self.enabled = enabled
#: Function to get the current time.
self.get_time = get_time
#: Time of last progress print to limit output
self.last_printed = 0.0
#: Time of start of download
self.start_time = get_time() if enabled else 0.0
#: Total number of bytes to download, if known.
self.total_bytes = total_bytes if total_bytes and total_bytes > 0 else 0
#: Index of spinner character to print (used if total bytes is unknown)
self.index = 0

@classmethod
def from_headers(
cls,
headers: Mapping[str, str],
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> "FetchProgress":
"""Create a FetchProgress instance from HTTP headers."""
# headers.get is case-insensitive if it's from an HTTPResponse object.
content_length = headers.get("Content-Length")
try:
total_bytes = int(content_length) if content_length else None
except ValueError:
total_bytes = None
return cls(total_bytes=total_bytes, enabled=enabled, get_time=get_time)

def advance(self, num_bytes: int, out=sys.stdout) -> None:
if not self.enabled:
return
self.current_bytes += num_bytes
self.print(out=out)

def print(self, final: bool = False, out=sys.stdout) -> None:
if not self.enabled:
return
current_time = self.get_time()
if self.last_printed + self.delta < current_time or final:
self.last_printed = current_time
# print a newline if this is the final update
maybe_newline = "\n" if final else ""
# if we know the total bytes, show a percentage, otherwise a spinner
if self.total_bytes > 0:
percentage = min(100 * self.current_bytes / self.total_bytes, 100.0)
percent_or_spinner = f"[{percentage:3.0f}%] "
else:
# only show the spinner if we are not at 100%
if final:
percent_or_spinner = "[100%] "
else:
percent_or_spinner = f"[ {self.spinner[self.index]} ] "
self.index = (self.index + 1) % len(self.spinner)

print(
f"\r {percent_or_spinner}{_format_bytes(self.current_bytes)} "
f"@ {_format_speed(self.current_bytes, current_time - self.start_time)}"
f"{maybe_newline}",
end="",
flush=True,
file=out,
)


@fetcher
class URLFetchStrategy(FetchStrategy):
"""URLFetchStrategy pulls source code from a URL for an archive, check the

@@ -316,7 +427,7 @@ def _check_headers(self, headers):
tty.warn(msg)

@_needs_stage
def _fetch_urllib(self, url):
def _fetch_urllib(self, url, chunk_size=65536):
save_file = self.stage.save_filename

request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

@@ -327,8 +438,15 @@ def _fetch_urllib(self, url):
try:
response = web_util.urlopen(request)
tty.msg(f"Fetching {url}")
progress = FetchProgress.from_headers(response.headers, enabled=sys.stdout.isatty())
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
while True:
chunk = response.read(chunk_size)
if not chunk:
break
f.write(chunk)
progress.advance(len(chunk))
progress.print(final=True)
except OSError as e:
# clean up archive on failure.
if self.archive_file:
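A hedged example of driving `FetchProgress` by hand, mirroring the chunked read loop in `_fetch_urllib` above. The byte counts are arbitrary and the import path is assumed from the surrounding hunks:

```python
# Hedged sketch: 16 chunks of 64 KiB against a known 1 MiB total.
from spack.fetch_strategy import FetchProgress  # import path assumed

progress = FetchProgress(total_bytes=1 << 20, enabled=True)
for _ in range(16):
    progress.advance(65536)  # one chunk; output is throttled to one print per 0.1 s
progress.print(final=True)  # forces a last "[100%] ..." line with a newline
```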
@@ -12,7 +12,7 @@
import shutil
import sys
from collections import Counter, OrderedDict
from typing import Callable, List, Optional, Tuple, Type, TypeVar, Union
from typing import Callable, Iterable, List, Optional, Tuple, Type, TypeVar, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty

@@ -391,7 +391,7 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
if self.test_failures:
raise TestFailure(self.test_failures)

def stand_alone_tests(self, kwargs):
def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
"""Run the package's stand-alone tests.

Args:

@@ -399,7 +399,9 @@ def stand_alone_tests(self, kwargs):
"""
import spack.build_environment  # avoid circular dependency

spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
spack.build_environment.start_build_process(
self.pkg, test_process, kwargs, timeout=timeout
)

def parts(self) -> int:
"""The total number of (checked) test parts."""

@@ -847,7 +849,7 @@ def write_test_summary(counts: "Counter"):
class TestSuite:
"""The class that manages specs for ``spack test run`` execution."""

def __init__(self, specs, alias=None):
def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None:
# copy so that different test suites have different package objects
# even if they contain the same spec
self.specs = [spec.copy() for spec in specs]

@@ -855,42 +857,43 @@ def __init__(self, specs, alias=None):
self.current_base_spec = None  # spec currently running do_test

self.alias = alias
self._hash = None
self._stage = None
self._hash: Optional[str] = None
self._stage: Optional[Prefix] = None

self.counts: "Counter" = Counter()

@property
def name(self):
def name(self) -> str:
"""The name (alias or, if none, hash) of the test suite."""
return self.alias if self.alias else self.content_hash

@property
def content_hash(self):
def content_hash(self) -> str:
"""The hash used to uniquely identify the test suite."""
if not self._hash:
json_text = sjson.dump(self.to_dict())
assert json_text is not None, f"{__name__} unexpected value for 'json_text'"
sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
b32_hash = b32_hash.decode("utf-8")
self._hash = b32_hash
return self._hash

def __call__(self, *args, **kwargs):
def __call__(
self,
*,
remove_directory: bool = True,
dirty: bool = False,
fail_first: bool = False,
externals: bool = False,
timeout: Optional[int] = None,
):
self.write_reproducibility_data()

remove_directory = kwargs.get("remove_directory", True)
dirty = kwargs.get("dirty", False)
fail_first = kwargs.get("fail_first", False)
externals = kwargs.get("externals", False)

for spec in self.specs:
try:
if spec.package.test_suite:
raise TestSuiteSpecError(
"Package {} cannot be run in two test suites at once".format(
spec.package.name
)
f"Package {spec.package.name} cannot be run in two test suites at once"
)

# Set up the test suite to know which test is running

@@ -905,7 +908,7 @@ def __call__(self, *args, **kwargs):
fs.mkdirp(test_dir)

# run the package tests
spec.package.do_test(dirty=dirty, externals=externals)
spec.package.do_test(dirty=dirty, externals=externals, timeout=timeout)

# Clean up on success
if remove_directory:

@@ -956,15 +959,12 @@ def __call__(self, *args, **kwargs):
if failures:
raise TestSuiteFailure(failures)

def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
"""Determine the overall test results status for the spec.
def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus:
"""Returns the overall test results status for the spec.

Args:
spec: instance of the spec under test
externals: ``True`` if externals are to be tested, else ``False``

Returns:
the spec's test status if available or ``None``
"""
tests_status_file = self.tested_file_for_spec(spec)
if not os.path.exists(tests_status_file):

@@ -981,109 +981,84 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
value = (f.read()).strip("\n")
return TestStatus(int(value)) if value else TestStatus.NO_TESTS

def ensure_stage(self):
def ensure_stage(self) -> None:
"""Ensure the test suite stage directory exists."""
if not os.path.exists(self.stage):
fs.mkdirp(self.stage)

@property
def stage(self):
"""The root test suite stage directory.

Returns:
str: the spec's test stage directory path
"""
def stage(self) -> Prefix:
"""The root test suite stage directory"""
if not self._stage:
self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
return self._stage

@stage.setter
def stage(self, value):
def stage(self, value: Union[Prefix, str]) -> None:
"""Set the value of a non-default stage directory."""
self._stage = value if isinstance(value, Prefix) else Prefix(value)

@property
def results_file(self):
def results_file(self) -> Prefix:
"""The path to the results summary file."""
return self.stage.join(results_filename)

@classmethod
def test_pkg_id(cls, spec):
def test_pkg_id(cls, spec: Spec) -> str:
"""The standard install test package identifier.

Args:
spec: instance of the spec under test

Returns:
str: the install test package identifier
"""
return spec.format_path("{name}-{version}-{hash:7}")

@classmethod
def test_log_name(cls, spec):
def test_log_name(cls, spec: Spec) -> str:
"""The standard log filename for a spec.

Args:
spec (spack.spec.Spec): instance of the spec under test

Returns:
str: the spec's log filename
spec: instance of the spec under test
"""
return "%s-test-out.txt" % cls.test_pkg_id(spec)
return f"{cls.test_pkg_id(spec)}-test-out.txt"

def log_file_for_spec(self, spec):
def log_file_for_spec(self, spec: Spec) -> Prefix:
"""The test log file path for the provided spec.

Args:
spec (spack.spec.Spec): instance of the spec under test

Returns:
str: the path to the spec's log file
spec: instance of the spec under test
"""
return self.stage.join(self.test_log_name(spec))

def test_dir_for_spec(self, spec):
def test_dir_for_spec(self, spec: Spec) -> Prefix:
"""The path to the test stage directory for the provided spec.

Args:
spec (spack.spec.Spec): instance of the spec under test

Returns:
str: the spec's test stage directory path
spec: instance of the spec under test
"""
return Prefix(self.stage.join(self.test_pkg_id(spec)))

@classmethod
def tested_file_name(cls, spec):
def tested_file_name(cls, spec: Spec) -> str:
"""The standard test status filename for the spec.

Args:
spec (spack.spec.Spec): instance of the spec under test

Returns:
str: the spec's test status filename
spec: instance of the spec under test
"""
return "%s-tested.txt" % cls.test_pkg_id(spec)

def tested_file_for_spec(self, spec):
def tested_file_for_spec(self, spec: Spec) -> str:
"""The test status file path for the spec.

Args:
spec (spack.spec.Spec): instance of the spec under test

Returns:
str: the spec's test status file path
spec: instance of the spec under test
"""
return fs.join_path(self.stage, self.tested_file_name(spec))

@property
def current_test_cache_dir(self):
def current_test_cache_dir(self) -> str:
"""Path to the test stage directory where the current spec's cached
build-time files were automatically copied.

Returns:
str: path to the current spec's staged, cached build-time files.

Raises:
TestSuiteSpecError: If there is no spec being tested
"""

@@ -1095,13 +1070,10 @@ def current_test_cache_dir(self):
return self.test_dir_for_spec(base_spec).cache.join(test_spec.name)

@property
def current_test_data_dir(self):
def current_test_data_dir(self) -> str:
"""Path to the test stage directory where the current spec's custom
package (data) files were automatically copied.

Returns:
str: path to the current spec's staged, custom package (data) files

Raises:
TestSuiteSpecError: If there is no spec being tested
"""

@@ -1112,17 +1084,17 @@ def current_test_data_dir(self):
base_spec = self.current_base_spec
return self.test_dir_for_spec(base_spec).data.join(test_spec.name)

def write_test_result(self, spec, result):
def write_test_result(self, spec: Spec, result: TestStatus) -> None:
"""Write the spec's test result to the test suite results file.

Args:
spec (spack.spec.Spec): instance of the spec under test
result (str): result from the spec's test execution (e.g., PASSED)
spec: instance of the spec under test
result: result from the spec's test execution (e.g., PASSED)
"""
msg = f"{self.test_pkg_id(spec)} {result}"
_add_msg_to_file(self.results_file, msg)

def write_reproducibility_data(self):
def write_reproducibility_data(self) -> None:
for spec in self.specs:
repo_cache_path = self.stage.repo.join(spec.name)
spack.repo.PATH.dump_provenance(spec, repo_cache_path)

@@ -1167,12 +1139,12 @@ def from_dict(d):
return TestSuite(specs, alias)

@staticmethod
def from_file(filename):
def from_file(filename: str) -> "TestSuite":
"""Instantiate a TestSuite using the specs and optional alias
provided in the given file.

Args:
filename (str): The path to the JSON file containing the test
filename: The path to the JSON file containing the test
suite specs and optional alias.

Raises:
@ -20,6 +20,7 @@
import signal
import subprocess as sp
import sys
import tempfile
import traceback
import warnings
from typing import List, Tuple
@ -41,6 +42,7 @@
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.spec
import spack.store
import spack.util.debug
@ -1046,6 +1048,10 @@ def main(argv=None):
    try:
        return _main(argv)

    except spack.solver.asp.OutputDoesNotSatisfyInputError as e:
        _handle_solver_bug(e)
        return 1

    except spack.error.SpackError as e:
        tty.debug(e)
        e.die()  # gracefully die on any SpackErrors
@ -1069,5 +1075,45 @@ def main(argv=None):
        return 3


def _handle_solver_bug(
    e: spack.solver.asp.OutputDoesNotSatisfyInputError, out=sys.stderr, root=None
) -> None:
    # when the solver outputs specs that do not satisfy the input and spack is used as a command
    # line tool, we dump the incorrect output specs to json so users can upload them in bug reports
    wrong_output = [(input, output) for input, output in e.input_to_output if output is not None]
    no_output = [input for input, output in e.input_to_output if output is None]
    if no_output:
        tty.error(
            "internal solver error: the following specs were not solved:\n - "
            + "\n - ".join(str(s) for s in no_output),
            stream=out,
        )
    if wrong_output:
        msg = (
            "internal solver error: the following specs were concretized, but do not satisfy the "
            "input:\n - "
            + "\n - ".join(str(s) for s, _ in wrong_output)
            + "\n Please report a bug at https://github.com/spack/spack/issues"
        )
        # try to write the input/output specs to a temporary directory for bug reports
        try:
            tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
            files = []
            for i, (input, output) in enumerate(wrong_output, start=1):
                in_file = os.path.join(tmpdir, f"input-{i}.json")
                out_file = os.path.join(tmpdir, f"output-{i}.json")
                files.append(in_file)
                files.append(out_file)
                with open(in_file, "w", encoding="utf-8") as f:
                    input.to_json(f)
                with open(out_file, "w", encoding="utf-8") as f:
                    output.to_json(f)

            msg += " and attach the following files:\n - " + "\n - ".join(files)
        except Exception:
            msg += "."
        tty.error(msg, stream=out)
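A minimal sketch of the new error path end to end (the spec strings are invented; the handler and error class are the ones defined above, and the unit test near the end of this diff exercises the same call):

    # Illustrative only: construct the error by hand and let the handler report it.
    import spack.main
    import spack.solver.asp
    import spack.spec

    pairs = [
        (spack.spec.Spec("foo+x"), spack.spec.Spec("foo~x")),  # solved, but violates the input
        (spack.spec.Spec("bar"), None),  # not solved at all
    ]
    err = spack.solver.asp.OutputDoesNotSatisfyInputError(pairs)
    spack.main._handle_solver_bug(err)  # reports both cases, dumps input-*.json / output-*.json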


class SpackCommandError(Exception):
    """Raised when SpackCommand execution fails."""
@ -162,6 +162,7 @@ class tty:
    configure: Executable
    make_jobs: int
    make: MakeExecutable
    nmake: Executable
    ninja: MakeExecutable
    python_include: str
    python_platlib: str
@ -14,7 +14,6 @@
import functools
import glob
import hashlib
import importlib
import io
import os
import re
@ -28,7 +27,7 @@

import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.lang import ClassProperty, classproperty, memoized

import spack.config
import spack.dependency
@ -701,10 +700,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    _verbose = None

    #: Package homepage where users can find more information about the package
    homepage: Optional[str] = None
    homepage: ClassProperty[Optional[str]] = None

    #: Default list URL (place to find available versions)
    list_url: Optional[str] = None
    list_url: ClassProperty[Optional[str]] = None

    #: Link depth to which list_url should be searched for new versions
    list_depth = 0
@ -818,12 +817,12 @@ def package_dir(cls):

    @classproperty
    def module(cls):
        """Module object (not just the name) that this package is defined in.
        """Module instance that this package class is defined in.

        We use this to add variables to package modules. This makes
        install() methods easier to write (e.g., can call configure())
        """
        return importlib.import_module(cls.__module__)
        return sys.modules[cls.__module__]

    @classproperty
    def namespace(cls):
@ -1821,7 +1820,7 @@ def _resource_stage(self, resource):
        resource_stage_folder = "-".join(pieces)
        return resource_stage_folder

    def do_test(self, dirty=False, externals=False):
    def do_test(self, *, dirty=False, externals=False, timeout: Optional[int] = None):
        if self.test_requires_compiler and not any(
            lang in self.spec for lang in ("c", "cxx", "fortran")
        ):
@ -1839,7 +1838,7 @@ def do_test(self, dirty=False, externals=False):
            "verbose": tty.is_verbose(),
        }

        self.tester.stand_alone_tests(kwargs)
        self.tester.stand_alone_tests(kwargs, timeout=timeout)
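Since the new signature makes all of `do_test`'s arguments keyword-only, callers must spell them out; a sketch of a stand-alone test run with a cap on runtime (the package instance and the 300-second value are illustrative):

    # pkg is an installed PackageBase instance; timeout is in seconds
    pkg.do_test(externals=False, timeout=300)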

    def unit_test_check(self):
        """Hook for unit tests to assert things about package internals.
@ -287,9 +287,33 @@ def specify(spec):
    return spack.spec.Spec(spec)


def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
    """Transformation that removes all "node" and "virtual_node" from the input list of facts."""
    return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def remove_facts(
    *to_be_removed: str,
) -> Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]:
    """Returns a transformation function that removes facts from the input list of facts."""

    def _remove(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
        return list(filter(lambda x: x.args[0] not in to_be_removed, facts))

    return _remove
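The closure returned by `remove_facts` generalizes the old `remove_node`; the previous behavior is recovered by naming the two facts it used to hard-code, as the `condition()` hunk further below does:

    # Drop-in replacement for the removed remove_node(spec, facts)
    transform = remove_facts("node", "virtual_node")
    filtered = transform(spec, facts)  # same facts, minus node/virtual_node entries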


def remove_build_deps(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
    build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
    result = []
    for x in facts:
        current_name = x.args[1]
        if current_name in build_deps:
            x.name = "build_requirement"
            result.append(fn.attr("build_requirement", build_deps[current_name], x))
            continue

        if x.args[0] == "depends_on":
            continue

        result.append(x)

    return result


def all_libcs() -> Set[spack.spec.Spec]:
@ -1287,12 +1311,8 @@ def on_model(model):
        result.raise_if_unsat()

        if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
            unsolved_str = Result.format_unsolved(result.unsolved_specs)
            raise InternalConcretizerError(
                "Internal Spack error: the solver completed but produced specs"
                " that do not satisfy the request. Please report a bug at "
                f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
            )
            raise OutputDoesNotSatisfyInputError(result.unsolved_specs)

        if conc_cache_enabled:
            CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
        concretization_stats = self.control.statistics
@ -1735,15 +1755,17 @@ def define_variant(
            pkg_fact(fn.variant_condition(name, vid, cond_id))

        # record type so we can construct the variant when we read it back in
        self.gen.fact(fn.variant_type(vid, variant_def.variant_type.value))
        self.gen.fact(fn.variant_type(vid, variant_def.variant_type.string))

        if variant_def.sticky:
            pkg_fact(fn.variant_sticky(vid))

        # define defaults for this variant definition
        defaults = variant_def.make_default().value if variant_def.multi else [variant_def.default]
        for val in sorted(defaults):
            pkg_fact(fn.variant_default_value_from_package_py(vid, val))
        if variant_def.multi:
            for val in sorted(variant_def.make_default().values):
                pkg_fact(fn.variant_default_value_from_package_py(vid, val))
        else:
            pkg_fact(fn.variant_default_value_from_package_py(vid, variant_def.default))

        # define possible values for this variant definition
        values = variant_def.values
@ -1771,7 +1793,9 @@ def define_variant(

            # make a spec indicating whether the variant has this conditional value
            variant_has_value = spack.spec.Spec()
            variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value)
            variant_has_value.variants[name] = vt.VariantValue(
                vt.VariantType.MULTI, name, (value.value,)
            )

            if value.when:
                # the conditional value is always "possible", but it imposes its when condition as
@ -1884,7 +1908,7 @@ def condition(

        if not context:
            context = ConditionContext()
            context.transform_imposed = remove_node
            context.transform_imposed = remove_facts("node", "virtual_node")

        if imposed_spec:
            imposed_name = imposed_spec.name or imposed_name
@ -1984,7 +2008,7 @@ def track_dependencies(input_spec, requirements):
            return requirements + [fn.attr("track_dependencies", input_spec.name)]

        def dependency_holds(input_spec, requirements):
            result = remove_node(input_spec, requirements) + [
            result = remove_facts("node", "virtual_node")(input_spec, requirements) + [
                fn.attr(
                    "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
                )
@ -2174,7 +2198,10 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
                    pkg_name, ConstraintOrigin.REQUIRE
                )
                if not virtual:
                    context.transform_imposed = remove_node
                    context.transform_required = remove_build_deps
                    context.transform_imposed = remove_facts(
                        "node", "virtual_node", "depends_on"
                    )
                # else: for virtuals we want to emit "node" and
                # "virtual_node" in imposed specs

@ -2236,16 +2263,18 @@ def external_packages(self):
            if pkg_name not in self.pkgs:
                continue

            self.gen.h2(f"External package: {pkg_name}")
            # Check if the external package is buildable. If it is
            # not then "external(<pkg>)" is a fact, unless we can
            # reuse an already installed spec.
            external_buildable = data.get("buildable", True)
            externals = data.get("externals", [])
            if not external_buildable or externals:
                self.gen.h2(f"External package: {pkg_name}")

            if not external_buildable:
                self.gen.fact(fn.buildable_false(pkg_name))

            # Read a list of all the specs for this package
            externals = data.get("externals", [])
            candidate_specs = [
                spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
            ]
@ -2334,6 +2363,8 @@ def preferred_variants(self, pkg_name):
        if not preferred_variants:
            return

        self.gen.h2(f"Package preferences: {pkg_name}")

        for variant_name in sorted(preferred_variants):
            variant = preferred_variants[variant_name]

@ -2346,7 +2377,7 @@ def preferred_variants(self, pkg_name):
                )
                continue

            for value in variant.value_as_tuple:
            for value in variant.values:
                for variant_def in variant_defs:
                    self.variant_values_from_specs.add((pkg_name, id(variant_def), value))
                    self.gen.fact(
@ -2464,7 +2495,7 @@ def _spec_clauses(
            if variant.value == ("*",):
                continue

            for value in variant.value_as_tuple:
            for value in variant.values:
                # ensure that the value *can* be valid for the spec
                if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
                    variant_defs = vt.prevalidate_variant_value(
@ -2574,6 +2605,16 @@ def _spec_clauses(
                # already-installed concrete specs.
                if concrete_build_deps or dspec.depflag != dt.BUILD:
                    clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
                elif not concrete_build_deps and dspec.depflag:
                    clauses.append(
                        fn.attr(
                            "concrete_build_dependency", spec.name, dep.name, dep.dag_hash()
                        )
                    )
                    for virtual_name in dspec.virtuals:
                        clauses.append(
                            fn.attr("virtual_on_build_edge", spec.name, dep.name, virtual_name)
                        )

                # if the spec is abstract, descend into dependencies.
                # if it's concrete, then the hashes above take care of dependency
@ -3128,7 +3169,6 @@ def setup(
        for pkg in sorted(self.pkgs):
            self.gen.h2("Package rules: %s" % pkg)
            self.pkg_rules(pkg, tests=self.tests)
            self.gen.h2("Package preferences: %s" % pkg)
            self.preferred_variants(pkg)

        self.gen.h1("Special variants")
@ -3200,12 +3240,13 @@ def define_runtime_constraints(self) -> List[spack.spec.Spec]:

            # FIXME (compiler as nodes): think of using isinstance(compiler_cls, WrappedCompiler)
            # Add a dependency on the compiler wrapper
            recorder("*").depends_on(
                "compiler-wrapper",
                when=f"%{compiler.name}@{compiler.versions}",
                type="build",
                description=f"Add the compiler wrapper when using {compiler}",
            )
            for language in ("c", "cxx", "fortran"):
                recorder("*").depends_on(
                    "compiler-wrapper",
                    when=f"%[virtuals={language}] {compiler.name}@{compiler.versions}",
                    type="build",
                    description=f"Add the compiler wrapper when using {compiler} for {language}",
                )

            if not using_libc_compatibility():
                continue
@ -3267,15 +3308,13 @@ def literal_specs(self, specs):
                    # These facts are needed to compute the "condition_set" of the root
                    pkg_name = clause.args[1]
                    self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
                elif clause_name == "depends_on":
                    pkg_name = clause.args[2]
                    self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))

            requirements.append(
                fn.attr(
                    "virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
                )
            )
            requirements = [x for x in requirements if x.args[0] != "depends_on"]
            cache[imposed_spec_key] = (effect_id, requirements)
        self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))

@ -3600,11 +3639,9 @@ def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]:
            # (avoid adding virtuals everywhere, if a single edge needs it)
            _, provider, virtual = clause.args
            clause.args = "virtual_on_edge", node_placeholder, provider, virtual
        body_str = (
            f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
            f"  not external({node_variable}),\n"
            f"  not runtime(Package)"
        ).replace(f'"{node_placeholder}"', f"{node_variable}")
        body_str = ",\n".join(f"  {x}" for x in body_clauses)
        body_str += f",\n  not external({node_variable})"
        body_str = body_str.replace(f'"{node_placeholder}"', f"{node_variable}")
        for old, replacement in when_substitutions.items():
            body_str = body_str.replace(old, replacement)
        return body_str, node_variable
@ -3795,13 +3832,13 @@ def node_os(self, node, os):
    def node_target(self, node, target):
        self._arch(node).target = target

    def variant_selected(self, node, name, value, variant_type, variant_id):
    def variant_selected(self, node, name: str, value: str, variant_type: str, variant_id):
        spec = self._specs[node]
        variant = spec.variants.get(name)
        if not variant:
            spec.variants[name] = vt.VariantType(variant_type).variant_class(name, value)
            spec.variants[name] = vt.VariantValue.from_concretizer(name, value, variant_type)
        else:
            assert variant_type == vt.VariantType.MULTI.value, (
            assert variant_type == "multi", (
                f"Can't have multiple values for single-valued variant: "
                f"{node}, {name}, {value}, {variant_type}, {variant_id}"
            )
@ -3825,6 +3862,17 @@ def external_spec_selected(self, node, idx):
        )
        self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

        # Annotate compiler specs from externals
        external_spec = spack.spec.Spec(spec_info["spec"])
        external_spec_deps = external_spec.dependencies()
        if len(external_spec_deps) > 1:
            raise InvalidExternalError(
                f"external spec {spec_info['spec']} cannot have more than one dependency"
            )
        elif len(external_spec_deps) == 1:
            compiler_str = external_spec_deps[0]
            self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))

        # If this is an extension, update the dependencies to include the extendee
        package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
        extendee_spec = package.extendee_spec
@ -4180,10 +4228,10 @@ def _inject_patches_variant(root: spack.spec.Spec) -> None:
            continue

        patches = list(spec_to_patches[id(spec)])
        variant: vt.MultiValuedVariant = spec.variants.setdefault(
        variant: vt.VariantValue = spec.variants.setdefault(
            "patches", vt.MultiValuedVariant("patches", ())
        )
        variant.value = tuple(p.sha256 for p in patches)
        variant.set(*(p.sha256 for p in patches))
        # FIXME: Monkey patches variant to store patches order
        ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
        ordered_hashes.sort()
@ -4651,13 +4699,9 @@ def solve_in_rounds(
                break

            if not result.specs:
                # This is also a problem: no specs were solved for, which
                # means we would be in a loop if we tried again
                unsolved_str = Result.format_unsolved(result.unsolved_specs)
                raise InternalConcretizerError(
                    "Internal Spack error: a subset of input specs could not"
                    f" be solved for.\n\t{unsolved_str}"
                )
                # This is also a problem: no specs were solved for, which means we would be in a
                # loop if we tried again
                raise OutputDoesNotSatisfyInputError(result.unsolved_specs)

            input_specs = list(x for (x, y) in result.unsolved_specs)
            for spec in result.specs:
@ -4687,6 +4731,19 @@ def __init__(self, msg):
        self.constraint_type = None


class OutputDoesNotSatisfyInputError(InternalConcretizerError):

    def __init__(
        self, input_to_output: List[Tuple[spack.spec.Spec, Optional[spack.spec.Spec]]]
    ) -> None:
        self.input_to_output = input_to_output
        super().__init__(
            "internal solver error: the solver completed but produced specs"
            " that do not satisfy the request. Please report a bug at "
            f"https://github.com/spack/spack/issues\n\t{Result.format_unsolved(input_to_output)}"
        )


class SolverError(InternalConcretizerError):
    """For cases where the solver is unable to produce a solution.

@ -4719,3 +4776,7 @@ class InvalidSpliceError(spack.error.SpackError):

class NoCompilerFoundError(spack.error.SpackError):
    """Raised when there is no possible compiler"""


class InvalidExternalError(spack.error.SpackError):
    """Raised when an external spec is invalid"""
@ -175,12 +175,24 @@ trigger_node(TriggerID, Node, Node) :-

% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).

1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
1 { literal_node(Root, node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
  mentioned_in_literal(Root, Mentioned), Mentioned != Root,
  internal_error("must have exactly one condition_set for literals").

1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
  literal_node(Root, LiteralNode),
  build(LiteralNode),
  not external(LiteralNode),
  attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).

condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
condition_set(LiteralNode, BuildNode) :- build_dependency_of_literal_node(LiteralNode, BuildNode).

:- build_dependency_of_literal_node(LiteralNode, BuildNode),
   not attr("depends_on", LiteralNode, BuildNode, "build").

% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
  solve_literal(TriggerID),
@ -472,10 +484,53 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
  imposed_constraint(ID, "depends_on", A1, A2, A3),
  internal_error("Build deps must land in exactly one duplicate").

1 { build_requirement(node(X, Parent), node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1
% If the parent is built, then we have a build_requirement on another node. For concrete nodes,
% or external nodes, we don't since we are trimming their build dependencies.
1 { attr("depends_on", node(X, Parent), node(0..Y-1, BuildDependency), "build") : max_dupes(BuildDependency, Y) } 1
  :- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
  impose(ID, node(X, Parent)),
  imposed_constraint(ID,"build_requirement",Parent,_).
     build(node(X, Parent)),
     not external(node(X, Parent)).

% Concrete nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
   concrete(ParentNode),
   not attr("concrete_build_dependency", ParentNode, BuildDependency, _).

:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
   attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
   not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

:- attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
   attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
   attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
   not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

% External nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
   external(ParentNode),
   not attr("external_build_requirement", ParentNode, build_requirement("node", BuildDependency)).

candidate_external_version(Constraint, BuildDependency, Version)
  :- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
     external(ParentNode),
     pkg_fact(BuildDependency, version_satisfies(Constraint, Version)).

error(100, "External {0} cannot satisfy both {1} and {2}", BuildDependency, LiteralConstraint, ExternalConstraint)
  :- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, LiteralConstraint)),
     external(ParentNode),
     attr("external_build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, ExternalConstraint)),
     not 1 { pkg_fact(BuildDependency, version_satisfies(ExternalConstraint, Version)) : candidate_external_version(LiteralConstraint, BuildDependency, Version) }.


% Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
% we have on externals
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
   Parent == BuildDependency,
   external(node(X, Parent)).

build_requirement(node(X, Parent), node(Y, BuildDependency)) :-
  attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build"),
  attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)).

1 { virtual_build_requirement(ParentNode, node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1
  :- attr("dependency_holds", ParentNode, Virtual, "build"),
@ -496,7 +551,6 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
  attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
  build_requirement(ParentNode, node(X, BuildDependency)).

attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).

1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
  attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
@ -882,6 +936,12 @@ requirement_weight(node(ID, Package), Group, W) :-
  requirement_policy(Package, Group, "one_of"),
  requirement_group_satisfied(node(ID, Package), Group).

{ attr("build_requirement", node(ID, Package), BuildRequirement) : condition_requirement(TriggerID, "build_requirement", Package, BuildRequirement) } :-
  pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
  requirement_group_member(ConditionID, Package, Group),
  activate_requirement(node(ID, Package), Group),
  requirement_group(Package, Group).

requirement_group_satisfied(node(ID, Package), X) :-
  1 { condition_holds(Y, node(ID, Package)) : requirement_group_member(Y, Package, X) } ,
  requirement_policy(Package, X, "any_of"),
@ -85,8 +85,10 @@ def is_virtual(self, name: str) -> bool:
    def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
        """Returns true if a package is allowed on the current host"""
        pkg_cls = self.repo.get_pkg_class(pkg_name)
        no_condition = spack.spec.Spec()
        for when_spec, conditions in pkg_cls.requirements.items():
            if not when_spec.intersects(self._platform_condition):
            # Restrict analysis to unconditional requirements
            if when_spec != no_condition:
                continue
            for requirements, _, _ in conditions:
                if not any(x.intersects(self._platform_condition) for x in requirements):
@ -111,22 +111,14 @@
__all__ = [
    "CompilerSpec",
    "Spec",
    "SpecParseError",
    "UnsupportedPropagationError",
    "DuplicateDependencyError",
    "DuplicateCompilerSpecError",
    "UnsupportedCompilerError",
    "DuplicateArchitectureError",
    "InconsistentSpecError",
    "InvalidDependencyError",
    "NoProviderError",
    "MultipleProviderError",
    "UnsatisfiableSpecNameError",
    "UnsatisfiableVersionSpecError",
    "UnsatisfiableCompilerSpecError",
    "UnsatisfiableCompilerFlagSpecError",
    "UnsatisfiableArchitectureSpecError",
    "UnsatisfiableProviderSpecError",
    "UnsatisfiableDependencySpecError",
    "AmbiguousHashError",
    "InvalidHashError",
@ -1437,7 +1429,7 @@ def with_compiler(self, compiler: "Spec") -> "SpecAnnotations":
    def __repr__(self) -> str:
        result = f"SpecAnnotations().with_spec_format({self.original_spec_format})"
        if self.compiler_node_attribute:
            result += f"with_compiler({str(self.compiler_node_attribute)})"
            result += f".with_compiler({str(self.compiler_node_attribute)})"
        return result


@ -1706,10 +1698,10 @@ def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
            result[key] = list(group)
        return result

    def _add_flag(self, name, value, propagate):
        """Called by the parser to add a known flag.
        Known flags currently include "arch"
        """
    def _add_flag(
        self, name: str, value: Union[str, bool], propagate: bool, concrete: bool
    ) -> None:
        """Called by the parser to add a known flag"""

        if propagate and name in vt.RESERVED_NAMES:
            raise UnsupportedPropagationError(
@ -1718,6 +1710,7 @@ def _add_flag(self, name, value, propagate):

        valid_flags = FlagMap.valid_compiler_flags()
        if name == "arch" or name == "architecture":
            assert type(value) is str, "architecture must have a string value"
            parts = tuple(value.split("-"))
            plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
            self._set_architecture(platform=plat, os=os, target=tgt)
@ -1731,19 +1724,15 @@ def _add_flag(self, name, value, propagate):
            self.namespace = value
        elif name in valid_flags:
            assert self.compiler_flags is not None
            assert type(value) is str, f"{name} must have a string value"
            flags_and_propagation = spack.compilers.flags.tokenize_flags(value, propagate)
            flag_group = " ".join(x for (x, y) in flags_and_propagation)
            for flag, propagation in flags_and_propagation:
                self.compiler_flags.add_flag(name, flag, propagation, flag_group)
        else:
            # FIXME:
            # All other flags represent variants. 'foo=true' and 'foo=false'
            # map to '+foo' and '~foo' respectively. As such they need a
            # BoolValuedVariant instance.
            if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
                self.variants[name] = vt.BoolValuedVariant(name, value, propagate)
            else:
                self.variants[name] = vt.AbstractVariant(name, value, propagate)
            self.variants[name] = vt.VariantValue.from_string_or_bool(
                name, value, propagate=propagate, concrete=concrete
            )

    def _set_architecture(self, **kwargs):
        """Called by the parser to set the architecture."""
@ -2351,6 +2340,7 @@ def to_node_dict(self, hash=ht.dag_hash):
                    [v.name for v in self.variants.values() if v.propagate], flag_names
                )
            )
        d["abstract"] = sorted(v.name for v in self.variants.values() if not v.concrete)

        if self.external:
            d["external"] = {
@ -3077,7 +3067,7 @@ def constrain(self, other, deps=True):
            raise UnsatisfiableVersionSpecError(self.versions, other.versions)

        for v in [x for x in other.variants if x in self.variants]:
            if not self.variants[v].compatible(other.variants[v]):
            if not self.variants[v].intersects(other.variants[v]):
                raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])

        sarch, oarch = self.architecture, other.architecture
@ -3404,7 +3394,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
            return True

        # If we have no dependencies, we can't satisfy any constraints.
        if not self._dependencies:
        if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
            return False

        # If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
@ -3415,6 +3405,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
        # verify the edge properties, cause everything is encoded in the hash of the nodes that
        # will be verified later.
        lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
        mock_nodes_from_old_specfiles = set()
        for rhs_edge in other.traverse_edges(root=False, cover="edges"):
            # If we are checking for ^mpi we need to verify if there is any edge
            if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
@ -3436,13 +3427,27 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                except KeyError:
                    return False

                candidates = current_node.dependencies(
                    name=rhs_edge.spec.name,
                    deptype=rhs_edge.depflag,
                    virtuals=rhs_edge.virtuals or None,
                )
                if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
                    return False
                if current_node.original_spec_format() < 5 or (
                    current_node.original_spec_format() >= 5 and current_node.external
                ):
                    compiler_spec = current_node.annotations.compiler_node_attribute
                    if compiler_spec is None:
                        return False

                    mock_nodes_from_old_specfiles.add(compiler_spec)
                    # This checks that the single node compiler spec satisfies the request
                    # of a direct dependency. The check is not perfect, but based on heuristic.
                    if not compiler_spec.satisfies(rhs_edge.spec):
                        return False

                else:
                    candidates = current_node.dependencies(
                        name=rhs_edge.spec.name,
                        deptype=rhs_edge.depflag,
                        virtuals=rhs_edge.virtuals or None,
                    )
                    if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
                        return False

                continue

@ -3482,8 +3487,9 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
            return False

        # Edges have been checked above already, hence deps=False
        lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
        return all(
            any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
            any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
            for rhs in other.traverse(root=False)
        )

@ -3957,6 +3963,8 @@ def format_attribute(match_object: Match) -> str:
        except AttributeError:
            if part == "compiler":
                return "none"
            elif part == "specfile_version":
                return f"v{current.original_spec_format()}"

            raise SpecFormatStringError(
                f"Attempted to format attribute {attribute}. "
@ -4492,7 +4500,7 @@ def __init__(self, spec: Spec):

    def __setitem__(self, name, vspec):
        # Raise a TypeError if vspec is not of the right type
        if not isinstance(vspec, vt.AbstractVariant):
        if not isinstance(vspec, vt.VariantValue):
            raise TypeError(
                "VariantMap accepts only values of variant types "
                f"[got {type(vspec).__name__} instead]"
@ -4602,8 +4610,7 @@ def constrain(self, other: "VariantMap") -> bool:
        changed = False
        for k in other:
            if k in self:
                # If they are not compatible raise an error
                if not self[k].compatible(other[k]):
                if not self[k].intersects(other[k]):
                    raise vt.UnsatisfiableVariantSpecError(self[k], other[k])
                # If they are compatible merge them
                changed |= self[k].constrain(other[k])
@ -4633,7 +4640,7 @@ def __str__(self):
        bool_keys = []
        kv_keys = []
        for key in sorted_keys:
            if isinstance(self[key].value, bool):
            if self[key].type == vt.VariantType.BOOL:
                bool_keys.append(key)
            else:
                kv_keys.append(key)
@ -4666,7 +4673,8 @@ def substitute_abstract_variants(spec: Spec):
    unknown = []
    for name, v in spec.variants.items():
        if name == "dev_path":
            spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
            v.type = vt.VariantType.SINGLE
            v.concrete = True
            continue
        elif name in vt.RESERVED_NAMES:
            continue
@ -4689,7 +4697,7 @@ def substitute_abstract_variants(spec: Spec):
        if rest:
            continue

        new_variant = pkg_variant.make_variant(v._original_value)
        new_variant = pkg_variant.make_variant(*v.values)
        pkg_variant.validate_or_raise(new_variant, spec.name)
        spec.variants.substitute(new_variant)

@ -4807,6 +4815,7 @@ def from_node_dict(cls, node):
            spec.architecture = ArchSpec.from_dict(node)

        propagated_names = node.get("propagate", [])
        abstract_variants = set(node.get("abstract", ()))
        for name, values in node.get("parameters", {}).items():
            propagate = name in propagated_names
            if name in _valid_compiler_flags:
@ -4814,8 +4823,8 @@ def from_node_dict(cls, node):
                for val in values:
                    spec.compiler_flags.add_flag(name, val, propagate)
            else:
                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
                    name, values, propagate=propagate
                spec.variants[name] = vt.VariantValue.from_node_dict(
                    name, values, propagate=propagate, abstract=name in abstract_variants
                )
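With the `abstract` list written by `to_node_dict` above, a node dictionary now records which variants were left non-concrete; a sketch of the shape this reader accepts (field values are invented):

    node = {
        "parameters": {"languages": ["c", "cxx"]},
        "propagate": [],
        "abstract": ["languages"],  # read back as a non-concrete (abstract) variant
    }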

        spec.external_path = None
@ -4840,7 +4849,7 @@ def from_node_dict(cls, node):
            patches = node["patches"]
            if len(patches) > 0:
                mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
                mvar.value = patches
                mvar.set(*patches)
                # FIXME: Monkey patches mvar to store patches order
                mvar._patches_in_order_of_appearance = patches

@ -5165,25 +5174,6 @@ def eval_conditional(string):
    return eval(string, valid_variables)


class SpecParseError(spack.error.SpecError):
    """Wrapper for ParseError for when we're parsing specs."""

    def __init__(self, parse_error):
        super().__init__(parse_error.message)
        self.string = parse_error.string
        self.pos = parse_error.pos

    @property
    def long_message(self):
        return "\n".join(
            [
                "  Encountered when parsing spec:",
                "    %s" % self.string,
                "    %s^" % (" " * self.pos),
            ]
        )


class InvalidVariantForSpecError(spack.error.SpecError):
    """Raised when an invalid conditional variant is specified."""

@ -5201,14 +5191,6 @@ class DuplicateDependencyError(spack.error.SpecError):
    """Raised when the same dependency occurs in a spec twice."""


class MultipleVersionError(spack.error.SpecError):
    """Raised when version constraints occur in a spec twice."""


class DuplicateCompilerSpecError(spack.error.SpecError):
    """Raised when the same compiler occurs in a spec twice."""


class UnsupportedCompilerError(spack.error.SpecError):
    """Raised when the user asks for a compiler spack doesn't know about."""

@ -5217,11 +5199,6 @@ class DuplicateArchitectureError(spack.error.SpecError):
    """Raised when the same architecture occurs in a spec twice."""


class InconsistentSpecError(spack.error.SpecError):
    """Raised when two nodes in the same spec DAG have inconsistent
    constraints."""


class InvalidDependencyError(spack.error.SpecError):
    """Raised when a dependency in a spec is not actually a dependency
    of the package."""
@ -5233,30 +5210,6 @@ def __init__(self, pkg, deps):
        )


class NoProviderError(spack.error.SpecError):
    """Raised when there is no package that provides a particular
    virtual dependency.
    """

    def __init__(self, vpkg):
        super().__init__("No providers found for virtual package: '%s'" % vpkg)
        self.vpkg = vpkg


class MultipleProviderError(spack.error.SpecError):
    """Raised when multiple packages provide a particular
    virtual dependency.
    """

    def __init__(self, vpkg, providers):
        """Takes the name of the vpkg"""
        super().__init__(
            "Multiple providers found for '%s': %s" % (vpkg, [str(s) for s in providers])
        )
        self.vpkg = vpkg
        self.providers = providers


class UnsatisfiableSpecNameError(spack.error.UnsatisfiableSpecError):
    """Raised when two specs aren't even for the same package."""

@ -5271,20 +5224,6 @@ def __init__(self, provided, required):
        super().__init__(provided, required, "version")


class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
    """Raised when a spec compiler conflicts with package constraints."""

    def __init__(self, provided, required):
        super().__init__(provided, required, "compiler")


class UnsatisfiableCompilerFlagSpecError(spack.error.UnsatisfiableSpecError):
    """Raised when a spec variant conflicts with package constraints."""

    def __init__(self, provided, required):
        super().__init__(provided, required, "compiler_flags")


class UnsatisfiableArchitectureSpecError(spack.error.UnsatisfiableSpecError):
    """Raised when a spec architecture conflicts with package constraints."""

@ -5292,14 +5231,6 @@ def __init__(self, provided, required):
        super().__init__(provided, required, "architecture")


class UnsatisfiableProviderSpecError(spack.error.UnsatisfiableSpecError):
    """Raised when a provider is supplied but constraints don't match
    a vpkg requirement"""

    def __init__(self, provided, required):
        super().__init__(provided, required, "provider")


# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(spack.error.UnsatisfiableSpecError):
@ -62,7 +62,7 @@
import sys
import traceback
import warnings
from typing import Iterator, List, Optional, Tuple
from typing import Iterator, List, Optional, Tuple, Union

from llnl.util.tty import color

@ -99,8 +99,7 @@
VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"

#: Regex with groups to use for splitting (optionally propagated) key-value pairs
SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")
SPLIT_KVP = re.compile(rf"^({NAME})(:?==?)(.*)$")

#: Regex with groups to use for splitting %[virtuals=...] tokens
SPLIT_COMPILER_TOKEN = re.compile(rf"^%\[virtuals=({VALUE}|{QUOTED_VALUE})]\s*(.*)$")
@ -135,8 +134,8 @@ class SpecTokens(TokenBase):
    # Variants
    PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
    BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}==(?:{VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"(?:{NAME}=(?:{VALUE}|{QUOTED_VALUE}))"
    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}:?==(?:{VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"(?:{NAME}:?=(?:{VALUE}|{QUOTED_VALUE}))"
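The optional `:` admitted before `=` and `==` lets the parser mark a key-value variant as concrete; a hedged example of the resulting spec syntax (the package and variant names are invented):

    # languages=c,cxx   -> abstract: at least the values c and cxx
    # languages:=c,cxx  -> concrete: exactly the values c and cxx, nothing more
    s = spack.spec.Spec("pkg languages:=c,cxx")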
    # Compilers
    COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
    COMPILER = rf"(?:%\s*(?:{NAME}))"
@ -370,10 +369,10 @@ def raise_parsing_error(string: str, cause: Optional[Exception] = None):
            """Raise a spec parsing error with token context."""
            raise SpecParsingError(string, self.ctx.current_token, self.literal_str) from cause

        def add_flag(name: str, value: str, propagate: bool):
        def add_flag(name: str, value: Union[str, bool], propagate: bool, concrete: bool):
            """Wrapper around ``Spec._add_flag()`` that adds parser context to errors raised."""
            try:
                initial_spec._add_flag(name, value, propagate)
                initial_spec._add_flag(name, value, propagate, concrete)
            except Exception as e:
                raise_parsing_error(str(e), e)

@ -428,29 +427,34 @@ def warn_if_after_compiler(token: str):
                warn_if_after_compiler(self.ctx.current_token.value)

            elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
                name = self.ctx.current_token.value[1:].strip()
                variant_value = self.ctx.current_token.value[0] == "+"
                add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
                add_flag(name, variant_value, propagate=False, concrete=True)
                warn_if_after_compiler(self.ctx.current_token.value)

            elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
                name = self.ctx.current_token.value[2:].strip()
                variant_value = self.ctx.current_token.value[0:2] == "++"
                add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
                add_flag(name, variant_value, propagate=True, concrete=True)
                warn_if_after_compiler(self.ctx.current_token.value)

            elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
                match = SPLIT_KVP.match(self.ctx.current_token.value)
                assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                concrete = name.endswith(":")
                if concrete:
                    name = name[:-1]

                name, _, value = match.groups()
                add_flag(name, strip_quotes_and_unescape(value), propagate=False)
                add_flag(
                    name, strip_quotes_and_unescape(value), propagate=False, concrete=concrete
                )
                warn_if_after_compiler(self.ctx.current_token.value)

            elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
                match = SPLIT_KVP.match(self.ctx.current_token.value)
                assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."

                name, _, value = match.groups()
                add_flag(name, strip_quotes_and_unescape(value), propagate=True)
                name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                concrete = name.endswith(":")
                if concrete:
                    name = name[:-1]
                add_flag(name, strip_quotes_and_unescape(value), propagate=True, concrete=concrete)
                warn_if_after_compiler(self.ctx.current_token.value)

            elif self.ctx.expect(SpecTokens.DAG_HASH):
@ -509,7 +513,8 @@ def parse(self):
        while True:
            if self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                if name.endswith(":"):
                    name = name[:-1]
                value = value.strip("'\" ").split(",")
                attributes[name] = value
                if name not in ("deptypes", "virtuals"):
@ -1,9 +1,12 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import multiprocessing
import os
import posixpath
import sys
from typing import Dict, Optional, Tuple

import pytest

@ -828,3 +831,88 @@ def test_extra_rpaths_is_set(
        assert os.environ["SPACK_COMPILER_EXTRA_RPATHS"] == expected_rpaths
    else:
        assert "SPACK_COMPILER_EXTRA_RPATHS" not in os.environ


class _TestProcess:
    calls: Dict[str, int] = collections.defaultdict(int)
    terminated = False
    runtime = 0

    def __init__(self, *, target, args):
        self.alive = None
        self.exitcode = 0
        self._reset()

    def start(self):
        self.calls["start"] += 1
        self.alive = True

    def is_alive(self):
        self.calls["is_alive"] += 1
        return self.alive

    def join(self, timeout: Optional[int] = None):
        self.calls["join"] += 1
        if timeout is not None and timeout > self.runtime:
            self.alive = False

    def terminate(self):
        self.calls["terminate"] += 1
        self._set_terminated()
        self.alive = False

    @classmethod
    def _set_terminated(cls):
        cls.terminated = True

    @classmethod
    def _reset(cls):
        cls.calls.clear()
        cls.terminated = False


class _TestPipe:
    def close(self):
        pass

    def recv(self):
        if _TestProcess.terminated is True:
            return 1
        return 0


def _pipe_fn(*, duplex: bool = False) -> Tuple[_TestPipe, _TestPipe]:
    return _TestPipe(), _TestPipe()


@pytest.fixture()
def mock_build_process(monkeypatch):
    monkeypatch.setattr(spack.build_environment, "BuildProcess", _TestProcess)
    monkeypatch.setattr(multiprocessing, "Pipe", _pipe_fn)

    def _factory(*, runtime: int):
        _TestProcess.runtime = runtime

    return _factory


@pytest.mark.parametrize(
    "runtime,timeout,expected_result,expected_calls",
    [
        # execution time < timeout
        (2, 5, 0, {"start": 1, "join": 1, "is_alive": 1}),
        # execution time > timeout
        (5, 2, 1, {"start": 1, "join": 2, "is_alive": 1, "terminate": 1}),
    ],
)
def test_build_process_timeout(
    mock_build_process, runtime, timeout, expected_result, expected_calls
):
    """Tests that we make the correct function calls in different timeout scenarios."""
    mock_build_process(runtime=runtime)
    result = spack.build_environment.start_build_process(
        pkg=None, function=None, kwargs={}, timeout=timeout
    )

    assert result == expected_result
    assert _TestProcess.calls == expected_calls
@ -873,10 +873,6 @@ def test_push_to_build_cache(
        ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
        assert "spack-build-out.txt.gz" in os.listdir(logs_dir)

        dl_dir = scratch / "download_dir"
        buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))
        assert len(os.listdir(dl_dir)) == 2


def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
    def push_or_raise(*args, **kwargs):
@ -1831,10 +1831,7 @@ def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages):
        monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
        monkeypatch.setattr(spack.solver.asp.Result, "specs", list())

        with pytest.raises(
            spack.solver.asp.InternalConcretizerError,
            match="a subset of input specs could not be solved for",
        ):
        with pytest.raises(spack.solver.asp.OutputDoesNotSatisfyInputError):
            list(solver.solve_in_rounds(specs))

    def test_coconcretize_reuse_and_virtuals(self):
@ -3336,3 +3333,110 @@ def test_specifying_compilers_with_virtuals_syntax(default_mock_concretization):
    assert mpich["fortran"].satisfies("gcc")
    assert mpich["c"].satisfies("llvm")
    assert mpich["cxx"].satisfies("llvm")


@pytest.mark.regression("49847")
@pytest.mark.xfail(sys.platform == "win32", reason="issues with install mockery")
def test_reuse_when_input_specifies_build_dep(install_mockery, do_not_check_runtimes_on_reuse):
    """Test that we can reuse a spec when specifying build dependencies in the input"""
    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9 %gcc@9"))
    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()

    with spack.config.override("concretizer:reuse", True):
        result = spack.concretize.concretize_one("pkg-b %gcc")
        assert pkgb_old.dag_hash() == result.dag_hash()

        result = spack.concretize.concretize_one("pkg-a ^pkg-b %gcc@9")
        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()
        assert result.satisfies("%gcc@9")

        result = spack.concretize.concretize_one("pkg-a %gcc@10 ^pkg-b %gcc@9")
        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()


@pytest.mark.regression("49847")
def test_reuse_when_requiring_build_dep(
    install_mockery, do_not_check_runtimes_on_reuse, mutable_config
):
    """Test that we can reuse a spec when specifying build dependencies in requirements"""
    mutable_config.set("packages:all:require", "%gcc")
    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9"))
    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()

    with spack.config.override("concretizer:reuse", True):
        result = spack.concretize.concretize_one("pkg-b")
        assert pkgb_old.dag_hash() == result.dag_hash(), result.tree()


@pytest.mark.regression("50167")
def test_input_analysis_and_conditional_requirements(default_mock_concretization):
    """Tests that input analysis doesn't use conditional requirements
    to discard possible dependencies.

    If the requirement is conditional, and impossible to achieve on the current
    platform, the valid search space is still the complement of the condition that
    activates the requirement.
    """
    libceed = default_mock_concretization("libceed")
    assert libceed["libxsmm"].satisfies("@main")
    assert libceed["libxsmm"].satisfies("platform=test")


@pytest.mark.parametrize(
    "compiler_str,expected,not_expected",
    [
        # Compiler queries are as specific as the constraint on the external
        ("gcc@10", ["%gcc", "%gcc@10"], ["%clang", "%gcc@9"]),
        ("gcc", ["%gcc"], ["%clang", "%gcc@9", "%gcc@10"]),
    ],
)
@pytest.mark.regression("49841")
def test_installing_external_with_compilers_directly(
    compiler_str, expected, not_expected, mutable_config, mock_packages, tmp_path
):
    """Tests that version constraints are taken into account for compiler annotations
    on externals.
    """
    spec_str = f"libelf@0.8.12 %{compiler_str}"
    packages_yaml = syaml.load_config(
        f"""
packages:
  libelf:
    buildable: false
    externals:
    - spec: {spec_str}
      prefix: {tmp_path / 'libelf'}
"""
    )
    mutable_config.set("packages", packages_yaml["packages"])
    s = spack.concretize.concretize_one(spec_str)

    assert s.external
    assert all(s.satisfies(c) for c in expected)
    assert all(not s.satisfies(c) for c in not_expected)


@pytest.mark.regression("49841")
def test_using_externals_with_compilers(mutable_config, mock_packages, tmp_path):
    """Tests that version constraints are taken into account for compiler annotations
    on externals, even when imposed as transitive deps.
    """
    packages_yaml = syaml.load_config(
        f"""
packages:
  libelf:
    buildable: false
    externals:
    - spec: libelf@0.8.12 %gcc@10
      prefix: {tmp_path / 'libelf'}
"""
    )
    mutable_config.set("packages", packages_yaml["packages"])

    with pytest.raises(spack.error.SpackError):
        spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@:9")

    s = spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@10:")

    libelf = s["libelf"]
    assert libelf.external and libelf.satisfies("%gcc")
@ -2,11 +2,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from io import StringIO

import pytest

import spack.concretize
import spack.config
import spack.main
import spack.solver.asp
import spack.spec

version_error_messages = [
    "Cannot satisfy",
@ -60,3 +64,31 @@ def test_error_messages(error_messages, config_set, spec, mock_packages, mutable

    for em in error_messages:
        assert em in str(e.value)


def test_internal_error_handling_formatting(tmp_path):
    log = StringIO()
    input_to_output = [
        (spack.spec.Spec("foo+x"), spack.spec.Spec("foo@=1.0~x")),
        (spack.spec.Spec("bar+y"), spack.spec.Spec("x@=1.0~y")),
        (spack.spec.Spec("baz+z"), None),
    ]
    spack.main._handle_solver_bug(
        spack.solver.asp.OutputDoesNotSatisfyInputError(input_to_output), root=tmp_path, out=log
    )

    output = log.getvalue()
    assert "the following specs were not solved:\n - baz+z\n" in output
    assert (
        "the following specs were concretized, but do not satisfy the input:\n"
        " - foo+x\n"
        " - bar+y\n"
    ) in output

    files = {f.name: str(f) for f in tmp_path.glob("spack-asp-*/*.json")}
    assert {"input-1.json", "input-2.json", "output-1.json", "output-2.json"} == set(files.keys())

    assert spack.spec.Spec.from_specfile(files["input-1.json"]) == spack.spec.Spec("foo+x")
    assert spack.spec.Spec.from_specfile(files["input-2.json"]) == spack.spec.Spec("bar+y")
    assert spack.spec.Spec.from_specfile(files["output-1.json"]) == spack.spec.Spec("foo@=1.0~x")
    assert spack.spec.Spec.from_specfile(files["output-2.json"]) == spack.spec.Spec("x@=1.0~y")
@ -1239,3 +1239,68 @@ def test_virtual_requirement_respects_any_of(concretize_scope, mock_packages)

    with pytest.raises(spack.error.SpackError):
        spack.concretize.concretize_one("mpileaks ^[virtuals=mpi] zmpi")


@pytest.mark.parametrize(
    "packages_yaml,expected_reuse,expected_constraints",
    [
        (
            """
packages:
  all:
    require:
    - "%gcc"
""",
            True,
            # To minimize installed specs we reuse pkg-b's compiler, since the requirement allows it
            ["%gcc@9"],
        ),
        (
            """
packages:
  all:
    require:
    - "%gcc@10"
""",
            False,
            ["%gcc@10"],
        ),
        (
            """
packages:
  all:
    require:
    - "%gcc"
  pkg-a:
    require:
    - "%gcc@10"
""",
            True,
            ["%gcc@10"],
        ),
    ],
)
@pytest.mark.regression("49847")
def test_requirements_on_compilers_and_reuse(
    concretize_scope, mock_packages, packages_yaml, expected_reuse, expected_constraints
):
    """Tests that we can require compilers with `%` in configuration files, and still get reuse
    of specs (even though reused specs have no build dependency in the ASP encoding).
    """
    input_spec = "pkg-a"

    reused_spec = spack.concretize.concretize_one("pkg-b@0.9 %gcc@9")
    reused_nodes = list(reused_spec.traverse())
    update_packages_config(packages_yaml)
    root_specs = [Spec(input_spec)]

    with spack.config.override("concretizer:reuse", True):
        solver = spack.solver.asp.Solver()
        setup = spack.solver.asp.SpackSolverSetup()
        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reused_nodes)
    pkga = result.specs[0]
    is_pkgb_reused = pkga["pkg-b"].dag_hash() == reused_spec.dag_hash()

    assert is_pkgb_reused == expected_reuse
    for c in expected_constraints:
        assert pkga.satisfies(c), print(pkga.tree())
BIN
lib/spack/spack/test/data/database/index.json.v7_v8.json.gz
Normal file
Binary file not shown.
@ -47,10 +47,10 @@ class Grads(AutotoolsPackage):
    depends_on('readline')
    depends_on('pkgconfig', type='build')

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.set('SUPPLIBS', '/')

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        env.set('GADDIR', self.prefix.data)

    @run_after('install')
@ -517,7 +517,7 @@ class Llvm(CMakePackage, CudaPackage):
            return (None, flags, None)
        return (flags, None, None)

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        """When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
        if self.compiler.name in ["clang", "apple-clang"]:
            for lld in "ld.lld-{0}".format(self.compiler.version.version[0]), "ld.lld":
@ -528,7 +528,7 @@ class Llvm(CMakePackage, CudaPackage):
                    os.symlink(bin, sym)
            env.prepend_path("PATH", self.stage.path)

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        if "+clang" in self.spec:
            env.set("CC", join_path(self.spec.prefix.bin, "clang"))
            env.set("CXX", join_path(self.spec.prefix.bin, "clang++"))
@ -318,7 +318,7 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    patch('mfem-4.0.0-makefile-syntax-fix.patch', when='@4.0.0')
    phases = ['configure', 'build', 'install']

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.unset('MFEM_DIR')
        env.unset('MFEM_BUILD_DIR')

@ -281,7 +281,7 @@ class PyTorch(PythonPackage, CudaPackage):
        "caffe2/CMakeLists.txt",
    )

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        """Set environment variables used to control the build.

        PyTorch's ``setup.py`` is a thin wrapper around ``cmake``.
@ -440,7 +440,7 @@ class Trilinos(CMakePackage, CudaPackage):
        url = "https://github.com/trilinos/Trilinos/archive/trilinos-release-{0}.tar.gz"
        return url.format(version.dashed)

    def setup_dependent_run_environment(self, env, dependent_spec):
    def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
        if "+cuda" in self.spec:
            # currently Trilinos doesn't perform the memory fence so
            # it relies on blocking CUDA kernel launch. This is needed
@ -453,7 +453,7 @@ class Trilinos(CMakePackage, CudaPackage):
        else:
            self.spec.kokkos_cxx = spack_cxx

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        spec = self.spec
        if "+cuda" in spec and "+wrapper" in spec:
            if "+mpi" in spec:
@ -847,7 +847,7 @@ class Trilinos(CMakePackage, CudaPackage):
        )
        filter_file(r"-lpytrilinos", "", "%s/Makefile.export.Trilinos" % self.prefix.include)

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        if "+exodus" in self.spec:
            env.prepend_path("PYTHONPATH", self.prefix.lib)

@ -5,6 +5,7 @@
import contextlib
import datetime
import functools
import gzip
import json
import os
import pathlib
@ -32,6 +33,7 @@
import spack.database
import spack.deptypes as dt
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.store
@ -1243,3 +1245,26 @@ def test_query_with_predicate_fn(database):

    specs = database.query(predicate_fn=lambda x: not spack.repo.PATH.exists(x.spec.name))
    assert not specs


@pytest.mark.regression("49964")
def test_querying_reindexed_database_specfilev5(tmp_path):
    """Tests that we can query a reindexed database from before compilers were modeled
    as dependencies, and get appropriate results for %<compiler> and similar selections.
    """
    test_path = pathlib.Path(spack.paths.test_path)
    zipfile = test_path / "data" / "database" / "index.json.v7_v8.json.gz"
    with gzip.open(str(zipfile), "rt", encoding="utf-8") as f:
        data = json.load(f)

    index_json = tmp_path / spack.database._DB_DIRNAME / spack.database.INDEX_JSON_FILE
    index_json.parent.mkdir(parents=True)
    index_json.write_text(json.dumps(data))

    db = spack.database.Database(str(tmp_path))

    specs = db.query("%gcc")

    assert len(specs) == 8
    assert len([x for x in specs if x.external]) == 2
    assert len([x for x in specs if x.original_spec_format() < 5]) == 8
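Editor's aside, not part of the change: should the gzipped index fixture above ever need regenerating, a helper mirroring the test's read path would look roughly like this (the function name is hypothetical):

import gzip
import json
import pathlib

def write_db_index_fixture(index: dict, path: pathlib.Path) -> None:
    # Inverse of the gzip.open(..., "rt") + json.load() read path in the test above.
    with gzip.open(path, "wt", encoding="utf-8") as f:
        json.dump(index, f)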
@ -20,7 +20,7 @@
    SpackEnvironmentViewError,
    _error_on_nonempty_view_dir,
)
from spack.spec_list import UndefinedReferenceError
from spack.environment.list import UndefinedReferenceError

pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows")

@ -107,7 +107,8 @@ def test_env_change_spec_in_definition(tmp_path, mock_packages, mutable_mock_env

    assert any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)

    e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
    with e:
        e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
    e.write()

    # Ensure changed specs are in memory
@ -776,10 +777,8 @@ def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages):
    """
    )

    e = ev.Environment(env_path)
    with e:
        with pytest.raises(UndefinedReferenceError, match=r"which does not appear"):
            e.concretize()
    with pytest.raises(UndefinedReferenceError, match=r"which is not defined"):
        _ = ev.Environment(env_path)


@pytest.mark.regression("41292")
@ -2,6 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from io import StringIO

import pytest

from spack import fetch_strategy
@ -13,3 +15,136 @@ def test_fetchstrategy_bad_url_scheme():

    with pytest.raises(ValueError):
        fetcher = fetch_strategy.from_url_scheme("bogus-scheme://example.com/a/b/c")  # noqa: F841


@pytest.mark.parametrize(
    "expected,total_bytes",
    [
        ("   0.00 B", 0),
        (" 999.00 B", 999),
        ("   1.00 KB", 1000),
        ("   2.05 KB", 2048),
        ("   1.00 MB", 1e6),
        ("  12.30 MB", 1.23e7),
        ("   1.23 GB", 1.23e9),
        (" 999.99 GB", 9.9999e11),
        ("5000.00 GB", 5e12),
    ],
)
def test_format_bytes(expected, total_bytes):
    assert fetch_strategy._format_bytes(total_bytes) == expected
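As a reading aid for the table above: a minimal sketch consistent with those expectations, assuming decimal (1000-based) units, a 7-wide right-aligned number, and GB as the largest unit. The authoritative implementation is fetch_strategy._format_bytes; this is only an illustration.

def format_bytes_sketch(n: float) -> str:
    # 1000-based units, number right-aligned to width 7, capped at GB
    # (so 5e12 renders as "5000.00 GB", matching the last row above).
    for unit in ("B", "KB", "MB"):
        if n < 1000:
            return f"{n:7.2f} {unit}"
        n /= 1000
    return f"{n:7.2f} GB"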
@pytest.mark.parametrize(
    "expected,total_bytes,elapsed",
    [
        ("   0.0 B/s", 0, 0),  # no time passed -- defaults to 1s.
        ("   0.0 B/s", 0, 1),
        (" 999.0 B/s", 999, 1),
        ("   1.0 KB/s", 1000, 1),
        (" 500.0 B/s", 1000, 2),
        ("   2.0 KB/s", 2048, 1),
        ("   1.0 MB/s", 1e6, 1),
        (" 500.0 KB/s", 1e6, 2),
        ("  12.3 MB/s", 1.23e7, 1),
        ("   1.2 GB/s", 1.23e9, 1),
        (" 999.9 GB/s", 9.999e11, 1),
        ("5000.0 GB/s", 5e12, 1),
    ],
)
def test_format_speed(expected, total_bytes, elapsed):
    assert fetch_strategy._format_speed(total_bytes, elapsed) == expected
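Similarly, speed is bytes divided by elapsed time, with zero elapsed treated as one second (first table row). A sketch under those assumptions, again with fetch_strategy._format_speed as the authoritative version:

def format_speed_sketch(total_bytes: float, elapsed: float) -> str:
    # elapsed == 0 defaults to 1s to avoid division by zero (first row above)
    n = total_bytes / (elapsed or 1)
    for unit in ("B", "KB", "MB"):
        if n < 1000:
            return f"{n:6.1f} {unit}/s"
        n /= 1000
    return f"{n:6.1f} GB/s"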
def test_fetch_progress_unknown_size():
    # time stamps in seconds (note the extra short 1.5 -> 1.55 step)
    time_stamps = iter([1.0, 1.5, 1.55, 2.0, 3.0, 5.0, 5.5, 5.5])
    progress = fetch_strategy.FetchProgress(total_bytes=None, get_time=lambda: next(time_stamps))
    assert progress.start_time == 1.0
    out = StringIO()

    progress.advance(1000, out)
    assert progress.last_printed == 1.5
    progress.advance(50, out)
    assert progress.last_printed == 1.5  # does not print, too early after last print
    progress.advance(2000, out)
    assert progress.last_printed == 2.0
    progress.advance(3000, out)
    assert progress.last_printed == 3.0
    progress.advance(4000, out)
    assert progress.last_printed == 5.0
    progress.advance(4000, out)
    assert progress.last_printed == 5.5
    progress.print(final=True, out=out)  # finalize download

    outputs = [
        "\r [ | ] 1.00 KB @ 2.0 KB/s",
        "\r [ / ] 3.05 KB @ 3.0 KB/s",
        "\r [ - ] 6.05 KB @ 3.0 KB/s",
        "\r [ \\ ] 10.05 KB @ 2.5 KB/s",  # have to escape \ here but it is aligned in output
        "\r [ | ] 14.05 KB @ 3.1 KB/s",
        "\r [100%] 14.05 KB @ 3.1 KB/s\n",  # final print: no spinner; newline
    ]

    assert out.getvalue() == "".join(outputs)
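The last_printed expectations above imply roughly a 0.5s minimum interval between intermediate prints (1.55 is too soon after 1.5, while 2.0 is not). A sketch of that throttling; the actual constant lives inside FetchProgress and may differ:

MIN_PRINT_INTERVAL = 0.5  # assumed from the timestamps in the test above

def should_print(now: float, last_printed: float, final: bool = False) -> bool:
    # A final print always goes through; intermediate prints are rate-limited.
    return final or now - last_printed >= MIN_PRINT_INTERVAL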
def test_fetch_progress_known_size():
    time_stamps = iter([1.0, 1.5, 3.0, 4.0, 4.0])
    progress = fetch_strategy.FetchProgress(total_bytes=6000, get_time=lambda: next(time_stamps))
    out = StringIO()
    progress.advance(1000, out)  # time 1.5
    progress.advance(2000, out)  # time 3.0
    progress.advance(3000, out)  # time 4.0
    progress.print(final=True, out=out)

    outputs = [
        "\r [ 17%] 1.00 KB @ 2.0 KB/s",
        "\r [ 50%] 3.00 KB @ 1.5 KB/s",
        "\r [100%] 6.00 KB @ 2.0 KB/s",
        "\r [100%] 6.00 KB @ 2.0 KB/s\n",  # final print has newline
    ]

    assert out.getvalue() == "".join(outputs)


def test_fetch_progress_disabled():
    """When disabled, FetchProgress shouldn't print anything when advanced"""

    def get_time():
        raise RuntimeError("Should not be called")

    progress = fetch_strategy.FetchProgress(enabled=False, get_time=get_time)
    out = StringIO()
    progress.advance(1000, out)
    progress.advance(2000, out)
    progress.print(final=True, out=out)
    assert progress.last_printed == 0
    assert not out.getvalue()


@pytest.mark.parametrize(
    "header,value,total_bytes",
    [
        ("Content-Length", "1234", 1234),
        ("Content-Length", "0", 0),
        ("Content-Length", "-10", 0),
        ("Content-Length", "not a number", 0),
        ("Not-Content-Length", "1234", 0),
    ],
)
def test_fetch_progress_from_headers(header, value, total_bytes):
    time_stamps = iter([1.0, 1.5, 3.0, 4.0, 4.0])
    progress = fetch_strategy.FetchProgress.from_headers(
        {header: value}, get_time=lambda: next(time_stamps), enabled=True
    )
    assert progress.total_bytes == total_bytes
    assert progress.enabled
    assert progress.start_time == 1.0


def test_fetch_progress_from_headers_disabled():
    progress = fetch_strategy.FetchProgress.from_headers(
        {"Content-Length": "1234"}, get_time=lambda: 1.0, enabled=False
    )
    assert not progress.enabled
@ -6,53 +6,56 @@
import pytest

import spack.concretize
from spack.environment.list import SpecListParser
from spack.installer import PackageInstaller
from spack.spec import Spec
from spack.spec_list import SpecList

DEFAULT_EXPANSION = [
    "mpileaks",
    "zmpi@1.0",
    "mpich@3.0",
    {"matrix": [["hypre"], ["%gcc@4.5.0", "%clang@3.3"]]},
    "libelf",
]

DEFAULT_CONSTRAINTS = [
    [Spec("mpileaks")],
    [Spec("zmpi@1.0")],
    [Spec("mpich@3.0")],
    [Spec("hypre"), Spec("%gcc@4.5.0")],
    [Spec("hypre"), Spec("%clang@3.3")],
    [Spec("libelf")],
]

DEFAULT_SPECS = [
    Spec("mpileaks"),
    Spec("zmpi@1.0"),
    Spec("mpich@3.0"),
    Spec("hypre%gcc@4.5.0"),
    Spec("hypre%clang@3.3"),
    Spec("libelf"),
]


@pytest.fixture()
def parser_and_speclist():
    """Default configuration of parser and user spec list for tests"""
    parser = SpecListParser()
    parser.parse_definitions(
        data=[
            {"gccs": ["%gcc@4.5.0"]},
            {"clangs": ["%clang@3.3"]},
            {"mpis": ["zmpi@1.0", "mpich@3.0"]},
        ]
    )
    result = parser.parse_user_specs(
        name="specs",
        yaml_list=["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"],
    )
    return parser, result


class TestSpecList:
    default_input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"]

    default_reference = {
        "gccs": SpecList("gccs", ["%gcc@4.5.0"]),
        "clangs": SpecList("clangs", ["%clang@3.3"]),
        "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
    }

    default_expansion = [
        "mpileaks",
        "zmpi@1.0",
        "mpich@3.0",
        {"matrix": [["hypre"], ["%gcc@4.5.0", "%clang@3.3"]]},
        "libelf",
    ]

    default_constraints = [
        [Spec("mpileaks")],
        [Spec("zmpi@1.0")],
        [Spec("mpich@3.0")],
        [Spec("hypre"), Spec("%gcc@4.5.0")],
        [Spec("hypre"), Spec("%clang@3.3")],
        [Spec("libelf")],
    ]

    default_specs = [
        Spec("mpileaks"),
        Spec("zmpi@1.0"),
        Spec("mpich@3.0"),
        Spec("hypre%gcc@4.5.0"),
        Spec("hypre%clang@3.3"),
        Spec("libelf"),
    ]

    def test_spec_list_expansions(self):
        speclist = SpecList("specs", self.default_input, self.default_reference)

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs

    @pytest.mark.regression("28749")
    @pytest.mark.parametrize(
        "specs,expected",
@ -86,116 +89,87 @@ def test_spec_list_expansions(self):
        ],
    )
    def test_spec_list_constraint_ordering(self, specs, expected):
        speclist = SpecList("specs", specs)
        expected_specs = [Spec(x) for x in expected]
        assert speclist.specs == expected_specs
        result = SpecListParser().parse_user_specs(name="specs", yaml_list=specs)
        assert result.specs == [Spec(x) for x in expected]

    def test_spec_list_add(self):
        speclist = SpecList("specs", self.default_input, self.default_reference)
    def test_mock_spec_list(self, parser_and_speclist):
        """Tests expected properties on the default mock spec list"""
        parser, mock_list = parser_and_speclist
        assert mock_list.specs_as_yaml_list == DEFAULT_EXPANSION
        assert mock_list.specs_as_constraints == DEFAULT_CONSTRAINTS
        assert mock_list.specs == DEFAULT_SPECS

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs
    def test_spec_list_add(self, parser_and_speclist):
        parser, mock_list = parser_and_speclist
        mock_list.add("libdwarf")

        speclist.add("libdwarf")
        assert mock_list.specs_as_yaml_list == DEFAULT_EXPANSION + ["libdwarf"]
        assert mock_list.specs_as_constraints == DEFAULT_CONSTRAINTS + [[Spec("libdwarf")]]
        assert mock_list.specs == DEFAULT_SPECS + [Spec("libdwarf")]

        assert speclist.specs_as_yaml_list == self.default_expansion + ["libdwarf"]
        assert speclist.specs_as_constraints == self.default_constraints + [[Spec("libdwarf")]]
        assert speclist.specs == self.default_specs + [Spec("libdwarf")]
    def test_spec_list_remove(self, parser_and_speclist):
        parser, mock_list = parser_and_speclist
        mock_list.remove("libelf")

    def test_spec_list_remove(self):
        speclist = SpecList("specs", self.default_input, self.default_reference)
        assert mock_list.specs_as_yaml_list + ["libelf"] == DEFAULT_EXPANSION
        assert mock_list.specs_as_constraints + [[Spec("libelf")]] == DEFAULT_CONSTRAINTS
        assert mock_list.specs + [Spec("libelf")] == DEFAULT_SPECS

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs

        speclist.remove("libelf")

        assert speclist.specs_as_yaml_list + ["libelf"] == self.default_expansion

        assert speclist.specs_as_constraints + [[Spec("libelf")]] == self.default_constraints

        assert speclist.specs + [Spec("libelf")] == self.default_specs

    def test_spec_list_update_reference(self):
        speclist = SpecList("specs", self.default_input, self.default_reference)

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs

        new_mpis = SpecList("mpis", self.default_reference["mpis"].yaml_list)
        new_mpis.add("mpich@3.3")
        new_reference = self.default_reference.copy()
        new_reference["mpis"] = new_mpis

        speclist.update_reference(new_reference)

        expansion = list(self.default_expansion)
        expansion.insert(3, "mpich@3.3")
        constraints = list(self.default_constraints)
        constraints.insert(3, [Spec("mpich@3.3")])
        specs = list(self.default_specs)
        specs.insert(3, Spec("mpich@3.3"))

        assert speclist.specs_as_yaml_list == expansion
        assert speclist.specs_as_constraints == constraints
        assert speclist.specs == specs

    def test_spec_list_extension(self):
        speclist = SpecList("specs", self.default_input, self.default_reference)

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs

        new_ref = self.default_reference.copy()
        otherlist = SpecList("specs", ["zlib", {"matrix": [["callpath"], ["%intel@18"]]}], new_ref)

        speclist.extend(otherlist)

        assert speclist.specs_as_yaml_list == (
            self.default_expansion + otherlist.specs_as_yaml_list
    def test_spec_list_extension(self, parser_and_speclist):
        parser, mock_list = parser_and_speclist
        other_list = parser.parse_user_specs(
            name="specs", yaml_list=[{"matrix": [["callpath"], ["%intel@18"]]}]
        )
        assert speclist.specs == self.default_specs + otherlist.specs
        assert speclist._reference is new_ref
        mock_list.extend(other_list)

        assert mock_list.specs_as_yaml_list == (DEFAULT_EXPANSION + other_list.specs_as_yaml_list)
        assert mock_list.specs == DEFAULT_SPECS + other_list.specs

    def test_spec_list_nested_matrices(self, parser_and_speclist):
        parser, _ = parser_and_speclist

    def test_spec_list_nested_matrices(self):
        inner_matrix = [{"matrix": [["zlib", "libelf"], ["%gcc", "%intel"]]}]
        outer_addition = ["+shared", "~shared"]
        outer_matrix = [{"matrix": [inner_matrix, outer_addition]}]
        speclist = SpecList("specs", outer_matrix)
        result = parser.parse_user_specs(name="specs", yaml_list=outer_matrix)

        expected_components = itertools.product(
            ["zlib", "libelf"], ["%gcc", "%intel"], ["+shared", "~shared"]
        )
        expected = [Spec(" ".join(combo)) for combo in expected_components]
        assert set(speclist.specs) == set(expected)
        assert set(result.specs) == set(expected)

    @pytest.mark.regression("16897")
    def test_spec_list_recursion_specs_as_constraints(self):
        input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$%gccs", "$%clangs"]]}, "libelf"]

        reference = {
            "gccs": SpecList("gccs", ["gcc@4.5.0"]),
            "clangs": SpecList("clangs", ["clang@3.3"]),
            "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
        }

        speclist = SpecList("specs", input, reference)

        assert speclist.specs_as_yaml_list == self.default_expansion
        assert speclist.specs_as_constraints == self.default_constraints
        assert speclist.specs == self.default_specs

    def test_spec_list_matrix_exclude(self, mock_packages):
        # Test on non-boolean variants for regression for #16841
        matrix = [
            {"matrix": [["multivalue-variant"], ["foo=bar", "foo=baz"]], "exclude": ["foo=bar"]}
        definitions = [
            {"gccs": ["gcc@4.5.0"]},
            {"clangs": ["clang@3.3"]},
            {"mpis": ["zmpi@1.0", "mpich@3.0"]},
        ]
        speclist = SpecList("specs", matrix)
        assert len(speclist.specs) == 1

        parser = SpecListParser()
        parser.parse_definitions(data=definitions)
        result = parser.parse_user_specs(name="specs", yaml_list=input)

        assert result.specs_as_yaml_list == DEFAULT_EXPANSION
        assert result.specs_as_constraints == DEFAULT_CONSTRAINTS
        assert result.specs == DEFAULT_SPECS

    @pytest.mark.regression("16841")
    def test_spec_list_matrix_exclude(self, mock_packages):
        parser = SpecListParser()
        result = parser.parse_user_specs(
            name="specs",
            yaml_list=[
                {
                    "matrix": [["multivalue-variant"], ["foo=bar", "foo=baz"]],
                    "exclude": ["foo=bar"],
                }
            ],
        )
        assert len(result.specs) == 1

    def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_mockery):
        # Put mpich in the database so it can be referred to by hash.
@ -205,9 +179,10 @@ def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_moc

        # Create matrix and exclude +debug, which excludes the first mpich after its abstract hash
        # is resolved.
        speclist = SpecList(
            "specs",
            [
        parser = SpecListParser()
        result = parser.parse_user_specs(
            name="specs",
            yaml_list=[
                {
                    "matrix": [
                        ["mpileaks"],
@ -220,5 +195,5 @@ def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_moc
        )

        # Ensure that only mpich~debug is selected, and that the assembled spec remains abstract.
        assert len(speclist.specs) == 1
        assert speclist.specs[0] == Spec(f"mpileaks ^callpath ^mpich/{mpich_2.dag_hash(5)}")
        assert len(result.specs) == 1
        assert result.specs[0] == Spec(f"mpileaks ^callpath ^mpich/{mpich_2.dag_hash(5)}")
@ -638,7 +638,7 @@ def test_multivalued_variant_2(self):
        a = Spec("multivalue-variant foo=bar")
        b = Spec("multivalue-variant foo=bar,baz")
        # The specs are abstract and they **could** be constrained
        assert a.satisfies(b)
        assert b.satisfies(a) and not a.satisfies(b)
        # An abstract spec can instead be constrained
        assert a.constrain(b)

@ -973,13 +973,10 @@ def test_spec_formatting_bad_formats(self, default_mock_concretization, fmt_str)
        with pytest.raises(SpecFormatStringError):
            spec.format(fmt_str)

    def test_combination_of_wildcard_or_none(self):
        # Test that using 'none' and another value raises
        with pytest.raises(spack.spec_parser.SpecParsingError, match="cannot be combined"):
            Spec("multivalue-variant foo=none,bar")

        # Test that using wildcard and another value raises
        with pytest.raises(spack.spec_parser.SpecParsingError, match="cannot be combined"):
    def test_wildcard_is_invalid_variant_value(self):
        """The spec string x=* is parsed as a multi-valued variant with values the empty set.
        That excludes * as a literal variant value."""
        with pytest.raises(spack.spec_parser.SpecParsingError, match="cannot use reserved value"):
            Spec("multivalue-variant foo=*,bar")

    def test_errors_in_variant_directive(self):
@ -1368,6 +1365,18 @@ def test_splice_swap_names_mismatch_virtuals(self, default_mock_concretization,
        with pytest.raises(spack.spec.SpliceError, match="virtual"):
            vt.splice(vh, transitive)

    def test_adaptor_optflags(self):
        """Tests that we can obtain the list of opt_flags and debug_flags
        from the compiler adaptor, and that this list is taken from the
        appropriate compiler package.
        """
        # pkg-a depends on c, so only the gcc compiler should be chosen
        spec = spack.concretize.concretize_one(Spec("pkg-a %gcc"))
        assert "-Otestopt" in spec.package.compiler.opt_flags
        # debug_flags is not set for this package; iterating must simply yield nothing
        for x in spec.package.compiler.debug_flags:
            pass

    def test_spec_override(self):
        init_spec = Spec("pkg-a foo=baz foobar=baz cflags=-O3 cxxflags=-O1")
        change_spec = Spec("pkg-a foo=fee cflags=-O2")
@ -633,6 +633,23 @@ def _specfile_for(spec_str, filename):
        ],
        "zlib %[virtuals=fortran] gcc@14.1 %[virtuals=c,cxx] clang",
    ),
    # test := and :== syntax for key value pairs
    (
        "gcc languages:=c,c++",
        [
            Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "gcc"),
            Token(SpecTokens.KEY_VALUE_PAIR, "languages:=c,c++"),
        ],
        "gcc languages:='c,c++'",
    ),
    (
        "gcc languages:==c,c++",
        [
            Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "gcc"),
            Token(SpecTokens.PROPAGATED_KEY_VALUE_PAIR, "languages:==c,c++"),
        ],
        "gcc languages:=='c,c++'",
    ),
    ],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_package):
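A reading aid for the new tokens (semantics inferred from the token names and round-trips above, not a parser specification): '=' keeps a variant's value set open, ':=' closes it to exactly the listed values, and ':==' is the propagated form of the closed set.

from spack.spec import Spec

open_set = Spec("gcc languages=c,c++")      # abstract: more values may still be added
closed_set = Spec("gcc languages:=c,c++")   # concrete: exactly c and c++
propagated = Spec("gcc languages:==c,c++")  # concrete, propagated to dependencies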
@ -433,6 +433,10 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
        assert s2.format("{compiler.name}") == "gcc"
        assert s2.format("{compiler.version}") != "none"

        # Ensure satisfies still works with compilers
        assert s2.satisfies("%gcc")
        assert s2.satisfies("%gcc@9.4.0")


def test_anchorify_1():
    """Test that anchorify replaces duplicate values with references to a single instance, and
@ -334,20 +334,20 @@ def test_remove_complex_package_logic_filtered():
    "package_spec,expected_hash",
    [
        ("amdfftw", "tivb752zddjgvfkogfs7cnnvp5olj6co"),
        ("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
        ("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
        ("grads", "lomrsppasfxegyamz4r33zgwiqkveftv"),
        ("llvm", "paicamlvy5jkgxw4xnacaxahrixe3f3i"),
        # has @when("@4.1.0") and raw unicode literals
        ("mfem", "whwftpqbjvzncmb52oz6izkanbha2uji"),
        ("mfem@4.0.0", "whwftpqbjvzncmb52oz6izkanbha2uji"),
        ("mfem@4.1.0", "bpi7of3xelo7fr3ta2lm6bmiruijnxcg"),
        ("mfem", "slf5qyyyhuj66mo5lpuhkrs35akh2zck"),
        ("mfem@4.0.0", "slf5qyyyhuj66mo5lpuhkrs35akh2zck"),
        ("mfem@4.1.0", "yo3ymaulytctas67zjn663ixw5cfyh5u"),
        # has @when("@1.5.0:")
        ("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
        ("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
        ("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
        ("py-torch", "m3ucsddqr7hjevtgx4cad34nrtqgyjfg"),
        ("py-torch@1.0", "m3ucsddqr7hjevtgx4cad34nrtqgyjfg"),
        ("py-torch@1.6", "insaxs6bq34rvyhajdbyr4wddqeqb2t3"),
        # has a print with multiple arguments
        ("legion", "bq2etsik5l6pbryxmbhfhzynci56ruy4"),
        # has nested `with when()` blocks and loops
        ("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
        ("trilinos", "ojbtbu3p6gpa42sbilblo2ioanvhouxu"),
    ],
)
def test_package_hash_consistency(package_spec, expected_hash):
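For context on why the hashes above shift with the version (an illustration of the idea only, not Spack's implementation): the package hash is computed over canonicalized package source, in which directives guarded by @when conditions that don't apply to the requested version are filtered out. A toy version of hashing canonicalized source:

import ast
import hashlib

def toy_source_hash(source: str) -> str:
    # Hash an AST dump rather than raw text, so comments and formatting
    # don't change the result; Spack's real canonicalization goes further.
    return hashlib.md5(ast.dump(ast.parse(source)).encode()).hexdigest()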
@ -12,7 +12,6 @@
import spack.variant
from spack.spec import Spec, VariantMap
from spack.variant import (
    AbstractVariant,
    BoolValuedVariant,
    DuplicateVariantError,
    InconsistentValidationError,
@ -22,6 +21,7 @@
    SingleValuedVariant,
    UnsatisfiableVariantSpecError,
    Variant,
    VariantValue,
    disjoint_sets,
)

@ -29,173 +29,108 @@
class TestMultiValuedVariant:
    def test_initialization(self):
        # Basic properties
        a = MultiValuedVariant("foo", "bar,baz")
        assert repr(a) == "MultiValuedVariant('foo', 'bar,baz')"
        assert str(a) == "foo=bar,baz"
        a = MultiValuedVariant("foo", ("bar", "baz"))
        assert str(a) == "foo:=bar,baz"
        assert a.values == ("bar", "baz")
        assert a.value == ("bar", "baz")
        assert "bar" in a
        assert "baz" in a
        assert eval(repr(a)) == a

        # Spaces are trimmed
        b = MultiValuedVariant("foo", "bar, baz")
        assert repr(b) == "MultiValuedVariant('foo', 'bar, baz')"
        assert str(b) == "foo=bar,baz"
        assert b.value == ("bar", "baz")
        assert "bar" in b
        assert "baz" in b
        assert a == b
        assert hash(a) == hash(b)
        assert eval(repr(b)) == a

        # Order is not important
        c = MultiValuedVariant("foo", "baz, bar")
        assert repr(c) == "MultiValuedVariant('foo', 'baz, bar')"
        assert str(c) == "foo=bar,baz"
        assert c.value == ("bar", "baz")
        c = MultiValuedVariant("foo", ("baz", "bar"))
        assert str(c) == "foo:=bar,baz"
        assert c.values == ("bar", "baz")
        assert "bar" in c
        assert "baz" in c
        assert a == c
        assert hash(a) == hash(c)
        assert eval(repr(c)) == a

        # Check the copy
        d = a.copy()
        assert repr(a) == repr(d)
        assert str(a) == str(d)
        assert d.value == ("bar", "baz")
        assert d.values == ("bar", "baz")
        assert "bar" in d
        assert "baz" in d
        assert a == d
        assert a is not d
        assert hash(a) == hash(d)
        assert eval(repr(d)) == a

    def test_satisfies(self):
        a = MultiValuedVariant("foo", "bar,baz")
        b = MultiValuedVariant("foo", "bar")
        c = MultiValuedVariant("fee", "bar,baz")
        d = MultiValuedVariant("foo", "True")
        a = MultiValuedVariant("foo", ("bar", "baz"))
        b = MultiValuedVariant("foo", ("bar",))
        c = MultiValuedVariant("fee", ("bar", "baz"))
        d = MultiValuedVariant("foo", (True,))

        # 'foo=bar,baz' satisfies 'foo=bar'
        assert a.satisfies(b)

        # 'foo=bar' does not satisfy 'foo=bar,baz'
        assert not b.satisfies(a)

        # 'foo=bar,baz' does not satisfy 'fee=bar,baz' and vice-versa
        assert not a.satisfies(c)
        assert not c.satisfies(a)

        # Implicit type conversion for variants of other types
        # concrete, different values do not satisfy each other
        assert not a.satisfies(b) and not b.satisfies(a)
        assert not a.satisfies(c) and not c.satisfies(a)

        # SingleValuedVariant and MultiValuedVariant with the same single concrete value do satisfy
        # each other
        b_sv = SingleValuedVariant("foo", "bar")
        assert b.satisfies(b_sv)
        d_sv = SingleValuedVariant("foo", "True")
        assert d.satisfies(d_sv)
        almost_d_bv = SingleValuedVariant("foo", "true")
        assert not d.satisfies(almost_d_bv)
        assert b.satisfies(b_sv) and b_sv.satisfies(b)
        d_sv = SingleValuedVariant("foo", True)
        assert d.satisfies(d_sv) and d_sv.satisfies(d)
        almost_d_bv = SingleValuedVariant("foo", True)
        assert d.satisfies(almost_d_bv)

        d_bv = BoolValuedVariant("foo", "True")
        assert d.satisfies(d_bv)
        # This case is 'peculiar': the two BV instances are
        # equivalent, but if converted to MV they are not
        # as MV is case sensitive with respect to 'True' and 'False'
        almost_d_bv = BoolValuedVariant("foo", "true")
        assert not d.satisfies(almost_d_bv)
        d_bv = BoolValuedVariant("foo", True)
        assert d.satisfies(d_bv) and d_bv.satisfies(d)

    def test_compatible(self):
        a = MultiValuedVariant("foo", "bar,baz")
        b = MultiValuedVariant("foo", "True")
        c = MultiValuedVariant("fee", "bar,baz")
        d = MultiValuedVariant("foo", "bar,barbaz")
    def test_intersects(self):
        a = MultiValuedVariant("foo", ("bar", "baz"))
        b = MultiValuedVariant("foo", (True,))
        c = MultiValuedVariant("fee", ("bar", "baz"))
        d = MultiValuedVariant("foo", ("bar", "barbaz"))

        # If the name of two multi-valued variants is the same,
        # they are compatible
        assert a.compatible(b)
        assert not a.compatible(c)
        assert a.compatible(d)
        # concrete, different values do not intersect.
        assert not a.intersects(b) and not b.intersects(a)
        assert not a.intersects(c) and not c.intersects(a)
        assert not a.intersects(d) and not d.intersects(a)
        assert not b.intersects(c) and not c.intersects(b)
        assert not b.intersects(d) and not d.intersects(b)
        assert not c.intersects(d) and not d.intersects(c)

        assert b.compatible(a)
        assert not b.compatible(c)
        assert b.compatible(d)
        # SV and MV intersect if they have the same concrete value.
        b_sv = SingleValuedVariant("foo", True)
        assert b.intersects(b_sv)
        assert not c.intersects(b_sv)

        assert not c.compatible(a)
        assert not c.compatible(b)
        assert not c.compatible(d)

        assert d.compatible(a)
        assert d.compatible(b)
        assert not d.compatible(c)

        # Implicit type conversion for other types

        b_sv = SingleValuedVariant("foo", "True")
        assert b.compatible(b_sv)
        assert not c.compatible(b_sv)

        b_bv = BoolValuedVariant("foo", "True")
        assert b.compatible(b_bv)
        assert not c.compatible(b_bv)
        # BoolValuedVariant intersects if the value is the same
        b_bv = BoolValuedVariant("foo", True)
        assert b.intersects(b_bv)
        assert not c.intersects(b_bv)

    def test_constrain(self):
        # Try to constrain on a value with less constraints than self
        a = MultiValuedVariant("foo", "bar,baz")
        b = MultiValuedVariant("foo", "bar")

        changed = a.constrain(b)
        assert not changed
        t = MultiValuedVariant("foo", "bar,baz")
        assert a == t

        # Try to constrain on a value with more constraints than self
        a = MultiValuedVariant("foo", "bar,baz")
        b = MultiValuedVariant("foo", "bar")

        changed = b.constrain(a)
        assert changed
        t = MultiValuedVariant("foo", "bar,baz")
        assert a == t
        # Concrete values cannot be constrained
        a = MultiValuedVariant("foo", ("bar", "baz"))
        b = MultiValuedVariant("foo", ("bar",))
        with pytest.raises(UnsatisfiableVariantSpecError):
            a.constrain(b)
        with pytest.raises(UnsatisfiableVariantSpecError):
            b.constrain(a)

        # Try to constrain on the same value
        a = MultiValuedVariant("foo", "bar,baz")
        a = MultiValuedVariant("foo", ("bar", "baz"))
        b = a.copy()

        changed = a.constrain(b)
        assert not changed
        t = MultiValuedVariant("foo", "bar,baz")
        assert a == t
        assert not a.constrain(b)
        assert a == b == MultiValuedVariant("foo", ("bar", "baz"))

        # Try to constrain on a different name
        a = MultiValuedVariant("foo", "bar,baz")
        b = MultiValuedVariant("fee", "bar")
        a = MultiValuedVariant("foo", ("bar", "baz"))
        b = MultiValuedVariant("fee", ("bar",))

        with pytest.raises(ValueError):
        with pytest.raises(UnsatisfiableVariantSpecError):
            a.constrain(b)

        # Implicit type conversion for variants of other types

        a = MultiValuedVariant("foo", "bar,baz")
        b_sv = SingleValuedVariant("foo", "bar")
        c_sv = SingleValuedVariant("foo", "barbaz")

        assert not a.constrain(b_sv)
        assert a.constrain(c_sv)

        d_bv = BoolValuedVariant("foo", "True")

        assert a.constrain(d_bv)
        assert not a.constrain(d_bv)

    def test_yaml_entry(self):
        a = MultiValuedVariant("foo", "bar,baz,barbaz")
        b = MultiValuedVariant("foo", "bar, baz, barbaz")
        expected = ("foo", sorted(["bar", "baz", "barbaz"]))
        a = MultiValuedVariant("foo", ("bar", "baz", "barbaz"))
        expected = ("foo", sorted(("bar", "baz", "barbaz")))

        assert a.yaml_entry() == expected
        assert b.yaml_entry() == expected

        a = MultiValuedVariant("foo", "bar")
        a = MultiValuedVariant("foo", ("bar",))
        expected = ("foo", sorted(["bar"]))

        assert a.yaml_entry() == expected
@ -205,152 +140,76 @@ class TestSingleValuedVariant:
    def test_initialization(self):
        # Basic properties
        a = SingleValuedVariant("foo", "bar")
        assert repr(a) == "SingleValuedVariant('foo', 'bar')"
        assert str(a) == "foo=bar"
        assert a.values == ("bar",)
        assert a.value == "bar"
        assert "bar" in a
        assert eval(repr(a)) == a

        # Raise if multiple values are passed
        with pytest.raises(ValueError):
            SingleValuedVariant("foo", "bar, baz")

        # Check the copy
        b = a.copy()
        assert repr(a) == repr(b)
        assert str(a) == str(b)
        assert b.values == ("bar",)
        assert b.value == "bar"
        assert "bar" in b
        assert a == b
        assert a is not b
        assert hash(a) == hash(b)
        assert eval(repr(b)) == a

    def test_satisfies(self):
        a = SingleValuedVariant("foo", "bar")
        b = SingleValuedVariant("foo", "bar")
        c = SingleValuedVariant("foo", "baz")
        d = SingleValuedVariant("fee", "bar")
        e = SingleValuedVariant("foo", "True")

        # 'foo=bar' can only satisfy 'foo=bar'
        assert a.satisfies(b)
        assert not a.satisfies(c)
        assert not a.satisfies(d)
        # concrete, different values do not satisfy each other
        assert not a.satisfies(c) and not c.satisfies(a)
        assert not a.satisfies(d) and not d.satisfies(a)
        assert not b.satisfies(c) and not c.satisfies(b)
        assert not b.satisfies(d) and not d.satisfies(b)
        assert not c.satisfies(d) and not d.satisfies(c)

        assert b.satisfies(a)
        assert not b.satisfies(c)
        assert not b.satisfies(d)
        assert a.satisfies(b) and b.satisfies(a)

        assert not c.satisfies(a)
        assert not c.satisfies(b)
        assert not c.satisfies(d)

        # Implicit type conversion for variants of other types

        a_mv = MultiValuedVariant("foo", "bar")
        assert a.satisfies(a_mv)
        multiple_values = MultiValuedVariant("foo", "bar,baz")
        assert not a.satisfies(multiple_values)

        e_bv = BoolValuedVariant("foo", "True")
        assert e.satisfies(e_bv)
        almost_e_bv = BoolValuedVariant("foo", "true")
        assert not e.satisfies(almost_e_bv)

    def test_compatible(self):
    def test_intersects(self):
        a = SingleValuedVariant("foo", "bar")
        b = SingleValuedVariant("fee", "bar")
        c = SingleValuedVariant("foo", "baz")
        d = SingleValuedVariant("foo", "bar")

        # If the name of two multi-valued variants is the same,
        # they are compatible
        assert not a.compatible(b)
        assert not a.compatible(c)
        assert a.compatible(d)
        # concrete, different values do not intersect
        assert not a.intersects(b) and not b.intersects(a)
        assert not a.intersects(c) and not c.intersects(a)
        assert not b.intersects(c) and not c.intersects(b)
        assert not b.intersects(d) and not d.intersects(b)
        assert not c.intersects(d) and not d.intersects(c)

        assert not b.compatible(a)
        assert not b.compatible(c)
        assert not b.compatible(d)

        assert not c.compatible(a)
        assert not c.compatible(b)
        assert not c.compatible(d)

        assert d.compatible(a)
        assert not d.compatible(b)
        assert not d.compatible(c)

        # Implicit type conversion for variants of other types

        a_mv = MultiValuedVariant("foo", "bar")
        b_mv = MultiValuedVariant("fee", "bar")
        c_mv = MultiValuedVariant("foo", "baz")
        d_mv = MultiValuedVariant("foo", "bar")

        assert not a.compatible(b_mv)
        assert not a.compatible(c_mv)
        assert a.compatible(d_mv)

        assert not b.compatible(a_mv)
        assert not b.compatible(c_mv)
        assert not b.compatible(d_mv)

        assert not c.compatible(a_mv)
        assert not c.compatible(b_mv)
        assert not c.compatible(d_mv)

        assert d.compatible(a_mv)
        assert not d.compatible(b_mv)
        assert not d.compatible(c_mv)

        e = SingleValuedVariant("foo", "True")
        e_bv = BoolValuedVariant("foo", "True")
        almost_e_bv = BoolValuedVariant("foo", "true")

        assert e.compatible(e_bv)
        assert not e.compatible(almost_e_bv)
        assert a.intersects(d) and d.intersects(a)

    def test_constrain(self):
        # Try to constrain on a value equal to self
        a = SingleValuedVariant("foo", "bar")
        b = SingleValuedVariant("foo", "bar")

        changed = a.constrain(b)
        assert not changed
        t = SingleValuedVariant("foo", "bar")
        assert a == t
        assert not a.constrain(b)
        assert a == SingleValuedVariant("foo", "bar")

        # Try to constrain on a value with a different value
        a = SingleValuedVariant("foo", "bar")
        b = SingleValuedVariant("foo", "baz")

        with pytest.raises(UnsatisfiableVariantSpecError):
            b.constrain(a)

        # Try to constrain on a value with a different name
        a = SingleValuedVariant("foo", "bar")
        b = SingleValuedVariant("fee", "bar")

        with pytest.raises(ValueError):
        with pytest.raises(UnsatisfiableVariantSpecError):
            b.constrain(a)

        # Try to constrain on the same value
        a = SingleValuedVariant("foo", "bar")
        b = a.copy()

        changed = a.constrain(b)
        assert not changed
        t = SingleValuedVariant("foo", "bar")
        assert a == t

        # Implicit type conversion for variants of other types
        a = SingleValuedVariant("foo", "True")
        mv = MultiValuedVariant("foo", "True")
        bv = BoolValuedVariant("foo", "True")
        for v in (mv, bv):
            assert not a.constrain(v)
        assert not a.constrain(b)
        assert a == SingleValuedVariant("foo", "bar")

    def test_yaml_entry(self):
        a = SingleValuedVariant("foo", "bar")
@ -362,129 +221,94 @@ def test_yaml_entry(self):
|
||||
class TestBoolValuedVariant:
|
||||
def test_initialization(self):
|
||||
# Basic properties - True value
|
||||
for v in (True, "True", "TRUE", "TrUe"):
|
||||
a = BoolValuedVariant("foo", v)
|
||||
assert repr(a) == "BoolValuedVariant('foo', {0})".format(repr(v))
|
||||
assert str(a) == "+foo"
|
||||
assert a.value is True
|
||||
assert True in a
|
||||
assert eval(repr(a)) == a
|
||||
a = BoolValuedVariant("foo", True)
|
||||
assert str(a) == "+foo"
|
||||
assert a.value is True
|
||||
assert a.values == (True,)
|
||||
assert True in a
|
||||
|
||||
# Copy - True value
|
||||
b = a.copy()
|
||||
assert repr(a) == repr(b)
|
||||
assert str(a) == str(b)
|
||||
assert b.value is True
|
||||
assert b.values == (True,)
|
||||
assert True in b
|
||||
assert a == b
|
||||
assert a is not b
|
||||
assert hash(a) == hash(b)
|
||||
assert eval(repr(b)) == a
|
||||
|
||||
# Basic properties - False value
|
||||
for v in (False, "False", "FALSE", "FaLsE"):
|
||||
a = BoolValuedVariant("foo", v)
|
||||
assert repr(a) == "BoolValuedVariant('foo', {0})".format(repr(v))
|
||||
assert str(a) == "~foo"
|
||||
assert a.value is False
|
||||
assert False in a
|
||||
assert eval(repr(a)) == a
|
||||
|
||||
# Copy - True value
|
||||
# Copy - False value
|
||||
a = BoolValuedVariant("foo", False)
|
||||
b = a.copy()
|
||||
assert repr(a) == repr(b)
|
||||
assert str(a) == str(b)
|
||||
assert b.value is False
|
||||
assert b.values == (False,)
|
||||
assert False in b
|
||||
assert a == b
|
||||
assert a is not b
|
||||
assert eval(repr(b)) == a
|
||||
|
||||
# Invalid values
|
||||
for v in ("bar", "bar,baz"):
|
||||
with pytest.raises(ValueError):
|
||||
BoolValuedVariant("foo", v)
|
||||
|
||||
def test_satisfies(self):
|
||||
a = BoolValuedVariant("foo", True)
|
||||
b = BoolValuedVariant("foo", False)
|
||||
c = BoolValuedVariant("fee", False)
|
||||
d = BoolValuedVariant("foo", "True")
|
||||
d = BoolValuedVariant("foo", True)
|
||||
|
||||
assert not a.satisfies(b)
|
||||
assert not a.satisfies(c)
|
||||
assert a.satisfies(d)
|
||||
# concrete, different values do not satisfy each other
|
||||
assert not a.satisfies(b) and not b.satisfies(a)
|
||||
assert not a.satisfies(c) and not c.satisfies(a)
|
||||
assert not b.satisfies(c) and not c.satisfies(b)
|
||||
assert not b.satisfies(d) and not d.satisfies(b)
|
||||
assert not c.satisfies(d) and not d.satisfies(c)
|
||||
|
||||
assert not b.satisfies(a)
|
||||
assert not b.satisfies(c)
|
||||
assert not b.satisfies(d)
|
||||
assert a.satisfies(d) and d.satisfies(a)
|
||||
|
||||
assert not c.satisfies(a)
|
||||
assert not c.satisfies(b)
|
||||
assert not c.satisfies(d)
|
||||
# # BV variants are case insensitive to 'True' or 'False'
|
||||
# d_mv = MultiValuedVariant("foo", "True")
|
||||
# assert d.satisfies(d_mv)
|
||||
# assert not b.satisfies(d_mv)
|
||||
|
||||
assert d.satisfies(a)
|
||||
assert not d.satisfies(b)
|
||||
assert not d.satisfies(c)
|
||||
# d_mv = MultiValuedVariant("foo", "FaLsE")
|
||||
# assert not d.satisfies(d_mv)
|
||||
# assert b.satisfies(d_mv)
|
||||
|
||||
# BV variants are case insensitive to 'True' or 'False'
|
||||
d_mv = MultiValuedVariant("foo", "True")
|
||||
assert d.satisfies(d_mv)
|
||||
assert not b.satisfies(d_mv)
|
||||
# d_mv = MultiValuedVariant("foo", "bar")
|
||||
# assert not d.satisfies(d_mv)
|
||||
# assert not b.satisfies(d_mv)
|
||||
|
||||
d_mv = MultiValuedVariant("foo", "FaLsE")
|
||||
assert not d.satisfies(d_mv)
|
||||
assert b.satisfies(d_mv)
|
||||
# d_sv = SingleValuedVariant("foo", "True")
|
||||
# assert d.satisfies(d_sv)
|
||||
|
||||
d_mv = MultiValuedVariant("foo", "bar")
|
||||
assert not d.satisfies(d_mv)
|
||||
assert not b.satisfies(d_mv)
|
||||
|
||||
d_sv = SingleValuedVariant("foo", "True")
|
||||
assert d.satisfies(d_sv)
|
||||
|
||||
def test_compatible(self):
|
||||
def test_intersects(self):
|
||||
a = BoolValuedVariant("foo", True)
|
||||
b = BoolValuedVariant("fee", True)
|
||||
c = BoolValuedVariant("foo", False)
|
||||
d = BoolValuedVariant("foo", "True")
|
||||
d = BoolValuedVariant("foo", True)
|
||||
|
||||
# If the name of two multi-valued variants is the same,
|
||||
# they are compatible
|
||||
assert not a.compatible(b)
|
||||
assert not a.compatible(c)
|
||||
assert a.compatible(d)
|
||||
# concrete, different values do not intersect each other
|
||||
assert not a.intersects(b) and not b.intersects(a)
|
||||
assert not a.intersects(c) and not c.intersects(a)
|
||||
assert not b.intersects(c) and not c.intersects(b)
|
||||
assert not b.intersects(d) and not d.intersects(b)
|
||||
assert not c.intersects(d) and not d.intersects(c)
|
||||
|
||||
assert not b.compatible(a)
|
||||
assert not b.compatible(c)
|
||||
assert not b.compatible(d)
|
||||
assert a.intersects(d) and d.intersects(a)
|
||||
|
||||
assert not c.compatible(a)
|
||||
assert not c.compatible(b)
|
||||
assert not c.compatible(d)
|
||||
# for value in ("True", "TrUe", "TRUE"):
|
||||
# d_mv = MultiValuedVariant("foo", value)
|
||||
# assert d.intersects(d_mv)
|
||||
# assert not c.intersects(d_mv)
|
||||
|
||||
assert d.compatible(a)
assert not d.compatible(b)
assert not d.compatible(c)

for value in ("True", "TrUe", "TRUE"):
d_mv = MultiValuedVariant("foo", value)
assert d.compatible(d_mv)
assert not c.compatible(d_mv)

d_sv = SingleValuedVariant("foo", value)
assert d.compatible(d_sv)
assert not c.compatible(d_sv)
# d_sv = SingleValuedVariant("foo", value)
# assert d.intersects(d_sv)
# assert not c.intersects(d_sv)

def test_constrain(self):
# Try to constrain on a value equal to self
a = BoolValuedVariant("foo", "True")
a = BoolValuedVariant("foo", True)
b = BoolValuedVariant("foo", True)

changed = a.constrain(b)
assert not changed
t = BoolValuedVariant("foo", True)
assert a == t
assert not a.constrain(b)
assert a == BoolValuedVariant("foo", True)

# Try to constrain on a value with a different value
a = BoolValuedVariant("foo", True)
@@ -497,44 +321,35 @@ def test_constrain(self):
a = BoolValuedVariant("foo", True)
b = BoolValuedVariant("fee", True)

with pytest.raises(ValueError):
with pytest.raises(UnsatisfiableVariantSpecError):
b.constrain(a)

# Try to constrain on the same value
a = BoolValuedVariant("foo", True)
b = a.copy()

changed = a.constrain(b)
assert not changed
t = BoolValuedVariant("foo", True)
assert a == t

# Try to constrain on other values
a = BoolValuedVariant("foo", "True")
sv = SingleValuedVariant("foo", "True")
mv = MultiValuedVariant("foo", "True")
for v in (sv, mv):
assert not a.constrain(v)
assert not a.constrain(b)
assert a == BoolValuedVariant("foo", True)

def test_yaml_entry(self):
a = BoolValuedVariant("foo", "True")
a = BoolValuedVariant("foo", True)
expected = ("foo", True)
assert a.yaml_entry() == expected

a = BoolValuedVariant("foo", "False")
a = BoolValuedVariant("foo", False)
expected = ("foo", False)
assert a.yaml_entry() == expected


def test_from_node_dict():
a = MultiValuedVariant.from_node_dict("foo", ["bar"])
assert type(a) is MultiValuedVariant
a = VariantValue.from_node_dict("foo", ["bar"])
assert a.type == spack.variant.VariantType.MULTI

a = MultiValuedVariant.from_node_dict("foo", "bar")
assert type(a) is SingleValuedVariant
a = VariantValue.from_node_dict("foo", "bar")
assert a.type == spack.variant.VariantType.SINGLE

a = MultiValuedVariant.from_node_dict("foo", "true")
assert type(a) is BoolValuedVariant
a = VariantValue.from_node_dict("foo", "true")
assert a.type == spack.variant.VariantType.BOOL


class TestVariant:
@@ -548,7 +363,7 @@ def test_validation(self):

# Multiple values are not allowed
with pytest.raises(MultipleValuesInExclusiveVariantError):
vspec.value = "bar,baz"
vspec.set("bar", "baz")

# Inconsistent vspec
vspec.name = "FOO"
@@ -557,10 +372,10 @@ def test_validation(self):

# Valid multi-value vspec
a.multi = True
vspec = a.make_variant("bar,baz")
vspec = a.make_variant("bar", "baz")
a.validate_or_raise(vspec, "test-package")
# Add an invalid value
vspec.value = "bar,baz,barbaz"
vspec.set("bar", "baz", "barbaz")
with pytest.raises(InvalidVariantValueError):
a.validate_or_raise(vspec, "test-package")

@@ -571,12 +386,12 @@ def validator(x):
except ValueError:
return False

a = Variant("foo", default=1024, description="", values=validator, multi=False)
a = Variant("foo", default="1024", description="", values=validator, multi=False)
vspec = a.make_default()
a.validate_or_raise(vspec, "test-package")
vspec.value = 2056
vspec.set("2056")
a.validate_or_raise(vspec, "test-package")
vspec.value = "foo"
vspec.set("foo")
with pytest.raises(InvalidVariantValueError):
a.validate_or_raise(vspec, "test-package")

@@ -606,9 +421,9 @@ def test_invalid_values(self) -> None:
a["foo"] = 2

# Duplicate variant
a["foo"] = MultiValuedVariant("foo", "bar,baz")
a["foo"] = MultiValuedVariant("foo", ("bar", "baz"))
with pytest.raises(DuplicateVariantError):
a["foo"] = MultiValuedVariant("foo", "bar")
a["foo"] = MultiValuedVariant("foo", ("bar",))

with pytest.raises(DuplicateVariantError):
a["foo"] = SingleValuedVariant("foo", "bar")
@@ -618,7 +433,7 @@ def test_invalid_values(self) -> None:

# Non matching names between key and vspec.name
with pytest.raises(KeyError):
a["bar"] = MultiValuedVariant("foo", "bar")
a["bar"] = MultiValuedVariant("foo", ("bar",))

def test_set_item(self) -> None:
# Check that all the three types of variants are accepted
@@ -626,7 +441,7 @@ def test_set_item(self) -> None:

a["foo"] = BoolValuedVariant("foo", True)
a["bar"] = SingleValuedVariant("bar", "baz")
a["foobar"] = MultiValuedVariant("foobar", "a, b, c, d, e")
a["foobar"] = MultiValuedVariant("foobar", ("a", "b", "c", "d", "e"))

def test_substitute(self) -> None:
# Check substitution of a key that exists
@@ -642,48 +457,47 @@ def test_substitute(self) -> None:
def test_satisfies_and_constrain(self) -> None:
# foo=bar foobar=fee feebar=foo
a = VariantMap(Spec())
a["foo"] = MultiValuedVariant("foo", "bar")
a["foo"] = MultiValuedVariant("foo", ("bar",))
a["foobar"] = SingleValuedVariant("foobar", "fee")
a["feebar"] = SingleValuedVariant("feebar", "foo")

# foo=bar,baz foobar=fee shared=True
b = VariantMap(Spec())
b["foo"] = MultiValuedVariant("foo", "bar, baz")
b["foo"] = MultiValuedVariant("foo", ("bar", "baz"))
b["foobar"] = SingleValuedVariant("foobar", "fee")
b["shared"] = BoolValuedVariant("shared", True)

assert a.intersects(b)
assert b.intersects(a)

assert not a.satisfies(b)
assert not b.satisfies(a)
# concrete, different values do not intersect / satisfy each other
assert not a.intersects(b) and not b.intersects(a)
assert not a.satisfies(b) and not b.satisfies(a)

# foo=bar,baz foobar=fee feebar=foo shared=True
c = VariantMap(Spec())
c["foo"] = MultiValuedVariant("foo", "bar, baz")
c["foo"] = MultiValuedVariant("foo", ("bar", "baz"))
c["foobar"] = SingleValuedVariant("foobar", "fee")
c["feebar"] = SingleValuedVariant("feebar", "foo")
c["shared"] = BoolValuedVariant("shared", True)

assert a.constrain(b)
assert a == c
# concrete values cannot be constrained
with pytest.raises(spack.variant.UnsatisfiableVariantSpecError):
a.constrain(b)

def test_copy(self) -> None:
a = VariantMap(Spec())
a["foo"] = BoolValuedVariant("foo", True)
a["bar"] = SingleValuedVariant("bar", "baz")
a["foobar"] = MultiValuedVariant("foobar", "a, b, c, d, e")
a["foobar"] = MultiValuedVariant("foobar", ("a", "b", "c", "d", "e"))

c = a.copy()
assert a == c

def test_str(self) -> None:
c = VariantMap(Spec())
c["foo"] = MultiValuedVariant("foo", "bar, baz")
c["foo"] = MultiValuedVariant("foo", ("bar", "baz"))
c["foobar"] = SingleValuedVariant("foobar", "fee")
c["feebar"] = SingleValuedVariant("feebar", "foo")
c["shared"] = BoolValuedVariant("shared", True)
assert str(c) == "+shared feebar=foo foo=bar,baz foobar=fee"
assert str(c) == "+shared feebar=foo foo:=bar,baz foobar=fee"
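The updated assert above documents the new spec syntax: a concrete multi-valued variant now prints with `:=` instead of `=`. A minimal sketch of the round-trip, assuming the VariantMap/Spec API shown in this diff (the exact expected string is illustrative, not from the test suite):

# Hedged sketch: the MultiValuedVariant factory marks the value concrete.
m = VariantMap(Spec())
m["foo"] = MultiValuedVariant("foo", ("bar", "baz"))
assert str(m) == "foo:=bar,baz"  # ":=" signals "exactly these values"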


def test_disjoint_set_initialization_errors():
@@ -778,10 +592,10 @@ def test_wild_card_valued_variants_equivalent_to_str():

several_arbitrary_values = ("doe", "re", "mi")
# "*" case
wild_output = wild_var.make_variant(several_arbitrary_values)
wild_output = wild_var.make_variant(*several_arbitrary_values)
wild_var.validate_or_raise(wild_output, "test-package")
# str case
str_output = str_var.make_variant(several_arbitrary_values)
str_output = str_var.make_variant(*several_arbitrary_values)
str_var.validate_or_raise(str_output, "test-package")
# equivalence: each instance was already validated
assert str_output.value == wild_output.value
@@ -900,33 +714,175 @@ def test_concretize_variant_default_with_multiple_defs(


@pytest.mark.parametrize(
"spec,variant_name,after",
"spec,variant_name,narrowed_type",
[
# dev_path is a special case
("foo dev_path=/path/to/source", "dev_path", SingleValuedVariant),
("foo dev_path=/path/to/source", "dev_path", spack.variant.VariantType.SINGLE),
# reserved name: won't be touched
("foo patches=2349dc44", "patches", AbstractVariant),
("foo patches=2349dc44", "patches", spack.variant.VariantType.MULTI),
# simple case -- one definition applies
("variant-values@1.0 v=foo", "v", SingleValuedVariant),
("variant-values@1.0 v=foo", "v", spack.variant.VariantType.SINGLE),
# simple, but with bool valued variant
("pkg-a bvv=true", "bvv", BoolValuedVariant),
# variant doesn't exist at version
("variant-values@4.0 v=bar", "v", spack.spec.InvalidVariantForSpecError),
# multiple definitions, so not yet knowable
("variant-values@2.0 v=bar", "v", AbstractVariant),
("pkg-a bvv=true", "bvv", spack.variant.VariantType.BOOL),
# takes the second definition, which overrides the single-valued one
("variant-values@2.0 v=bar", "v", spack.variant.VariantType.MULTI),
],
)
def test_substitute_abstract_variants(mock_packages, spec, variant_name, after):
def test_substitute_abstract_variants_narrowing(mock_packages, spec, variant_name, narrowed_type):
spec = Spec(spec)
spack.spec.substitute_abstract_variants(spec)
assert spec.variants[variant_name].type == narrowed_type

# all variants start out as AbstractVariant
assert isinstance(spec.variants[variant_name], AbstractVariant)

if issubclass(after, Exception):
# if we're checking for an error, use pytest.raises
with pytest.raises(after):
spack.spec.substitute_abstract_variants(spec)
else:
# ensure that the type of the variant on the spec has been narrowed (or not)
spack.spec.substitute_abstract_variants(spec)
assert isinstance(spec.variants[variant_name], after)
def test_substitute_abstract_variants_failure(mock_packages):
with pytest.raises(spack.spec.InvalidVariantForSpecError):
# variant doesn't exist at version
spack.spec.substitute_abstract_variants(Spec("variant-values@4.0 v=bar"))
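The rewritten test replaces isinstance checks with a comparison of the in-place `type` attribute, and the error case moves to its own test. A hedged illustration of the new behavior, reusing one parametrized case above (mock packages assumed):

# Hedged sketch based on the "pkg-a bvv=true" case.
s = Spec("pkg-a bvv=true")
spack.spec.substitute_abstract_variants(s)
assert s.variants["bvv"].type == spack.variant.VariantType.BOOL  # narrowed in place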


def test_abstract_variant_satisfies_abstract_abstract():
# rhs should be a subset of lhs
assert Spec("foo=bar").satisfies("foo=bar")
assert Spec("foo=bar,baz").satisfies("foo=bar")
assert Spec("foo=bar,baz").satisfies("foo=bar,baz")
assert not Spec("foo=bar").satisfies("foo=baz")
assert not Spec("foo=bar").satisfies("foo=bar,baz")
assert Spec("foo=bar").satisfies("foo=*")  # rhs empty set
assert Spec("foo=*").satisfies("foo=*")  # lhs and rhs empty set
assert not Spec("foo=*").satisfies("foo=bar")  # lhs empty set, rhs not


def test_abstract_variant_satisfies_concrete_abstract():
# rhs should be a subset of lhs
assert Spec("foo:=bar").satisfies("foo=bar")
assert Spec("foo:=bar,baz").satisfies("foo=bar")
assert Spec("foo:=bar,baz").satisfies("foo=bar,baz")
assert not Spec("foo:=bar").satisfies("foo=baz")
assert not Spec("foo:=bar").satisfies("foo=bar,baz")
assert Spec("foo:=bar").satisfies("foo=*")  # rhs empty set


def test_abstract_variant_satisfies_abstract_concrete():
# always false since values can be added to the lhs
assert not Spec("foo=bar").satisfies("foo:=bar")
assert not Spec("foo=bar,baz").satisfies("foo:=bar")
assert not Spec("foo=bar,baz").satisfies("foo:=bar,baz")
assert not Spec("foo=bar").satisfies("foo:=baz")
assert not Spec("foo=bar").satisfies("foo:=bar,baz")
assert not Spec("foo=*").satisfies("foo:=bar")  # lhs empty set


def test_abstract_variant_satisfies_concrete_concrete():
# concrete values only satisfy each other when equal
assert Spec("foo:=bar").satisfies("foo:=bar")
assert not Spec("foo:=bar,baz").satisfies("foo:=bar")
assert not Spec("foo:=bar").satisfies("foo:=bar,baz")
assert Spec("foo:=bar,baz").satisfies("foo:=bar,baz")


def test_abstract_variant_intersects_abstract_abstract():
# always true since the union of values satisfies both
assert Spec("foo=bar").intersects("foo=bar")
assert Spec("foo=bar,baz").intersects("foo=bar")
assert Spec("foo=bar,baz").intersects("foo=bar,baz")
assert Spec("foo=bar").intersects("foo=baz")
assert Spec("foo=bar").intersects("foo=bar,baz")
assert Spec("foo=bar").intersects("foo=*")  # rhs empty set
assert Spec("foo=*").intersects("foo=*")  # lhs and rhs empty set
assert Spec("foo=*").intersects("foo=bar")  # lhs empty set, rhs not


def test_abstract_variant_intersects_concrete_abstract():
assert Spec("foo:=bar").intersects("foo=bar")
assert Spec("foo:=bar,baz").intersects("foo=bar")
assert Spec("foo:=bar,baz").intersects("foo=bar,baz")
assert not Spec("foo:=bar").intersects("foo=baz")  # rhs has at least baz, lhs has not
assert not Spec("foo:=bar").intersects("foo=bar,baz")  # rhs has at least baz, lhs has not
assert Spec("foo:=bar").intersects("foo=*")  # rhs empty set


def test_abstract_variant_intersects_abstract_concrete():
assert Spec("foo=bar").intersects("foo:=bar")
assert not Spec("foo=bar,baz").intersects("foo:=bar")  # lhs has at least baz, rhs has not
assert Spec("foo=bar,baz").intersects("foo:=bar,baz")
assert not Spec("foo=bar").intersects("foo:=baz")  # lhs has at least bar, rhs has not
assert Spec("foo=bar").intersects("foo:=bar,baz")
assert Spec("foo=*").intersects("foo:=bar")  # lhs empty set


def test_abstract_variant_intersects_concrete_concrete():
# concrete values only intersect each other when equal
assert Spec("foo:=bar").intersects("foo:=bar")
assert not Spec("foo:=bar,baz").intersects("foo:=bar")
assert not Spec("foo:=bar").intersects("foo:=bar,baz")
assert Spec("foo:=bar,baz").intersects("foo:=bar,baz")


def test_abstract_variant_constrain_abstract_abstract():
s1 = Spec("foo=bar")
s2 = Spec("foo=*")
assert s1.constrain("foo=baz")
assert s1 == Spec("foo=bar,baz")
assert s2.constrain("foo=baz")
assert s2 == Spec("foo=baz")


def test_abstract_variant_constrain_abstract_concrete_fail():
with pytest.raises(UnsatisfiableVariantSpecError):
Spec("foo=bar").constrain("foo:=baz")


def test_abstract_variant_constrain_abstract_concrete_ok():
s1 = Spec("foo=bar")
s2 = Spec("foo=*")
assert s1.constrain("foo:=bar")  # the change is concreteness
assert s1 == Spec("foo:=bar")
assert s2.constrain("foo:=bar")
assert s2 == Spec("foo:=bar")


def test_abstract_variant_constrain_concrete_concrete_fail():
with pytest.raises(UnsatisfiableVariantSpecError):
Spec("foo:=bar").constrain("foo:=bar,baz")


def test_abstract_variant_constrain_concrete_concrete_ok():
s = Spec("foo:=bar")
assert not s.constrain("foo:=bar")  # no change


def test_abstract_variant_constrain_concrete_abstract_fail():
s = Spec("foo:=bar")
with pytest.raises(UnsatisfiableVariantSpecError):
s.constrain("foo=baz")


def test_abstract_variant_constrain_concrete_abstract_ok():
s = Spec("foo:=bar,baz")
assert not s.constrain("foo=bar")  # no change in value or concreteness
assert not s.constrain("foo=*")


def test_patches_variant():
"""patches=x,y,z is a variant with special satisfies behavior when the rhs is abstract; it
allows string prefix matching of the lhs."""
assert Spec("patches:=abcdef").satisfies("patches=ab")
assert Spec("patches:=abcdef").satisfies("patches=abcdef")
assert not Spec("patches:=abcdef").satisfies("patches=xyz")
assert Spec("patches:=abcdef,xyz").satisfies("patches=xyz")
assert not Spec("patches:=abcdef").satisfies("patches=abcdefghi")

# but when the rhs is concrete, it must match exactly
assert Spec("patches:=abcdef").satisfies("patches:=abcdef")
assert not Spec("patches:=abcdef").satisfies("patches:=ab")
assert not Spec("patches:=abcdef,xyz").satisfies("patches:=abc,xyz")
assert not Spec("patches:=abcdef").satisfies("patches:=abcdefghi")


def test_constrain_narrowing():
s = Spec("foo=*")
assert s.variants["foo"].type == spack.variant.VariantType.MULTI
assert not s.variants["foo"].concrete
s.constrain("+foo")
assert s.variants["foo"].type == spack.variant.VariantType.BOOL
assert s.variants["foo"].concrete
@@ -7,18 +7,27 @@
import inspect
import json
import os
import pathlib
import pickle
import re
import shlex
import subprocess
import sys
from functools import wraps
import warnings
from typing import Any, Callable, Dict, Iterable, List, MutableMapping, Optional, Tuple, Union

from llnl.path import path_to_os_path, system_path_filter
from llnl.util import tty
from llnl.util.lang import dedupe

import spack.error

# List is invariant, so List[str] is not a subtype of List[Union[str, pathlib.PurePath]].
# Sequence is covariant, but because str itself is a subtype of Sequence[str], we cannot exclude it
# in the type hint. So, use an awkward union type to allow (mixed) str and PurePath items.
ListOfPaths = Union[List[str], List[pathlib.PurePath], List[Union[str, pathlib.PurePath]]]


if sys.platform == "win32":
SYSTEM_PATHS = [
"C:\\",
@@ -61,28 +70,6 @@
ModificationList = List[Union["NameModifier", "NameValueModifier"]]


def system_env_normalize(func):
"""Decorator wrapping calls to system env modifications,
converting all env variable names to all upper case on Windows, no-op
on other platforms before calling env modification method.

Windows, due to a DOS holdover, treats all env variable names case
insensitively, however Spack's env modification class does not,
meaning setting `Path` and `PATH` would be distinct env operations
for Spack, but would cause a collision when actually performing the
env modification operations on the env.
Normalize all env names to all caps to prevent this collision from the
Spack side."""

@wraps(func)
def case_insensitive_modification(self, name: str, *args, **kwargs):
if sys.platform == "win32":
name = name.upper()
return func(self, name, *args, **kwargs)

return case_insensitive_modification


def is_system_path(path: Path) -> bool:
"""Returns True if the argument is a system path, False otherwise."""
return bool(path) and (os.path.normpath(path) in SYSTEM_DIRS)
@@ -251,7 +238,7 @@ class NameModifier:
__slots__ = ("name", "separator", "trace")

def __init__(self, name: str, *, separator: str = os.pathsep, trace: Optional[Trace] = None):
self.name = name
self.name = name.upper() if sys.platform == "win32" else name
self.separator = separator
self.trace = trace

@@ -271,9 +258,9 @@ class NameValueModifier:
__slots__ = ("name", "value", "separator", "trace")

def __init__(
self, name: str, value: Any, *, separator: str = os.pathsep, trace: Optional[Trace] = None
self, name: str, value: str, *, separator: str = os.pathsep, trace: Optional[Trace] = None
):
self.name = name
self.name = name.upper() if sys.platform == "win32" else name
self.value = value
self.separator = separator
self.trace = trace
@@ -292,6 +279,23 @@ def execute(self, env: MutableMapping[str, str]):
raise NotImplementedError("must be implemented by derived classes")


class NamePathModifier(NameValueModifier):
"""Base class for modifiers that modify the value of an environment variable
that is a path."""

__slots__ = ("name", "value", "separator", "trace")

def __init__(
self,
name: str,
value: Union[str, pathlib.PurePath],
*,
separator: str = os.pathsep,
trace: Optional[Trace] = None,
):
super().__init__(name, str(value), separator=separator, trace=trace)


class SetEnv(NameValueModifier):
__slots__ = ("force", "raw")

@@ -309,17 +313,17 @@ def __init__(
self.raw = raw

def execute(self, env: MutableMapping[str, str]):
tty.debug(f"SetEnv: {self.name}={str(self.value)}", level=3)
env[self.name] = str(self.value)
tty.debug(f"SetEnv: {self.name}={self.value}", level=3)
env[self.name] = self.value


class AppendFlagsEnv(NameValueModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"AppendFlagsEnv: {self.name}={str(self.value)}", level=3)
tty.debug(f"AppendFlagsEnv: {self.name}={self.value}", level=3)
if self.name in env and env[self.name]:
env[self.name] += self.separator + str(self.value)
env[self.name] += self.separator + self.value
else:
env[self.name] = str(self.value)
env[self.name] = self.value


class UnsetEnv(NameModifier):
@@ -331,7 +335,7 @@ def execute(self, env: MutableMapping[str, str]):

class RemoveFlagsEnv(NameValueModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"RemoveFlagsEnv: {self.name}-{str(self.value)}", level=3)
tty.debug(f"RemoveFlagsEnv: {self.name}-{self.value}", level=3)
environment_value = env.get(self.name, "")
flags = environment_value.split(self.separator) if environment_value else []
flags = [f for f in flags if f != self.value]
@@ -339,33 +343,44 @@ def execute(self, env: MutableMapping[str, str]):


class SetPath(NameValueModifier):
def __init__(
self,
name: str,
value: ListOfPaths,
*,
separator: str = os.pathsep,
trace: Optional[Trace] = None,
):
super().__init__(
name, separator.join(str(x) for x in value), separator=separator, trace=trace
)

def execute(self, env: MutableMapping[str, str]):
string_path = self.separator.join(str(item) for item in self.value)
tty.debug(f"SetPath: {self.name}={string_path}", level=3)
env[self.name] = string_path
tty.debug(f"SetPath: {self.name}={self.value}", level=3)
env[self.name] = self.value


class AppendPath(NameValueModifier):
class AppendPath(NamePathModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"AppendPath: {self.name}+{str(self.value)}", level=3)
tty.debug(f"AppendPath: {self.name}+{self.value}", level=3)
environment_value = env.get(self.name, "")
directories = environment_value.split(self.separator) if environment_value else []
directories.append(path_to_os_path(os.path.normpath(self.value)).pop())
env[self.name] = self.separator.join(directories)


class PrependPath(NameValueModifier):
class PrependPath(NamePathModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"PrependPath: {self.name}+{str(self.value)}", level=3)
tty.debug(f"PrependPath: {self.name}+{self.value}", level=3)
environment_value = env.get(self.name, "")
directories = environment_value.split(self.separator) if environment_value else []
directories = [path_to_os_path(os.path.normpath(self.value)).pop()] + directories
env[self.name] = self.separator.join(directories)


class RemoveFirstPath(NameValueModifier):
class RemoveFirstPath(NamePathModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"RemoveFirstPath: {self.name}-{str(self.value)}", level=3)
tty.debug(f"RemoveFirstPath: {self.name}-{self.value}", level=3)
environment_value = env.get(self.name, "")
directories = environment_value.split(self.separator)
directories = [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
@@ -375,9 +390,9 @@ def execute(self, env: MutableMapping[str, str]):
env[self.name] = self.separator.join(directories)


class RemoveLastPath(NameValueModifier):
class RemoveLastPath(NamePathModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"RemoveLastPath: {self.name}-{str(self.value)}", level=3)
tty.debug(f"RemoveLastPath: {self.name}-{self.value}", level=3)
environment_value = env.get(self.name, "")
directories = environment_value.split(self.separator)[::-1]
directories = [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
@@ -387,9 +402,9 @@ def execute(self, env: MutableMapping[str, str]):
env[self.name] = self.separator.join(directories[::-1])


class RemovePath(NameValueModifier):
class RemovePath(NamePathModifier):
def execute(self, env: MutableMapping[str, str]):
tty.debug(f"RemovePath: {self.name}-{str(self.value)}", level=3)
tty.debug(f"RemovePath: {self.name}-{self.value}", level=3)
environment_value = env.get(self.name, "")
directories = environment_value.split(self.separator)
directories = [
@@ -422,6 +437,36 @@ def execute(self, env: MutableMapping[str, str]):
env[self.name] = self.separator.join(directories)


def _validate_path_value(name: str, value: Any) -> Union[str, pathlib.PurePath]:
"""Ensure the value for an env variable is string or path"""
types = (str, pathlib.PurePath)
if isinstance(value, types):
return value
types_str = " or ".join([f"`{t.__name__}`" for t in types])
warnings.warn(
f"when setting environment variable {name}={value}: value is of type "
f"`{type(value).__name__}`, but {types_str} was expected. This is deprecated and will be "
f"an error in Spack v1.0",
spack.error.SpackAPIWarning,
stacklevel=3,
)
return str(value)


def _validate_value(name: str, value: Any) -> str:
"""Ensure the value for an env variable is a string"""
if isinstance(value, str):
return value
warnings.warn(
f"when setting environment variable {name}={value}: value is of type "
f"`{type(value).__name__}`, but `str` was expected. This is deprecated and will be an "
"error in Spack v1.0",
spack.error.SpackAPIWarning,
stacklevel=3,
)
return str(value)
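Both helpers coerce and warn instead of raising, so legacy callers that pass e.g. integers keep working until the stated Spack v1.0 cutoff. For example (hedged, variable names illustrative):

# A non-string value still works, but now emits a SpackAPIWarning.
env_mods = EnvironmentModifications()
env_mods.set("MAKEFLAGS", 8)  # warns; the value is coerced to the string "8"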


class EnvironmentModifications:
"""Keeps track of requests to modify the current environment."""

@@ -473,8 +518,7 @@ def _trace(self) -> Optional[Trace]:

return Trace(filename=filename, lineno=lineno, context=current_context)

@system_env_normalize
def set(self, name: str, value: str, *, force: bool = False, raw: bool = False):
def set(self, name: str, value: str, *, force: bool = False, raw: bool = False) -> None:
"""Stores a request to set an environment variable.

Args:
@@ -483,11 +527,11 @@ def set(self, name: str, value: str, *, force: bool = False, raw: bool = False):
force: if True, audit will not consider this modification a warning
raw: if True, format of value string is skipped
"""
value = _validate_value(name, value)
item = SetEnv(name, value, trace=self._trace(), force=force, raw=raw)
self.env_modifications.append(item)

@system_env_normalize
def append_flags(self, name: str, value: str, sep: str = " "):
def append_flags(self, name: str, value: str, sep: str = " ") -> None:
"""Stores a request to append 'flags' to an environment variable.

Args:
@@ -495,11 +539,11 @@ def append_flags(self, name: str, value: str, sep: str = " "):
value: flags to be appended
sep: separator for the flags (default: " ")
"""
value = _validate_value(name, value)
item = AppendFlagsEnv(name, value, separator=sep, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def unset(self, name: str):
def unset(self, name: str) -> None:
"""Stores a request to unset an environment variable.

Args:
@@ -508,8 +552,7 @@ def unset(self, name: str):
item = UnsetEnv(name, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def remove_flags(self, name: str, value: str, sep: str = " "):
def remove_flags(self, name: str, value: str, sep: str = " ") -> None:
"""Stores a request to remove flags from an environment variable

Args:
@@ -517,11 +560,11 @@ def remove_flags(self, name: str, value: str, sep: str = " "):
value: flags to be removed
sep: separator for the flags (default: " ")
"""
value = _validate_value(name, value)
item = RemoveFlagsEnv(name, value, separator=sep, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def set_path(self, name: str, elements: List[str], separator: str = os.pathsep):
def set_path(self, name: str, elements: ListOfPaths, separator: str = os.pathsep) -> None:
"""Stores a request to set an environment variable to a list of paths,
separated by a character defined in input.

@@ -530,11 +573,13 @@ def set_path(self, name: str, elements: List[str], separator: str = os.pathsep):
elements: ordered list paths
separator: separator for the paths (default: os.pathsep)
"""
elements = [_validate_path_value(name, x) for x in elements]
item = SetPath(name, elements, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def append_path(self, name: str, path: str, separator: str = os.pathsep):
def append_path(
self, name: str, path: Union[str, pathlib.PurePath], separator: str = os.pathsep
) -> None:
"""Stores a request to append a path to list of paths.

Args:
@@ -542,11 +587,13 @@ def append_path(self, name: str, path: str, separator: str = os.pathsep):
path: path to be appended
separator: separator for the paths (default: os.pathsep)
"""
path = _validate_path_value(name, path)
item = AppendPath(name, path, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def prepend_path(self, name: str, path: str, separator: str = os.pathsep):
def prepend_path(
self, name: str, path: Union[str, pathlib.PurePath], separator: str = os.pathsep
) -> None:
"""Stores a request to prepend a path to list of paths.

Args:
@@ -554,11 +601,13 @@ def prepend_path(self, name: str, path: str, separator: str = os.pathsep):
path: path to be prepended
separator: separator for the paths (default: os.pathsep)
"""
path = _validate_path_value(name, path)
item = PrependPath(name, path, separator=separator, trace=self._trace())
self.env_modifications.append(item)
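With `_validate_path_value` in place, the path-taking methods accept `pathlib.PurePath` objects as well as strings. A short usage sketch (hypothetical paths):

# Hedged sketch: PurePath and str values may be mixed for path modifications.
env_mods = EnvironmentModifications()
env_mods.prepend_path("PATH", pathlib.PurePath("/opt/tool/bin"))
env_mods.set_path("CMAKE_PREFIX_PATH", ["/opt/a", pathlib.PurePath("/opt/b")])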

@system_env_normalize
def remove_first_path(self, name: str, path: str, separator: str = os.pathsep):
def remove_first_path(
self, name: str, path: Union[str, pathlib.PurePath], separator: str = os.pathsep
) -> None:
"""Stores a request to remove first instance of path from a list of paths.

Args:
@@ -566,11 +615,13 @@ def remove_first_path(self, name: str, path: str, separator: str = os.pathsep):
path: path to be removed
separator: separator for the paths (default: os.pathsep)
"""
path = _validate_path_value(name, path)
item = RemoveFirstPath(name, path, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def remove_last_path(self, name: str, path: str, separator: str = os.pathsep):
def remove_last_path(
self, name: str, path: Union[str, pathlib.PurePath], separator: str = os.pathsep
) -> None:
"""Stores a request to remove last instance of path from a list of paths.

Args:
@@ -578,11 +629,13 @@ def remove_last_path(self, name: str, path: str, separator: str = os.pathsep):
path: path to be removed
separator: separator for the paths (default: os.pathsep)
"""
path = _validate_path_value(name, path)
item = RemoveLastPath(name, path, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def remove_path(self, name: str, path: str, separator: str = os.pathsep):
def remove_path(
self, name: str, path: Union[str, pathlib.PurePath], separator: str = os.pathsep
) -> None:
"""Stores a request to remove a path from a list of paths.

Args:
@@ -590,11 +643,11 @@ def remove_path(self, name: str, path: str, separator: str = os.pathsep):
path: path to be removed
separator: separator for the paths (default: os.pathsep)
"""
path = _validate_path_value(name, path)
item = RemovePath(name, path, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def deprioritize_system_paths(self, name: str, separator: str = os.pathsep):
def deprioritize_system_paths(self, name: str, separator: str = os.pathsep) -> None:
"""Stores a request to deprioritize system paths in a path list,
otherwise preserving the order.

@@ -605,8 +658,7 @@ def deprioritize_system_paths(self, name: str, separator: str = os.pathsep):
item = DeprioritizeSystemPaths(name, separator=separator, trace=self._trace())
self.env_modifications.append(item)

@system_env_normalize
def prune_duplicate_paths(self, name: str, separator: str = os.pathsep):
def prune_duplicate_paths(self, name: str, separator: str = os.pathsep) -> None:
"""Stores a request to remove duplicates from a path list, otherwise
preserving the order.
@@ -10,7 +10,6 @@
import functools
import inspect
import itertools
import re
from typing import Any, Callable, Collection, Iterable, List, Optional, Tuple, Type, Union

import llnl.util.lang as lang
@@ -33,24 +32,22 @@
"target",
}

special_variant_values = [None, "none", "*"]


class VariantType(enum.Enum):
class VariantType(enum.IntEnum):
"""Enum representing the three concrete variant types."""

MULTI = "multi"
BOOL = "bool"
SINGLE = "single"
BOOL = 1
SINGLE = 2
MULTI = 3

@property
def variant_class(self) -> Type:
if self is self.MULTI:
return MultiValuedVariant
elif self is self.BOOL:
return BoolValuedVariant
else:
return SingleValuedVariant
def string(self) -> str:
"""Convert the variant type to a string."""
if self == VariantType.BOOL:
return "bool"
elif self == VariantType.SINGLE:
return "single"
return "multi"


class Variant:
@@ -134,7 +131,7 @@ def isa_type(v):
self.sticky = sticky
self.precedence = precedence

def validate_or_raise(self, vspec: "AbstractVariant", pkg_name: str):
def validate_or_raise(self, vspec: "VariantValue", pkg_name: str):
"""Validate a variant spec against this package variant. Raises an
exception if any error is found.

@@ -156,7 +153,7 @@ def validate_or_raise(self, vspec: "AbstractVariant", pkg_name: str):
raise InconsistentValidationError(vspec, self)

# If the value is exclusive there must be at most one
value = vspec.value_as_tuple
value = vspec.values
if not self.multi and len(value) != 1:
raise MultipleValuesInExclusiveVariantError(vspec, pkg_name)

@@ -191,27 +188,15 @@ def allowed_values(self):
v = docstring if docstring else ""
return v

def make_default(self):
"""Factory that creates a variant holding the default value.
def make_default(self) -> "VariantValue":
"""Factory that creates a variant holding the default value(s)."""
variant = VariantValue.from_string_or_bool(self.name, self.default)
variant.type = self.variant_type
return variant

Returns:
MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
instance of the proper variant
"""
return self.make_variant(self.default)

def make_variant(self, value: Union[str, bool]) -> "AbstractVariant":
"""Factory that creates a variant holding the value passed as
a parameter.

Args:
value: value that will be held by the variant

Returns:
MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
instance of the proper variant
"""
return self.variant_type.variant_class(self.name, value)
def make_variant(self, *value: Union[str, bool]) -> "VariantValue":
"""Factory that creates a variant holding the value(s) passed."""
return VariantValue(self.variant_type, self.name, value)

@property
def variant_type(self) -> VariantType:
@@ -237,27 +222,6 @@ def __str__(self):
)


def implicit_variant_conversion(method):
"""Converts other to type(self) and calls method(self, other)

Args:
method: any predicate method that takes another variant as an argument

Returns: decorated method
"""

@functools.wraps(method)
def convert(self, other):
# We don't care if types are different as long as I can convert other to type(self)
try:
other = type(self)(other.name, other._original_value, propagate=other.propagate)
except (spack.error.SpecError, ValueError):
return False
return method(self, other)

return convert


def _flatten(values) -> Collection:
"""Flatten instances of _ConditionalVariantValues for internal representation"""
if isinstance(values, DisjointSetsOfValues):
@@ -275,335 +239,244 @@ def _flatten(values) -> Collection:


#: Type for value of a variant
ValueType = Union[str, bool, Tuple[Union[str, bool], ...]]
ValueType = Tuple[Union[bool, str], ...]

#: Type of variant value when output for JSON, YAML, etc.
SerializedValueType = Union[str, bool, List[Union[str, bool]]]


@lang.lazy_lexicographic_ordering
class AbstractVariant:
"""A variant that has not yet decided who it wants to be. It behaves like
a multi valued variant which **could** do things.

This kind of variant is generated during parsing of expressions like
``foo=bar`` and differs from multi valued variants because it will
satisfy any other variant with the same name. This is because it **could**
do it if it grows up to be a multi valued variant with the right set of
values.
"""
class VariantValue:
"""A VariantValue is a key-value pair that represents a variant. It can have zero or more
values. Values have set semantics, so they are unordered and unique. The variant type can
be narrowed from multi to single to boolean; this limits the number of values that can be
stored in the variant. Multi-valued variants can either be concrete or abstract: abstract
means that the variant takes at least the values specified, but may take more when concretized.
Concrete means that the variant takes exactly the values specified. Lastly, a variant can be
marked as propagating, which means that it should be propagated to dependencies."""

name: str
propagate: bool
_value: ValueType
_original_value: Any
concrete: bool
type: VariantType
_values: ValueType

def __init__(self, name: str, value: ValueType, propagate: bool = False) -> None:
slots = ("name", "propagate", "concrete", "type", "_values")

def __init__(
self,
type: VariantType,
name: str,
value: ValueType,
*,
propagate: bool = False,
concrete: bool = False,
) -> None:
self.name = name
self.type = type
self.propagate = propagate
# only multi-valued variants can be abstract
self.concrete = concrete or type in (VariantType.BOOL, VariantType.SINGLE)

# Invokes property setter
self.value = value
self.set(*value)

@staticmethod
def from_node_dict(
name: str, value: Union[str, List[str]], *, propagate: bool = False
) -> "AbstractVariant":
name: str, value: Union[str, List[str]], *, propagate: bool = False, abstract: bool = False
) -> "VariantValue":
"""Reconstruct a variant from a node dict."""
if isinstance(value, list):
# read multi-value variants in and be faithful to the YAML
mvar = MultiValuedVariant(name, (), propagate=propagate)
mvar._value = tuple(value)
mvar._original_value = mvar._value
return mvar
return VariantValue(
VariantType.MULTI, name, tuple(value), propagate=propagate, concrete=not abstract
)

# todo: is this necessary? not literal true / false in json/yaml?
elif str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
return BoolValuedVariant(name, value, propagate=propagate)
return VariantValue(
VariantType.BOOL, name, (str(value).upper() == "TRUE",), propagate=propagate
)

return SingleValuedVariant(name, value, propagate=propagate)
return VariantValue(VariantType.SINGLE, name, (value,), propagate=propagate)

@staticmethod
def from_string_or_bool(
name: str, value: Union[str, bool], *, propagate: bool = False, concrete: bool = False
) -> "VariantValue":
if value is True or value is False:
return VariantValue(VariantType.BOOL, name, (value,), propagate=propagate)

elif value.upper() in ("TRUE", "FALSE"):
return VariantValue(
VariantType.BOOL, name, (value.upper() == "TRUE",), propagate=propagate
)

elif value == "*":
return VariantValue(VariantType.MULTI, name, (), propagate=propagate)

return VariantValue(
VariantType.MULTI,
name,
tuple(value.split(",")),
propagate=propagate,
concrete=concrete,
)

@staticmethod
def from_concretizer(name: str, value: str, type: str) -> "VariantValue":
"""Reconstruct a variant from concretizer output."""
if type == "bool":
return VariantValue(VariantType.BOOL, name, (value == "True",))
elif type == "multi":
return VariantValue(VariantType.MULTI, name, (value,), concrete=True)
else:
return VariantValue(VariantType.SINGLE, name, (value,))
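Together the three constructors cover the main inputs: YAML/JSON nodes, spec strings, and concretizer output. A few hedged examples of the string parser:

# Hedged examples of from_string_or_bool.
v1 = VariantValue.from_string_or_bool("shared", "True")  # BOOL, holds True
v2 = VariantValue.from_string_or_bool("foo", "*")        # abstract MULTI, no values
v3 = VariantValue.from_string_or_bool("foo", "bar,baz")  # abstract MULTI
assert v1.type == VariantType.BOOL and v1.value is True
assert v2.values == () and not v2.concrete
assert v3.values == ("bar", "baz")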

def yaml_entry(self) -> Tuple[str, SerializedValueType]:
"""Returns a key, value tuple suitable to be an entry in a yaml dict.
"""Returns a (key, value) tuple suitable to be an entry in a yaml dict.

Returns:
tuple: (name, value_representation)
"""
return self.name, list(self.value_as_tuple)
if self.type == VariantType.MULTI:
return self.name, list(self.values)
return self.name, self.values[0]

@property
def value_as_tuple(self) -> Tuple[Union[bool, str], ...]:
"""Getter for self.value that always returns a Tuple (even for single valued variants).

This makes it easy to iterate over possible values.
"""
if isinstance(self._value, (bool, str)):
return (self._value,)
return self._value
def values(self) -> ValueType:
return self._values

@property
def value(self) -> ValueType:
"""Returns a tuple of strings containing the values stored in
the variant.
def value(self) -> Union[ValueType, bool, str]:
return self._values[0] if self.type != VariantType.MULTI else self._values

Returns:
tuple: values stored in the variant
"""
return self._value
def set(self, *value: Union[bool, str]) -> None:
"""Set the value(s) of the variant."""
if len(value) > 1:
value = tuple(sorted(set(value)))

@value.setter
def value(self, value: ValueType) -> None:
self._value_setter(value)
if self.type != VariantType.MULTI:
if len(value) != 1:
raise MultipleValuesInExclusiveVariantError(self)
unwrapped = value[0]
if self.type == VariantType.BOOL and unwrapped not in (True, False):
raise ValueError(
f"cannot set a boolean variant to a value that is not a boolean: {unwrapped}"
)

def _value_setter(self, value: ValueType) -> None:
# Store the original value
self._original_value = value
if "*" in value:
raise InvalidVariantValueError("cannot use reserved value '*'")

if not isinstance(value, (tuple, list)):
# Store a tuple of CSV string representations
# Tuple is necessary here instead of list because the
# values need to be hashed
value = tuple(re.split(r"\s*,\s*", str(value)))

for val in special_variant_values:
if val in value and len(value) > 1:
msg = "'%s' cannot be combined with other variant" % val
msg += " values."
raise InvalidVariantValueCombinationError(msg)

# With multi-value variants it is necessary
# to remove duplicates and give an order
# to a set
self._value = tuple(sorted(set(value)))
self._values = value

def _cmp_iter(self) -> Iterable:
yield self.name
yield self.propagate
yield from (str(v) for v in self.value_as_tuple)
yield self.concrete
yield from (str(v) for v in self.values)

def copy(self) -> "AbstractVariant":
"""Returns an instance of a variant equivalent to self

Returns:
AbstractVariant: a copy of self

>>> a = MultiValuedVariant('foo', True)
>>> b = a.copy()
>>> assert a == b
>>> assert a is not b
"""
return type(self)(self.name, self._original_value, self.propagate)

@implicit_variant_conversion
def satisfies(self, other: "AbstractVariant") -> bool:
"""Returns true if ``other.name == self.name``, because any value that
other holds and is not in self yet **could** be added.

Args:
other: constraint to be met for the method to return True

Returns:
bool: True or False
"""
# If names are different then `self` does not satisfy `other`
# (`foo=bar` will never satisfy `baz=bar`)
return other.name == self.name

def intersects(self, other: "AbstractVariant") -> bool:
"""Returns True if there is a variant matching both self and other, False otherwise."""
if isinstance(other, (SingleValuedVariant, BoolValuedVariant)):
return other.intersects(self)
return other.name == self.name

def compatible(self, other: "AbstractVariant") -> bool:
"""Returns True if self and other are compatible, False otherwise.

As there is no semantic check, two VariantSpec are compatible if
either they contain the same value or they are both multi-valued.

Args:
other: instance against which we test compatibility

Returns:
bool: True or False
"""
# If names are different then `self` is not compatible with `other`
# (`foo=bar` is incompatible with `baz=bar`)
return self.intersects(other)

@implicit_variant_conversion
def constrain(self, other: "AbstractVariant") -> bool:
"""Modify self to match all the constraints for other if both
instances are multi-valued. Returns True if self changed,
False otherwise.

Args:
other: instance against which we constrain self

Returns:
bool: True or False
"""
if self.name != other.name:
raise ValueError("variants must have the same name")

old_value = self.value

values = list(sorted(set(self.value_as_tuple + other.value_as_tuple)))
# If we constrain a wildcard with another value, just take that value
if "*" in values and len(values) > 1:
values.remove("*")

self._value_setter(",".join(str(v) for v in values))
self.propagate = self.propagate and other.propagate
return old_value != self.value

def __contains__(self, item: Union[str, bool]) -> bool:
return item in self.value_as_tuple

def __repr__(self) -> str:
return f"{type(self).__name__}({repr(self.name)}, {repr(self._original_value)})"

def __str__(self) -> str:
delim = "==" if self.propagate else "="
values = spack.spec_parser.quote_if_needed(",".join(str(v) for v in self.value_as_tuple))
return f"{self.name}{delim}{values}"


class MultiValuedVariant(AbstractVariant):
"""A variant that can hold multiple values at once."""

@implicit_variant_conversion
def satisfies(self, other: AbstractVariant) -> bool:
"""Returns true if ``other.name == self.name`` and ``other.value`` is
a strict subset of self. Does not try to validate.

Args:
other: constraint to be met for the method to return True

Returns:
bool: True or False
"""
super_sat = super().satisfies(other)

if not super_sat:
return False

if "*" in other or "*" in self:
return True

# allow prefix find on patches
if self.name == "patches":
return all(
any(str(w).startswith(str(v)) for w in self.value_as_tuple)
for v in other.value_as_tuple
)

# Otherwise we want all the values in `other` to be also in `self`
return all(v in self for v in other.value_as_tuple)

def append(self, value: Union[str, bool]) -> None:
"""Add another value to this multi-valued variant."""
self._value = tuple(sorted((value,) + self.value_as_tuple))
self._original_value = ",".join(str(v) for v in self._value)

def __str__(self) -> str:
# Special-case patches to not print the full 64 character sha256
if self.name == "patches":
values_str = ",".join(str(x)[:7] for x in self.value_as_tuple)
else:
values_str = ",".join(str(x) for x in self.value_as_tuple)

delim = "==" if self.propagate else "="
return f"{self.name}{delim}{spack.spec_parser.quote_if_needed(values_str)}"


class SingleValuedVariant(AbstractVariant):
"""A variant that can hold multiple values, but one at a time."""

def _value_setter(self, value: ValueType) -> None:
# Treat the value as a multi-valued variant
super()._value_setter(value)

# Then check if there's only a single value
values = self.value_as_tuple
if len(values) != 1:
raise MultipleValuesInExclusiveVariantError(self)

self._value = values[0]

@implicit_variant_conversion
def satisfies(self, other: "AbstractVariant") -> bool:
abstract_sat = super().satisfies(other)

return abstract_sat and (
self.value == other.value or other.value == "*" or self.value == "*"
def copy(self) -> "VariantValue":
return VariantValue(
self.type, self.name, self.values, propagate=self.propagate, concrete=self.concrete
)

def intersects(self, other: "AbstractVariant") -> bool:
return self.satisfies(other)

def compatible(self, other: "AbstractVariant") -> bool:
return self.satisfies(other)

@implicit_variant_conversion
def constrain(self, other: "AbstractVariant") -> bool:
def satisfies(self, other: "VariantValue") -> bool:
"""The lhs satisfies the rhs if all possible concretizations of lhs are also
possible concretizations of rhs."""
if self.name != other.name:
raise ValueError("variants must have the same name")

if other.value == "*":
return False

if self.value == "*":
self.value = other.value
return True

if self.value != other.value:
raise UnsatisfiableVariantSpecError(other.value, self.value)
self.propagate = self.propagate and other.propagate
if not other.concrete:
# rhs abstract means the lhs must at least contain its values.
# special-case patches with rhs abstract: their values may be prefixes of the lhs
# values.
if self.name == "patches":
return all(
isinstance(v, str)
and any(isinstance(w, str) and w.startswith(v) for w in self.values)
for v in other.values
)
return all(v in self for v in other.values)
if self.concrete:
# both concrete: they must be equal
return self.values == other.values
return False

def __contains__(self, item: ValueType) -> bool:
return item == self.value
def intersects(self, other: "VariantValue") -> bool:
"""True iff there exists a concretization that satisfies both lhs and rhs."""
if self.name != other.name:
return False
if self.concrete:
if other.concrete:
return self.values == other.values
return all(v in self for v in other.values)
if other.concrete:
return all(v in other for v in self.values)
# both abstract: the union is a valid concretization of both
return True
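`intersects` is deliberately weaker than `satisfies`: two abstract variants always intersect, because their union concretizes both. A hedged example:

# Abstract values intersect even though neither satisfies the other.
a = VariantValue.from_string_or_bool("foo", "bar")
b = VariantValue.from_string_or_bool("foo", "baz")
assert a.intersects(b) and not a.satisfies(b)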

def yaml_entry(self) -> Tuple[str, SerializedValueType]:
assert isinstance(self.value, (bool, str))
return self.name, self.value
def constrain(self, other: "VariantValue") -> bool:
"""Constrain self with other if they intersect. Returns true iff self was changed."""
if not self.intersects(other):
raise UnsatisfiableVariantSpecError(self, other)
old_values = self.values
self.set(*self.values, *other.values)
changed = old_values != self.values
if self.propagate and not other.propagate:
self.propagate = False
changed = True
if not self.concrete and other.concrete:
self.concrete = True
changed = True
if self.type > other.type:
self.type = other.type
changed = True
return changed
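Constraining merges the value sets and then takes the strictest of propagation, concreteness, and type. A hedged walk-through:

# Constrain two abstract multi-valued variants of the same name.
a = VariantValue.from_string_or_bool("foo", "bar")
b = VariantValue.from_string_or_bool("foo", "baz")
assert a.constrain(b)               # value set grew
assert a.values == ("bar", "baz")
assert not a.constrain(b)           # applying the same constraint again is a no-op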

def append(self, value: Union[str, bool]) -> None:
self.set(*self.values, value)

def __contains__(self, item: Union[str, bool]) -> bool:
return item in self.values

def __str__(self) -> str:
# boolean variants are printed +foo or ~foo
if self.type == VariantType.BOOL:
sigil = "+" if self.value else "~"
if self.propagate:
sigil *= 2
return f"{sigil}{self.name}"

# concrete multi-valued foo:=bar,baz
concrete = ":" if self.type == VariantType.MULTI and self.concrete else ""
delim = "==" if self.propagate else "="
return f"{self.name}{delim}{spack.spec_parser.quote_if_needed(str(self.value))}"


class BoolValuedVariant(SingleValuedVariant):
"""A variant that can hold either True or False.

BoolValuedVariant can also hold the value '*', for coerced
comparisons between ``foo=*`` and ``+foo`` or ``~foo``."""

def _value_setter(self, value: ValueType) -> None:
# Check the string representation of the value and turn
# it to a boolean
if str(value).upper() == "TRUE":
self._original_value = value
self._value = True
elif str(value).upper() == "FALSE":
self._original_value = value
self._value = False
elif str(value) == "*":
self._original_value = value
self._value = "*"
if not self.values:
value_str = "*"
elif self.name == "patches" and self.concrete:
value_str = ",".join(str(x)[:7] for x in self.values)
else:
msg = 'cannot construct a BoolValuedVariant for "{0}" from '
msg += "a value that does not represent a bool"
raise ValueError(msg.format(self.name))
value_str = ",".join(str(x) for x in self.values)
return f"{self.name}{concrete}{delim}{spack.spec_parser.quote_if_needed(value_str)}"

def __contains__(self, item: ValueType) -> bool:
return item is self.value
def __repr__(self):
return (
f"VariantValue({self.type!r}, {self.name!r}, {self.values!r}, "
f"propagate={self.propagate!r}, concrete={self.concrete!r})"
)

def __str__(self) -> str:
sigil = "+" if self.value else "~"
if self.propagate:
sigil *= 2
return f"{sigil}{self.name}"

def MultiValuedVariant(name: str, value: ValueType, propagate: bool = False) -> VariantValue:
return VariantValue(VariantType.MULTI, name, value, propagate=propagate, concrete=True)


def SingleValuedVariant(
name: str, value: Union[bool, str], propagate: bool = False
) -> VariantValue:
return VariantValue(VariantType.SINGLE, name, (value,), propagate=propagate)


def BoolValuedVariant(name: str, value: bool, propagate: bool = False) -> VariantValue:
return VariantValue(VariantType.BOOL, name, (value,), propagate=propagate)
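The three former classes survive as thin factory functions, so existing call sites (including the tests earlier in this diff) keep working unchanged. For example (hedged):

# The factory now returns a plain VariantValue rather than a subclass instance.
v = BoolValuedVariant("shared", True)
assert isinstance(v, VariantValue) and v.type == VariantType.BOOL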
|
||||
|
||||
|
||||
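An illustrative sketch of how these factories combine with the __str__ rules shown earlier (not part of the diff; it assumes quote_if_needed leaves simple comma-separated value lists unquoted):

# Sketch only: how VariantValue instances render, per the __str__ logic above.
bv = BoolValuedVariant("shared", True)
print(bv)        # +shared
bv2 = BoolValuedVariant("shared", False, propagate=True)
print(bv2)       # ~~shared (propagated variants double the sigil)
sv = SingleValuedVariant("build_type", "Release")
print(sv)        # build_type=Release
mv = MultiValuedVariant("languages", ("c", "cxx"))
print(mv)        # languages:=c,cxx (':' marks a concrete multi-valued variant)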
# The class below inherit from Sequence to disguise as a tuple and comply

@@ -810,7 +683,7 @@ def __lt__(self, other):
def prevalidate_variant_value(
    pkg_cls: "Type[spack.package_base.PackageBase]",
    variant: AbstractVariant,
    variant: VariantValue,
    spec: Optional["spack.spec.Spec"] = None,
    strict: bool = False,
) -> List[Variant]:

@@ -831,8 +704,8 @@ def prevalidate_variant_value(
        list of variant definitions that will accept the given value. List will be empty
        only if the variant is a reserved variant.
    """
    # don't validate wildcards or variants with reserved names
    if variant.value == ("*",) or variant.name in RESERVED_NAMES or variant.propagate:
    # do not validate non-user variants or optional variants
    if variant.name in RESERVED_NAMES or variant.propagate:
        return []

    # raise if there is no definition at all
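A hedged usage sketch for prevalidate_variant_value (names and the repo lookup below are illustrative assumptions, not taken from the diff):

# Sketch only: validate a user-supplied variant against a package class
# before concretization. The get_pkg_class lookup is an assumed repo API.
pkg_cls = spack.repo.PATH.get_pkg_class("hdf5")
variant = SingleValuedVariant("api", "v110")
definitions = prevalidate_variant_value(pkg_cls, variant, strict=True)
if not definitions:
    print(f"'{variant.name}' is reserved or propagated; nothing to validate")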
@@ -915,17 +788,13 @@ class MultipleValuesInExclusiveVariantError(spack.error.SpecError, ValueError):
    only one.
    """

    def __init__(self, variant: AbstractVariant, pkg_name: Optional[str] = None):
    def __init__(self, variant: VariantValue, pkg_name: Optional[str] = None):
        pkg_info = "" if pkg_name is None else f" in package '{pkg_name}'"
        msg = f"multiple values are not allowed for variant '{variant.name}'{pkg_info}"

        super().__init__(msg)


class InvalidVariantValueCombinationError(spack.error.SpecError):
    """Raised when a variant has values '*' or 'none' with other values."""


class InvalidVariantValueError(spack.error.SpecError):
    """Raised when variants have invalid values."""
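For illustration (not in the diff), the error message above renders like this:

# Sketch: how MultipleValuesInExclusiveVariantError formats its message.
err = MultipleValuesInExclusiveVariantError(
    SingleValuedVariant("build_type", "Debug"), pkg_name="cmake"
)
print(err)  # multiple values are not allowed for variant 'build_type' in package 'cmake'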
@@ -1257,7 +1257,7 @@ def _prev_version(v: StandardVersion) -> StandardVersion:
    return StandardVersion("", (release, prerelease), separators)


def Version(string: Union[str, int]) -> ConcreteVersion:
def Version(string: Union[str, int]) -> Union[StandardVersion, GitVersion]:
    if not isinstance(string, (str, int)):
        raise TypeError(f"Cannot construct a version from {type(string)}")
    string = str(string)
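A brief sketch of the widened return type above (illustrative; the git.<ref>=<version> spelling is the usual way a GitVersion appears in Spack specs):

# Sketch only: Version() returns a StandardVersion for plain version strings
# and a GitVersion for git-based ones.
from spack.version import Version

v = Version("1.2.3")           # StandardVersion
g = Version("git.main=1.2.3")  # GitVersion: ref 'main', compared as 1.2.3
print(type(v).__name__, type(g).__name__)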
@@ -22,7 +22,7 @@ ci:
    script::
    - - if [ -n "$SPACK_EXTRA_MIRROR" ]; then spack mirror add local "${SPACK_EXTRA_MIRROR}/${SPACK_CI_STACK_NAME}"; fi
      - spack config blame mirrors
    - - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
    - - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests --timeout 300 > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
    after_script:
    - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

@@ -42,7 +42,7 @@ ci:
          aud: "${OIDC_TOKEN_AUDIENCE}"

  - signing-job:
      image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
      image: { "name": "ghcr.io/spack/notary:0.0.1", "entrypoint": [""] }
      tags: ["aws"]
      script:
      - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_BUILDCACHE_DESTINATION}/build_cache /tmp
@@ -5,7 +5,6 @@ ci:
      - Write-Output "Done"

    before_script::
    - - git config core.autocrlf true
      - fsutil 8dot3name set C:\ 0
      - . .\share\spack\setup-env.ps1
      - If (Test-Path -path C:\\key\intermediate_ci_signing_key.gpg) { spack.ps1 gpg trust C:\\key\intermediate_ci_signing_key.gpg }
@@ -173,11 +173,11 @@ spack:
  - vtk-m ~openmp # +openmp: https://github.com/spack/spack/issues/31830
  - zfp
  # --
  # - paraview +qt # llvm-17.0.6: https://github.com/spack/spack/issues/49625
  # - py-cinemasci # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  # - visit # llvm-17.0.6: https://github.com/spack/spack/issues/49625
  # - paraview +qt # concretize: paraview: Qt support requires GLX on non Windows; llvm-17.0.6: https://github.com/spack/spack/issues/49625
  # - py-cinemasci # py-maturin-1.8.3: rust-lld: error: undefined symbol: _intel_fast_memcpy
  # - visit # vtk-9.2.6: ??
  # --
  # - chapel ~cuda ~rocm # llvm-19.1.7: https://github.com/spack/spack/issues/49625
  # - chapel ~cuda ~rocm # chapel-2.4.0: KeyError: 'intel-oneapi-compilers': /builds/spack/spack/var/spack/repos/builtin/packages/chapel/package.py:602, in setup_chpl_compilers: env.set("CHPL_HOST_COMPILER", self.compiler_map[self.spec.compiler.name])
  # - cp2k +mpi # dbcsr-2.8.0: FAILED: src/CMakeFiles/dbcsr.dir/dbcsr_api.F-pp.f src/CMakeFiles/dbcsr.dir/dbcsr_api.F.o.ddi:
  # - dealii # taskflow@3.7.0: cmake: Taskflow currently supports the following compilers: g++ v7.0 or above, clang++ v6.0 or above
  # - exago +mpi ~ipopt +hiop ~python +raja ^hiop+raja~sparse # raja-0.14.0: RAJA/pattern/kernel/Tile.hpp:174:30: error: no member named 'block_id' in 'IterableTiler<Iterable>'

@@ -185,8 +185,8 @@ spack:
  # - fftx # fftx-1.2.0: https://github.com/spack/spack/issues/49621
  # - fpm # fpm-0.10.0: /tmp/ifx1305151083OkWTRB/ifxqBG60i.i90: error #6405: The same named entity from different modules and/or program units cannot be referenced. [TOML_TABLE]; fpm.F90(32048): error #7002: Error in opening the compiled module file. Check INCLUDE paths. [FPM_MANIFEST_PREPROCESS]
  # - geopm-runtime # concretize: c-blosc2: conflicts with '%oneapi';
  # - glvis # llvm-17.0.6: https://github.com/spack/spack/issues/49625
  # - gptune ~mpispawn # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  - glvis # llvm-17.0.6: https://github.com/spack/spack/issues/49625
  # - gptune ~mpispawn # py-maturin-1.8.3: rust-lld: error: undefined symbol: __intel_cpu_feature_indicator_x
  # - lbann # lbann-0.104: https://github.com/spack/spack/issues/49619
  # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # concretize: c-blosc2: conflicts with '%oneapi';
  # - nek5000 +mpi ~visit # nek5000-19.0: RuntimeError: Cannot build example: short_tests/eddy.

@@ -209,8 +209,8 @@ spack:
  # --
  # - py-jupyterlab # py-maturin: rust-lld: error: undefined symbol: _intel_fast_memcpy
  # - py-notebook # py-maturin: rust-lld: error: undefined symbol: _intel_fast_memcpy
  # - py-numba # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  # - py-pandas # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  - py-numba # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  - py-pandas # llvm-14.0.6: https://github.com/spack/spack/issues/49625
  # - py-plotly # py-maturin: rust-lld: error: undefined symbol: _intel_fast_memcpy

  - aml +level_zero
@@ -20,12 +20,14 @@ spack:
      require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +svg +tgeo cxxstd=20
    celeritas:
      require: +geant4 +hepmc3 +root +shared cxxstd=20
    geant4:
      require: +opengl +qt +threads +x11
    hip:
      require: '@5.7.1 +rocm'
    rivet:
      require: hepmc=3
    root:
      require: +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20
      require: +arrow ~daos +davix +dcache +emacs +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20
      # note: root cxxstd=20 not concretizable within sherpa
    vecgeom:
      require: +gdml +geant4 +root +shared cxxstd=20

@@ -66,8 +68,10 @@ spack:
  - fjcontrib
  #- garfieldpp
  - gaudi +aida +examples +heppdt +xercesc ^gdb +debuginfod +python
  - geant4 +opengl +qt +threads ~vtk ^[virtuals=qmake] qt
  - geant4 ~vtk ^[virtuals=qmake] qt
  - geant4 ~vtk ^[virtuals=qmake] qt-base
  #- genie +atmo
  - geomodel +examples +fullsimlight +geomodelg4 +hepmc3 +pythia +tools +visualization
  - hepmc
  - hepmc3 +interfaces +protobuf +python +rootio
  #- herwig3 +njet +vbfnlo # Note: herwig3 fails to find evtgen

@@ -97,6 +101,7 @@ spack:
  - py-uhi
  - py-uproot +lz4 +xrootd +zstd
  - py-vector
  - py-zfit
  - pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
  - rivet
  - root ~cuda
@@ -563,14 +563,14 @@ _spack_buildcache() {
    then
        SPACK_COMPREPLY="-h --help"
    else
        SPACK_COMPREPLY="push create install list keys check download get-buildcache-name save-specfile sync update-index rebuild-index"
        SPACK_COMPREPLY="push create install list keys check download save-specfile sync update-index rebuild-index"
    fi
}

_spack_buildcache_push() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force --unsigned -u --signed --key -k --update-index --rebuild-index --spec-file --only --with-build-dependencies --without-build-dependencies --fail-fast --base-image --tag -t --private -j --jobs"
        SPACK_COMPREPLY="-h --help -f --force --unsigned -u --signed --key -k --update-index --rebuild-index --only --with-build-dependencies --without-build-dependencies --fail-fast --base-image --tag -t --private -j --jobs"
    else
        _mirrors
    fi

@@ -579,7 +579,7 @@ _spack_buildcache_push() {
_spack_buildcache_create() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force --unsigned -u --signed --key -k --update-index --rebuild-index --spec-file --only --with-build-dependencies --without-build-dependencies --fail-fast --base-image --tag -t --private -j --jobs"
        SPACK_COMPREPLY="-h --help -f --force --unsigned -u --signed --key -k --update-index --rebuild-index --only --with-build-dependencies --without-build-dependencies --fail-fast --base-image --tag -t --private -j --jobs"
    else
        _mirrors
    fi

@@ -610,22 +610,18 @@ _spack_buildcache_keys() {
_spack_buildcache_check() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file"
        SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope"
    else
        _all_packages
    fi
}

_spack_buildcache_download() {
    SPACK_COMPREPLY="-h --help -s --spec --spec-file -p --path"
    SPACK_COMPREPLY="-h --help -s --spec -p --path"
}

_spack_buildcache_get_buildcache_name() {
    SPACK_COMPREPLY="-h --help -s --spec --spec-file"
}

_spack_buildcache_save_specfile() {
    SPACK_COMPREPLY="-h --help --root-spec --root-specfile -s --specs --specfile-dir"
    SPACK_COMPREPLY="-h --help --root-spec -s --specs --specfile-dir"
}

_spack_buildcache_sync() {

@@ -700,7 +696,7 @@ _spack_ci_rebuild_index() {
}

_spack_ci_rebuild() {
    SPACK_COMPREPLY="-h --help -t --tests --fail-fast -j --jobs"
    SPACK_COMPREPLY="-h --help -t --tests --fail-fast --timeout -j --jobs"
}

_spack_ci_reproduce_build() {

@@ -753,7 +749,7 @@ _spack_compiler() {
    then
        SPACK_COMPREPLY="-h --help"
    else
        SPACK_COMPREPLY="find add remove rm list info"
        SPACK_COMPREPLY="find add remove rm list ls info"
    fi
}

@@ -797,6 +793,10 @@ _spack_compiler_list() {
    SPACK_COMPREPLY="-h --help --scope"
}

_spack_compiler_ls() {
    SPACK_COMPREPLY="-h --help --scope"
}

_spack_compiler_info() {
    if $list_options
    then

@@ -1177,7 +1177,7 @@ _spack_external() {
    then
        SPACK_COMPREPLY="-h --help"
    else
        SPACK_COMPREPLY="find list read-cray-manifest"
        SPACK_COMPREPLY="find list ls read-cray-manifest"
    fi
}

@@ -1194,6 +1194,10 @@ _spack_external_list() {
    SPACK_COMPREPLY="-h --help"
}

_spack_external_ls() {
    SPACK_COMPREPLY="-h --help"
}

_spack_external_read_cray_manifest() {
    SPACK_COMPREPLY="-h --help --file --directory --ignore-default-dir --dry-run --fail-on-error"
}

@@ -1210,7 +1214,7 @@ _spack_fetch() {
_spack_find() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --format -H --hashes --json -I --install-status -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -N --namespaces -r --only-roots -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --only-deprecated --deprecated --install-tree --start-date --end-date"
        SPACK_COMPREPLY="-h --help --format -H --hashes --json -I --install-status --specfile-format -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -N --namespaces -r --only-roots -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --only-deprecated --deprecated --install-tree --start-date --end-date"
    else
        _installed_packages
    fi

@@ -1903,7 +1907,7 @@ _spack_test() {
_spack_test_run() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals -x --explicit --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty"
        SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals -x --explicit --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --timeout --clean --dirty"
    else
        _installed_packages
    fi
@@ -693,7 +693,6 @@ complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a list -d
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a keys -d 'get public keys available on mirrors'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a check -d 'check specs against remote binary mirror(s) to see if any need to be rebuilt'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a download -d 'download buildcache entry from a remote mirror to local folder'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a get-buildcache-name -d 'get name (prefix) of buildcache entries for this spec'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a save-specfile -d 'get full spec for dependencies and write them to files in the specified output directory'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a sync -d 'sync binaries (and associated metadata) from one mirror to another'
complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a update-index -d 'update a buildcache index'

@@ -702,7 +701,7 @@ complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a
complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit'

# spack buildcache push
set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force u/unsigned signed k/key= update-index spec-file= only= with-build-dependencies without-build-dependencies fail-fast base-image= t/tag= private j/jobs=
set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force u/unsigned signed k/key= update-index only= with-build-dependencies without-build-dependencies fail-fast base-image= t/tag= private j/jobs=
complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache push' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -d 'show this help message and exit'

@@ -716,8 +715,6 @@ complete -c spack -n '__fish_spack_using_command buildcache push' -l key -s k -r
complete -c spack -n '__fish_spack_using_command buildcache push' -l key -s k -r -d 'key for signing'
complete -c spack -n '__fish_spack_using_command buildcache push' -l update-index -l rebuild-index -f -a update_index
complete -c spack -n '__fish_spack_using_command buildcache push' -l update-index -l rebuild-index -d 'regenerate buildcache index after building package(s)'
complete -c spack -n '__fish_spack_using_command buildcache push' -l spec-file -r -f -a spec_file
complete -c spack -n '__fish_spack_using_command buildcache push' -l spec-file -r -d 'create buildcache entry for spec from json or yaml file'
complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -f -a 'package dependencies'
complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies'
complete -c spack -n '__fish_spack_using_command buildcache push' -l with-build-dependencies -f -a with_build_dependencies

@@ -736,7 +733,7 @@ complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -
complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -d 'explicitly set number of parallel jobs'

# spack buildcache create
set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force u/unsigned signed k/key= update-index spec-file= only= with-build-dependencies without-build-dependencies fail-fast base-image= t/tag= private j/jobs=
set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force u/unsigned signed k/key= update-index only= with-build-dependencies without-build-dependencies fail-fast base-image= t/tag= private j/jobs=
complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache create' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -d 'show this help message and exit'

@@ -750,8 +747,6 @@ complete -c spack -n '__fish_spack_using_command buildcache create' -l key -s k
complete -c spack -n '__fish_spack_using_command buildcache create' -l key -s k -r -d 'key for signing'
complete -c spack -n '__fish_spack_using_command buildcache create' -l update-index -l rebuild-index -f -a update_index
complete -c spack -n '__fish_spack_using_command buildcache create' -l update-index -l rebuild-index -d 'regenerate buildcache index after building package(s)'
complete -c spack -n '__fish_spack_using_command buildcache create' -l spec-file -r -f -a spec_file
complete -c spack -n '__fish_spack_using_command buildcache create' -l spec-file -r -d 'create buildcache entry for spec from json or yaml file'
complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -f -a 'package dependencies'
complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies'
complete -c spack -n '__fish_spack_using_command buildcache create' -l with-build-dependencies -f -a with_build_dependencies

@@ -811,7 +806,7 @@ complete -c spack -n '__fish_spack_using_command buildcache keys' -s f -l force
complete -c spack -n '__fish_spack_using_command buildcache keys' -s f -l force -d 'force new download of keys'

# spack buildcache check
set -g __fish_spack_optspecs_spack_buildcache_check h/help m/mirror-url= o/output-file= scope= s/spec= spec-file=
set -g __fish_spack_optspecs_spack_buildcache_check h/help m/mirror-url= o/output-file= scope=
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 buildcache check' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command buildcache check' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache check' -s h -l help -d 'show this help message and exit'

@@ -821,39 +816,22 @@ complete -c spack -n '__fish_spack_using_command buildcache check' -s o -l outpu
complete -c spack -n '__fish_spack_using_command buildcache check' -s o -l output-file -r -d 'file where rebuild info should be written'
complete -c spack -n '__fish_spack_using_command buildcache check' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command buildcache check' -l scope -r -d 'configuration scope containing mirrors to check'
complete -c spack -n '__fish_spack_using_command buildcache check' -s s -l spec -r -f -a spec
complete -c spack -n '__fish_spack_using_command buildcache check' -s s -l spec -r -d 'check single spec instead of release specs file'
complete -c spack -n '__fish_spack_using_command buildcache check' -l spec-file -r -f -a spec_file
complete -c spack -n '__fish_spack_using_command buildcache check' -l spec-file -r -d 'check single spec from json or yaml file instead of release specs file'

# spack buildcache download
set -g __fish_spack_optspecs_spack_buildcache_download h/help s/spec= spec-file= p/path=
set -g __fish_spack_optspecs_spack_buildcache_download h/help s/spec= p/path=
complete -c spack -n '__fish_spack_using_command buildcache download' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache download' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command buildcache download' -s s -l spec -r -f -a spec
complete -c spack -n '__fish_spack_using_command buildcache download' -s s -l spec -r -d 'download built tarball for spec from mirror'
complete -c spack -n '__fish_spack_using_command buildcache download' -l spec-file -r -f -a spec_file
complete -c spack -n '__fish_spack_using_command buildcache download' -l spec-file -r -d 'download built tarball for spec (from json or yaml file) from mirror'
complete -c spack -n '__fish_spack_using_command buildcache download' -s p -l path -r -f -a path
complete -c spack -n '__fish_spack_using_command buildcache download' -s p -l path -r -d 'path to directory where tarball should be downloaded'

# spack buildcache get-buildcache-name
set -g __fish_spack_optspecs_spack_buildcache_get_buildcache_name h/help s/spec= spec-file=
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -s s -l spec -r -f -a spec
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -s s -l spec -r -d 'spec string for which buildcache name is desired'
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -l spec-file -r -f -a spec_file
complete -c spack -n '__fish_spack_using_command buildcache get-buildcache-name' -l spec-file -r -d 'path to spec json or yaml file for which buildcache name is desired'

# spack buildcache save-specfile
set -g __fish_spack_optspecs_spack_buildcache_save_specfile h/help root-spec= root-specfile= s/specs= specfile-dir=
set -g __fish_spack_optspecs_spack_buildcache_save_specfile h/help root-spec= s/specs= specfile-dir=
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -l root-spec -r -f -a root_spec
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -l root-spec -r -d 'root spec of dependent spec'
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -l root-specfile -r -f -a root_specfile
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -l root-specfile -r -d 'path to json or yaml file containing root spec of dependent spec'
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -s s -l specs -r -f -a specs
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -s s -l specs -r -d 'list of dependent specs for which saved yaml is desired'
complete -c spack -n '__fish_spack_using_command buildcache save-specfile' -l specfile-dir -r -f -a specfile_dir

@@ -989,13 +967,15 @@ complete -c spack -n '__fish_spack_using_command ci rebuild-index' -s h -l help
complete -c spack -n '__fish_spack_using_command ci rebuild-index' -s h -l help -d 'show this help message and exit'

# spack ci rebuild
set -g __fish_spack_optspecs_spack_ci_rebuild h/help t/tests fail-fast j/jobs=
set -g __fish_spack_optspecs_spack_ci_rebuild h/help t/tests fail-fast timeout= j/jobs=
complete -c spack -n '__fish_spack_using_command ci rebuild' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command ci rebuild' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command ci rebuild' -s t -l tests -f -a tests
complete -c spack -n '__fish_spack_using_command ci rebuild' -s t -l tests -d 'run stand-alone tests after the build'
complete -c spack -n '__fish_spack_using_command ci rebuild' -l fail-fast -f -a fail_fast
complete -c spack -n '__fish_spack_using_command ci rebuild' -l fail-fast -d 'stop stand-alone tests after the first failure'
complete -c spack -n '__fish_spack_using_command ci rebuild' -l timeout -r -f -a timeout
complete -c spack -n '__fish_spack_using_command ci rebuild' -l timeout -r -d 'maximum time (in seconds) that tests are allowed to run'
complete -c spack -n '__fish_spack_using_command ci rebuild' -s j -l jobs -r -f -a jobs
complete -c spack -n '__fish_spack_using_command ci rebuild' -s j -l jobs -r -d 'explicitly set number of parallel jobs'

@@ -1074,6 +1054,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a add -d 's
complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a remove -d 'remove compiler by spec'
complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a rm -d 'remove compiler by spec'
complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a list -d 'list available compilers'
complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a ls -d 'list available compilers'
complete -c spack -n '__fish_spack_using_command_pos 0 compiler' -f -a info -d 'show compiler paths'
complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -d 'show this help message and exit'

@@ -1133,6 +1114,13 @@ complete -c spack -n '__fish_spack_using_command compiler list' -s h -l help -d
complete -c spack -n '__fish_spack_using_command compiler list' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command compiler list' -l scope -r -d 'configuration scope to read from'

# spack compiler ls
set -g __fish_spack_optspecs_spack_compiler_ls h/help scope=
complete -c spack -n '__fish_spack_using_command compiler ls' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command compiler ls' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command compiler ls' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command compiler ls' -l scope -r -d 'configuration scope to read from'

# spack compiler info
set -g __fish_spack_optspecs_spack_compiler_info h/help scope=
complete -c spack -n '__fish_spack_using_command_pos 0 compiler info' -f -a '(__fish_spack_installed_compilers)'

@@ -1727,6 +1715,7 @@ complete -c spack -n '__fish_spack_using_command extensions' -s s -l show -r -d
set -g __fish_spack_optspecs_spack_external h/help
complete -c spack -n '__fish_spack_using_command_pos 0 external' -f -a find -d 'add external packages to packages.yaml'
complete -c spack -n '__fish_spack_using_command_pos 0 external' -f -a list -d 'list detectable packages, by repository and name'
complete -c spack -n '__fish_spack_using_command_pos 0 external' -f -a ls -d 'list detectable packages, by repository and name'
complete -c spack -n '__fish_spack_using_command_pos 0 external' -f -a read-cray-manifest -d 'consume a Spack-compatible description of externally-installed packages, including dependency relationships'
complete -c spack -n '__fish_spack_using_command external' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command external' -s h -l help -d 'show this help message and exit'

@@ -1756,6 +1745,11 @@ set -g __fish_spack_optspecs_spack_external_list h/help
complete -c spack -n '__fish_spack_using_command external list' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command external list' -s h -l help -d 'show this help message and exit'

# spack external ls
set -g __fish_spack_optspecs_spack_external_ls h/help
complete -c spack -n '__fish_spack_using_command external ls' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command external ls' -s h -l help -d 'show this help message and exit'

# spack external read-cray-manifest
set -g __fish_spack_optspecs_spack_external_read_cray_manifest h/help file= directory= ignore-default-dir dry-run fail-on-error
complete -c spack -n '__fish_spack_using_command external read-cray-manifest' -s h -l help -f -a help

@@ -1792,7 +1786,7 @@ complete -c spack -n '__fish_spack_using_command fetch' -l deprecated -f -a conf
complete -c spack -n '__fish_spack_using_command fetch' -l deprecated -d 'allow concretizer to select deprecated versions'

# spack find
set -g __fish_spack_optspecs_spack_find h/help format= H/hashes json I/install-status d/deps p/paths groups no-groups l/long L/very-long t/tag= N/namespaces r/only-roots c/show-concretized f/show-flags show-full-compiler x/explicit X/implicit u/unknown m/missing v/variants loaded M/only-missing only-deprecated deprecated install-tree= start-date= end-date=
set -g __fish_spack_optspecs_spack_find h/help format= H/hashes json I/install-status specfile-format d/deps p/paths groups no-groups l/long L/very-long t/tag= N/namespaces r/only-roots c/show-concretized f/show-flags show-full-compiler x/explicit X/implicit u/unknown m/missing v/variants loaded M/only-missing only-deprecated deprecated install-tree= start-date= end-date=
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 find' -f -a '(__fish_spack_installed_specs)'
complete -c spack -n '__fish_spack_using_command find' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command find' -s h -l help -d 'show this help message and exit'

@@ -1804,6 +1798,8 @@ complete -c spack -n '__fish_spack_using_command find' -l json -f -a json
complete -c spack -n '__fish_spack_using_command find' -l json -d 'output specs as machine-readable json records'
complete -c spack -n '__fish_spack_using_command find' -s I -l install-status -f -a install_status
complete -c spack -n '__fish_spack_using_command find' -s I -l install-status -d 'show install status of packages'
complete -c spack -n '__fish_spack_using_command find' -l specfile-format -f -a specfile_format
complete -c spack -n '__fish_spack_using_command find' -l specfile-format -d 'show the specfile format for installed deps '
complete -c spack -n '__fish_spack_using_command find' -s d -l deps -f -a deps
complete -c spack -n '__fish_spack_using_command find' -s d -l deps -d 'output dependencies along with found specs'
complete -c spack -n '__fish_spack_using_command find' -s p -l paths -f -a paths

@@ -2950,7 +2946,7 @@ complete -c spack -n '__fish_spack_using_command test' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command test' -s h -l help -d 'show this help message and exit'

# spack test run
set -g __fish_spack_optspecs_spack_test_run h/help alias= fail-fast fail-first externals x/explicit keep-stage log-format= log-file= cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= help-cdash clean dirty
set -g __fish_spack_optspecs_spack_test_run h/help alias= fail-fast fail-first externals x/explicit keep-stage log-format= log-file= cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= help-cdash timeout= clean dirty
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 test run' -f -a '(__fish_spack_installed_specs)'
complete -c spack -n '__fish_spack_using_command test run' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command test run' -s h -l help -d 'show this help message and exit'

@@ -2977,6 +2973,8 @@ complete -c spack -n '__fish_spack_using_command test run' -l cdash-track -r -f
complete -c spack -n '__fish_spack_using_command test run' -l cdash-buildstamp -r -f -a cdash_buildstamp
complete -c spack -n '__fish_spack_using_command test run' -l help-cdash -f -a help_cdash
complete -c spack -n '__fish_spack_using_command test run' -l help-cdash -d 'show usage instructions for CDash reporting'
complete -c spack -n '__fish_spack_using_command test run' -l timeout -r -f -a timeout
complete -c spack -n '__fish_spack_using_command test run' -l timeout -r -d 'maximum time (in seconds) that tests are allowed to run'
complete -c spack -n '__fish_spack_using_command test run' -l clean -f -a dirty
complete -c spack -n '__fish_spack_using_command test run' -l clean -d 'unset harmful variables in the build environment (default)'
complete -c spack -n '__fish_spack_using_command test run' -l dirty -f -a dirty
@@ -11,7 +11,7 @@ class BuildEnvCompilerVarB(Package):
    url = "https://www.example.com"
    version("1.0", md5="0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        env.set("CC", "this-should-be-dropped")
        env.set("CXX", "this-should-be-dropped")
        env.set("FC", "this-should-be-dropped")

@@ -22,5 +22,5 @@ def install(self, spec, prefix):
        mkdirp(prefix)
        touch(join_path(prefix, "dummyfile"))

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        env.set("FOOBAR", self.name)

@@ -67,7 +67,9 @@ def setup_build_environment(self, spack_env):
            "link arg on dependency spec not readable from " "setup_build_environment.",
        )

    def setup_dependent_build_environment(self, env, dependent_spec):
    def setup_dependent_build_environment(
        self, env: EnvironmentModifications, dependent_spec: Spec
    ) -> None:
        spack_cc  # Ensure spack module-scope variable is available
        check(
            from_cmake == "from_cmake",

@@ -36,11 +36,13 @@ class Cmake(Package):
        url="https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz",
    )

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        spack_cc  # Ensure spack module-scope variable is available
        env.set("for_install", "for_install")

    def setup_dependent_build_environment(self, env, dependent_spec):
    def setup_dependent_build_environment(
        self, env: EnvironmentModifications, dependent_spec: Spec
    ) -> None:
        spack_cc  # Ensure spack module-scope variable is available
        env.set("from_cmake", "from_cmake")
@@ -23,5 +23,5 @@ def install(self, spec, prefix):
        make()
        make("install")

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        env.set("FOOBAR", self.name)

@@ -24,5 +24,5 @@ def install(self, spec, prefix):
        make()
        make("install")

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        env.set("FOOBAR", self.name)
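The recurring change in these package hunks is the move to typed environment-setup signatures. A minimal sketch of a package written against the new signatures follows (the package name, URL, and environment variables are illustrative, not from the diff):

# Sketch only: a hypothetical package using the typed environment hooks.
from spack.package import *


class MyExample(Package):
    """Illustrative package demonstrating the typed hook signatures."""

    homepage = "https://www.example.com"
    url = "https://www.example.com/my-example-1.0.tar.gz"

    version("1.0", md5="0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        # Runs when the installed package is loaded; env edits are typed now.
        env.set("MY_EXAMPLE_HOME", self.prefix)

    def setup_dependent_build_environment(
        self, env: EnvironmentModifications, dependent_spec: Spec
    ) -> None:
        # Runs in the build environment of packages that depend on this one.
        env.prepend_path("CMAKE_PREFIX_PATH", self.prefix)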