Compare commits

..

27 Commits

Author SHA1 Message Date
Gregory Becker
559ace64e1 reporters wip: working for installs
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-11 15:47:21 -07:00
Gregory Becker
a2441f4656 fixup after rebase
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-09 10:19:29 -07:00
kshea21
dcdbbd235d style 2025-04-09 09:49:41 -07:00
Gregory Becker
be03df1b86 fix overwrite test 2025-04-09 09:49:40 -07:00
kshea21
e3d3230c9b new mock packages 2025-04-09 09:49:40 -07:00
Gregory Becker
e687bb902f cleanup from debugging 2025-04-09 09:49:40 -07:00
Gregory Becker
f1638365a9 refactor transaction across start/complete 2025-04-09 09:49:40 -07:00
Gregory Becker
4e9547703c overwrite tests: work on macos 2025-04-09 09:49:38 -07:00
kshea21
56df316cc2 overwrite install refactoring and tests 2025-04-09 09:48:10 -07:00
kshea21
0cfd514c0c spack commands --update-completion 2025-04-09 09:47:22 -07:00
Todd Gamblin
92e1b1795b fix style issues and refactor a bit
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:47:22 -07:00
kshea21
d8362d02c7 pushing to rebase 2025-04-09 09:47:22 -07:00
kshea21
57a5722ba7 some style things 2025-04-09 09:47:19 -07:00
Todd Gamblin
1e5e416df0 docs: add nitpick ignore for internal multiprocessing.context.Process class
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:21 -07:00
Todd Gamblin
8d0923f29e add quotes to type annotation
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:20 -07:00
kshea21
4a153a185b refactors and test fixes 2025-04-09 09:46:18 -07:00
kshea21
2da51eaec7 failing overwrite build deps 2025-04-09 09:44:01 -07:00
kshea21
974d10f32b added no_op state tracking 2025-04-09 09:41:56 -07:00
kshea21
1a19c09c55 style 2025-04-09 09:41:56 -07:00
kshea21
11572c0caf multi and single tests 2025-04-09 09:41:56 -07:00
kshea21
5d7ba48b36 style 2025-04-09 09:41:53 -07:00
kshea21
fa088e4975 review changes 2025-04-09 09:40:38 -07:00
kshea21
9d0133a771 process handle back 2025-04-09 09:38:37 -07:00
kshea21
0b1e388a0f remove process handle 2025-04-09 09:38:35 -07:00
kshea21
60380a4724 untest -> test 2025-04-09 09:37:38 -07:00
kshea21
03a3546f14 style 2025-04-09 09:37:38 -07:00
kshea21
0808fd1a44 Rebasing -p/--concurrent-packages on develop 2025-04-09 09:37:32 -07:00
12416 changed files with 171908 additions and 174967 deletions

View File

@@ -28,7 +28,7 @@ max-line-length = 99
# - F821: undefined name `name`
#
per-file-ignores =
var/spack/*/package.py:F403,F405,F821
var/spack/repos/*/package.py:F403,F405,F821
*-ci-package.py:F403,F405,F821
# exclude things we usually do not want linting for.

.gitattributes vendored
View File

@@ -1,3 +1,4 @@
*.py diff=python
*.lp linguist-language=Prolog
lib/spack/external/* linguist-vendored
*.bat text eol=crlf

View File

@@ -26,7 +26,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static gawk
cmake bison bison-devel libstdc++-static
- name: Setup OpenSUSE
if: ${{ matrix.image == 'opensuse/leap:latest' }}
run: |

View File

@@ -42,17 +42,17 @@ jobs:
# built-in repository or documentation
filters: |
bootstrap:
- 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
- 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
- 'var/spack/repos/spack_repo/builtin/packages/python/**'
- 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
- 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
- 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
- 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
- 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
- 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
- 'var/spack/repos/spack_repo/builtin/packages/npth/**'
- 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
- 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
- 'var/spack/repos/builtin/packages/clingo/**'
- 'var/spack/repos/builtin/packages/python/**'
- 'var/spack/repos/builtin/packages/re2c/**'
- 'var/spack/repos/builtin/packages/gnupg/**'
- 'var/spack/repos/builtin/packages/libassuan/**'
- 'var/spack/repos/builtin/packages/libgcrypt/**'
- 'var/spack/repos/builtin/packages/libgpg-error/**'
- 'var/spack/repos/builtin/packages/libksba/**'
- 'var/spack/repos/builtin/packages/npth/**'
- 'var/spack/repos/builtin/packages/pinentry/**'
- 'lib/spack/**'
- 'share/spack/**'
- '.github/workflows/bootstrap.yml'

View File

@@ -34,7 +34,7 @@ jobs:
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: |
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
@@ -65,11 +65,7 @@ jobs:
python_version: '3.13'
verify-checksums:
# do not run if the commit message or PR description contains [skip-verify-checksums]
if: >-
${{ inputs.with_packages == 'true' &&
!contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
!contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
if: ${{ inputs.with_packages == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

View File

@@ -5,4 +5,4 @@ isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250403
vermin==1.6.0
pylint==3.3.7
pylint==3.3.6

View File

@@ -46,42 +46,18 @@ See the
[Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
for examples and highlights.
Installation
----------------
To install spack, first make sure you have Python & Git.
To install spack and your first package, make sure you have Python & Git.
Then:
```bash
git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
```
<details>
<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
<br>
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
$ cd spack/bin
$ ./spack install zlib
> [!TIP]
> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
>
> `--depth=2` prunes the git history to reduce the size of the Spack installation.
</details>
```bash
# For bash/zsh/sh
. spack/share/spack/setup-env.sh
# For tcsh/csh
source spack/share/spack/setup-env.csh
# For fish
. spack/share/spack/setup-env.fish
```
```bash
# Now you're ready to install a package!
spack install zlib-ng
```
Documentation
----------------

View File

@@ -90,9 +90,10 @@ config:
misc_cache: $user_cache_path/cache
# Abort downloads after this many seconds if not data is received.
# Setting this to 0 will disable the timeout.
connect_timeout: 30
# Timeout in seconds used for downloading sources etc. This only applies
# to the connection phase and can be increased for slow connections or
# servers. 0 means no timeout.
connect_timeout: 10
# If this is false, tools like curl that use SSL will not verify

View File

@@ -25,8 +25,6 @@ packages:
glu: [apple-glu]
unwind: [apple-libunwind]
uuid: [apple-libuuid]
apple-clang:
buildable: false
apple-gl:
buildable: false
externals:

View File

@@ -72,8 +72,6 @@ packages:
permissions:
read: world
write: user
cce:
buildable: false
cray-fftw:
buildable: false
cray-libsci:
@@ -88,23 +86,13 @@ packages:
buildable: false
essl:
buildable: false
fj:
buildable: false
fujitsu-mpi:
buildable: false
fujitsu-ssl2:
buildable: false
glibc:
buildable: false
hpcx-mpi:
buildable: false
iconv:
prefer: [libiconv]
mpt:
buildable: false
musl:
buildable: false
spectrum-mpi:
buildable: false
xl:
buildable: false

View File

@@ -11,4 +11,4 @@
# ~/.spack/repos.yaml
# -------------------------------------------------------------------------
repos:
- $spack/var/spack/repos/spack_repo/builtin
- $spack/var/spack/repos/builtin

View File

@@ -23,5 +23,3 @@ packages:
mpi:
require:
- one_of: [msmpi]
msvc:
buildable: false

View File

@@ -1916,7 +1916,7 @@ diagnostics. Issues, if found, are reported to stdout:
PKG-DIRECTIVES: 1 issue found
1. lammps: wrong variant in "conflicts" directive
the variant 'adios' does not exist
in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py
in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
------------

View File

@@ -83,7 +83,7 @@ packages. You can quickly find examples by running:
.. code-block:: console
$ cd var/spack/repos/spack_repo/builtin/packages
$ cd var/spack/repos/builtin/packages
$ grep -l QMakePackage */package.py

View File

@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:
Spack has a number of built-in bundle packages, such as:
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.

View File

@@ -199,7 +199,7 @@ a variant to control this:
However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with:
.. code-block:: python

View File

@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
focuses mostly on how Spack's build systems work.
In this guide, we will be using the
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
packages as examples. ``perl``'s build system is a hand-written
``Configure`` shell script, while ``cmake`` bootstraps itself during
installation. Both of these packages require custom build systems.

View File

@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the
.. code-block:: python
def setup_build_environment(self, env: EnvironmentModifications) -> None:
def setup_build_environment(self, env):
env.set("PREFIX", prefix)
env.set("BLASLIB", spec["blas"].libs.ld_flags)
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
is a good example of a simple package that does this, while
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
is a good example of a more complex package.
""""""""""""""""""""""
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
]
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
is a good example of a package that uses this strategy.
"""""""""""""
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
is a good example of a package that involves editing a Makefile to set
the appropriate variables.
@@ -192,7 +192,7 @@ well for storing variables:
inc.write(f"{key} = {config[key]}\n")
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
is a good example of a package that uses a dictionary to store
configuration variables.
@@ -213,7 +213,7 @@ them in a list:
inc.write(f"{var}\n")
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
is a good example of a package that uses a list to store
configuration variables.

View File

@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
If it isn't on CRAN, try Bioconductor, another common R repository.
For the purposes of this tutorial, we will be walking through
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
as an example. If you search for "CRAN caret", you will quickly find what
you are looking for at https://cran.r-project.org/package=caret.
https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
^^^^^^^^^^^^^^^^^^
Some packages depend on non-R libraries for linking. Check out the
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
package for an example: https://cloud.r-project.org/package=stringi.
If you search for the text "SystemRequirements", you will see:
@@ -352,7 +352,7 @@ Passing arguments to the installation
Some R packages provide additional flags that can be passed to
``R CMD INSTALL``, often to locate non-R dependencies.
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
is an example of this, and flags for linking to an MPI library. To pass
these to the installation command, you can override ``configure_args``
like so:

View File

@@ -104,10 +104,10 @@ Finding available options
The first place to start when looking for a list of valid options to
build a package is ``scons --help``. Some packages like
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
don't bother overwriting the default SCons help message, so this isn't
very useful, but other packages like
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
print a list of valid command-line variables:
.. code-block:: console
@@ -177,7 +177,7 @@ print a list of valid command-line variables:
More advanced packages like
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
use ``scons --help`` to print a list of subcommands:
.. code-block:: console

View File

@@ -206,6 +206,7 @@ def setup(sphinx):
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
("py:class", "concurrent.futures._base.Executor"),
("py:class", "multiprocessing.context.Process"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),
@@ -225,14 +226,8 @@ def setup(sphinx):
("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"),
("py:class", "llnl.util.lang.K"),
("py:class", "llnl.util.lang.V"),
("py:class", "llnl.util.lang.ClassPropertyType"),
("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.ClassPropertyType"),
("py:obj", "llnl.util.lang.K"),
("py:obj", "llnl.util.lang.V"),
]
# The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -226,9 +226,9 @@ If all is well, you'll see something like this:
Modified files:
var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
var/spack/repos/spack_repo/builtin/packages/hdf/package.py
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
var/spack/repos/builtin/packages/hdf5/package.py
var/spack/repos/builtin/packages/hdf/package.py
var/spack/repos/builtin/packages/netcdf/package.py
=======================================================
Flake8 checks were clean.
@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:
.. code-block:: console
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
Flake8 found errors.
Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.
.. warning::
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
If you're using a ``python`` from Spack and you installed
``py-sphinx`` and friends, you need to make them available to your
``python``. The easiest way to do this is to run:

View File

@@ -154,7 +154,9 @@ Package-related modules
:mod:`spack.util.naming`
Contains functions for mapping between Spack package names,
Python module names, and Python class names.
Python module names, and Python class names. Functions like
:func:`~spack.util.naming.mod_to_class` handle mapping package
module names to class names.
:mod:`spack.directives`
*Directives* are functions that can be called inside a package definition
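As a quick illustration of the package-name mapping described above, a minimal
sketch (the session below is hypothetical and assumes the usual behavior of
:func:`~spack.util.naming.mod_to_class`):

.. code-block:: python

   from spack.util.naming import mod_to_class

   # Package/module names map to CamelCase class names:
   mod_to_class("libelf")    # -> "Libelf"
   mod_to_class("py-numpy")  # -> "PyNumpy"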

View File

@@ -1,34 +0,0 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _env-vars-yaml:
=============================================
Environment Variable Settings (env_vars.yaml)
=============================================
Spack allows you to include shell environment variable modifications
for a spack environment by including an ``env_vars.yaml``. Environment
variables can be modified by setting, unsetting, appending, and prepending
variables in the shell environment.
The changes to the shell environment will take effect when the spack
environment is activated.
For example:
.. code-block:: yaml
env_vars:
set:
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
unset:
ENVAR_TO_UNSET_IN_ENV_LOAD:
prepend_path:
PATH_LIST: "path/to/prepend"
append_path:
PATH_LIST: "path/to/append"
remove_path:
PATH_LIST: "path/to/remove"

View File

@@ -1000,28 +1000,6 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.
-------------------------------
Modifying Environment Variables
-------------------------------
Spack Environments can modify the active shell's environment variables when activated. The environment can be
configured to set, unset, prepend, or append using ``env_vars`` configuration in the ``spack.yaml`` or through config scopes
file:
.. code-block:: yaml
spack:
env_vars:
set:
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
unset:
ENVAR_TO_UNSET_IN_ENV_LOAD:
prepend_path:
PATH_LIST: "path/to/prepend"
append_path:
PATH_LIST: "path/to/append"
remove_path:
PATH_LIST: "path/to/remove"
-----------------
Environment Views

View File

@@ -131,7 +131,7 @@ creates a simple python file:
It doesn't take much python coding to get from there to a working
package:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:lines: 5-
Spack also provides wrapper functions around common commands like

View File

@@ -75,7 +75,6 @@ or refer to the full manual below.
packages_yaml
build_settings
environments
env_vars_yaml
containers
mirrors
module_file_support

View File

@@ -128,7 +128,7 @@ depend on the spec:
.. code-block:: python
def setup_run_environment(self, env: EnvironmentModifications) -> None:
def setup_run_environment(self, env):
if self.spec.satisfies("+foo"):
env.set("FOO", "bar")
@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this
.. code-block:: python
def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
def setup_dependent_run_environment(self, env, dependent_spec):
if dependent_spec.package.extends(self.spec):
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)

View File

@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g, `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:
.. code-block:: console
$ spack location -p gmp
${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py
${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path.
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
handles creating package files for you, so you can skip most of the
details here.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/spack_repo/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
If you ``cd`` to that directory, you will see directories for each
package:
.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
:shell:
:ellipsis: 10
@@ -479,7 +479,7 @@ package lives in:
.. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build.
@@ -492,7 +492,7 @@ Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
.. code-block:: python
:linenos:
@@ -520,7 +520,7 @@ these:
$ spack install libelf@0.8.13
Spack sees the package name in the spec and looks for
``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``.
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
In order to handle this, you can define a ``url_for_version()`` function
like so:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.url_for_version
With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
:lines: 9-18
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1995,7 +1995,7 @@ structure like this:
.. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
$SPACK_ROOT/var/spack/repos/builtin/packages/
mvapich2/
package.py
ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:
.. _pyside-patch:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
:pyobject: PyPyside.patch
:linenos:
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::
$ spack resource show 3877ab54
3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
applies to: builtin.m4
``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
$ spack resource show b37164268
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
applies to: builtin.boost
patched by: builtin.dealii
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:
The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment
:linenos:
@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package
:linenos:
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance,
configure arguments:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
:pyobject: M4.configure_args
:linenos:
@@ -4110,7 +4110,7 @@ Shell command functions
Recall the install method from ``libelf``:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:pyobject: Libelf.install
:linenos:
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
additional properties on it to help you out. E.g., in openmpi, you'll
find this:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.setup_dependent_package
That code allows the ``openmpi`` package to associate an ``mpicc`` property
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.
* - Parent/Provider Package
- Stand-alone Tests
* - `C
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
- Compiles ``hello.c`` and runs it
* - `Cxx
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
- Compiles and runs several ``hello`` programs
* - `Fortran
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
* - `Mpi
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
* - :ref:`PythonPackage <pythonpackage>`
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
One example of a package that adds its own stand-alone tests to those
"inherited" by the virtual package it provides an implementation for is
the `Openmpi package
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
Below are snippets from running and viewing the stand-alone test results
for ``openmpi``:

View File

@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
=================================
Spack comes with thousands of built-in package recipes in
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
``var/spack/repos/builtin/``. This is a **package repository** -- a
directory that Spack searches when it needs to find a package by name.
You may need to maintain packages for restricted, proprietary or
experimental software separately from the built-in repository. Spack
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
.. code-block:: yaml
repos:
- $spack/var/spack/repos/spack_repo/builtin
- $spack/var/spack/repos/builtin
The file starts with ``repos:`` and contains a single ordered list of
paths to repositories. Each path is on a separate line starting with
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
.. code-block:: yaml
repos:
- /opt/repos/spack_repo/local_repo
- $spack/var/spack/repos/spack_repo/builtin
- /opt/local-repo
- $spack/var/spack/repos/builtin
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
it searches these repositories in order (first to last) to resolve each
package name. In this example, Spack will look for the following
packages and use the first valid file:
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``
1. ``/opt/local-repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
.. note::
@@ -101,15 +101,14 @@ Namespaces
Every repository in Spack has an associated **namespace** defined in its
top-level ``repo.yaml`` file. If you look at
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
see that its namespace is ``builtin``:
.. code-block:: console
$ cat var/spack/repos/spack_repo/builtin/repo.yaml
$ cat var/spack/repos/builtin/repo.yaml
repo:
namespace: builtin
api: v2.0
Spack records the repository namespace of each installed package. For
example, if you install the ``mpich`` package from the ``builtin`` repo,
@@ -218,15 +217,15 @@ Suppose you have three repositories: the builtin Spack repo
repo containing your own prototype packages (``proto``). Suppose they
contain packages as follows:
+--------------+-----------------------------------------------+-----------------------------+
| Namespace | Path to repo | Packages |
+==============+===============================================+=============================+
| ``proto`` | ``~/my_spack_repos/spack_repo/proto`` | ``mpich`` |
+--------------+-----------------------------------------------+-----------------------------+
| ``llnl`` | ``/usr/local/repos/spack_repo/llnl`` | ``hdf5`` |
+--------------+-----------------------------------------------+-----------------------------+
| ``builtin`` | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+-----------------------------------------------+-----------------------------+
+--------------+------------------------------------+-----------------------------+
| Namespace | Path to repo | Packages |
+==============+====================================+=============================+
| ``proto`` | ``~/proto`` | ``mpich`` |
+--------------+------------------------------------+-----------------------------+
| ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
+--------------+------------------------------------+-----------------------------+
| ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+------------------------------------+-----------------------------+
Suppose that ``hdf5`` depends on ``mpich``. You can override the
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
@@ -234,8 +233,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
.. code-block:: yaml
repos:
- /usr/local/repos/spack_repo/llnl
- $spack/var/spack/repos/spack_repo/builtin
- /usr/local/llnl
- $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
@@ -244,9 +243,9 @@ If, instead, ``repos.yaml`` looks like this:
.. code-block:: yaml
repos:
- ~/my_spack_repos/spack_repo/proto
- /usr/local/repos/spack_repo/llnl
- $spack/var/spack/repos/spack_repo/builtin
- ~/proto
- /usr/local/llnl
- $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
@@ -327,8 +326,8 @@ files, use ``spack repo list``.
$ spack repo list
==> 2 package repositories.
myrepo v2.0 ~/my_spack_repos/spack_repo/myrepo
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
myrepo ~/myrepo
builtin ~/spack/var/spack/repos/builtin
Each repository is listed with its associated namespace. To get the raw,
merged YAML from all configuration files, use ``spack config get repos``:
@@ -336,9 +335,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
.. code-block:: console
$ spack config get repos
repos:
- ~/my_spack_repos/spack_repo/myrepo
- $spack/var/spack/repos/spack_repo/builtin
repos:
- ~/myrepo
- $spack/var/spack/repos/builtin
Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``.
@@ -352,54 +351,66 @@ yourself; you can use the ``spack repo create`` command.
.. code-block:: console
$ spack repo create ~/my_spack_repos myrepo
$ spack repo create myrepo
==> Created repo with namespace 'myrepo'.
==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/myrepo
spack repo add ~/myrepo
$ ls ~/my_spack_repos/spack_repo/myrepo
$ ls myrepo
packages/ repo.yaml
$ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: 'myrepo'
api: v2.0
Namespaces can also be nested, which can be useful if you have
multiple package repositories for an organization. Spack will
create the corresponding directory structure for you:
By default, the namespace of a new repo matches its directory's name.
You can supply a custom namespace with a second argument, e.g.:
.. code-block:: console
$ spack repo create ~/my_spack_repos llnl.comp
$ spack repo create myrepo llnl.comp
==> Created repo with namespace 'llnl.comp'.
==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/llnl/comp
spack repo add ~/myrepo
$ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: 'llnl.comp'
api: v2.0
You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:
.. code-block:: console
$ spack repo create -d applications myrepo apps
==> Created repo with namespace 'apps'.
==> To register it with Spack, run this command:
spack repo add ~/myrepo
$ ls myrepo
applications/ repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: apps
subdirectory: applications
^^^^^^^^^^^^^^^^^^
``spack repo add``
^^^^^^^^^^^^^^^^^^
Once your repository is created, you can register it with Spack with
``spack repo add``. You need to specify the path to the directory that
contains the ``repo.yaml`` file.
``spack repo add``:
.. code-block:: console
$ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
$ spack repo add ./myrepo
==> Added repo with namespace 'llnl.comp'.
$ spack repo list
==> 2 package repositories.
llnl.comp v2.0 ~/my_spack_repos/spack_repo/llnl/comp
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
llnl.comp ~/myrepo
builtin ~/spack/var/spack/repos/builtin
This simply adds the repo to your ``repos.yaml`` file.
@@ -421,43 +432,46 @@ By namespace:
.. code-block:: console
$ spack repo rm llnl.comp
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.
==> Removed repository ~/myrepo with namespace 'llnl.comp'.
$ spack repo list
==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin
builtin ~/spack/var/spack/repos/builtin
By path:
.. code-block:: console
$ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp
$ spack repo rm ~/myrepo
==> Removed repository ~/myrepo
$ spack repo list
==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin
builtin ~/spack/var/spack/repos/builtin
--------------------------------
Repo namespaces and Python
--------------------------------
Package repositories are implemented as Python packages. To be precise,
they are `namespace packages
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
with ``spack_repo`` the top-level namespace, followed by the repository
namespace as submodules. For example, the builtin repository corresponds
to the Python module ``spack_repo.builtin.packages``.
You may have noticed that namespace notation for repositories is similar
to the notation for namespaces in Python. As it turns out, you *can*
treat Spack repositories like Python packages; this is how they are
implemented.
This structure allows you to extend a ``builtin`` package in your own
You could, for example, extend a ``builtin`` package in your own
repository:
.. code-block:: python
from spack_repo.builtin.packages.mpich.package import Mpich
from spack.pkg.builtin.mpich import Mpich
class MyPackage(Mpich):
...
Spack populates ``sys.path`` at runtime with the path to the root of your
package repository's ``spack_repo`` directory.
Spack repo namespaces are actually Python namespaces tacked on under
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
implemented using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

View File

@@ -5,7 +5,7 @@ sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1
docutils==0.21.2
pygments==2.19.1
urllib3==2.4.0
urllib3==2.3.0
pytest==8.3.5
isort==6.0.1
black==25.1.0

View File

@@ -11,7 +11,6 @@
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.17.3
* License: MIT
archspec
--------
@@ -19,7 +18,6 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
* License: Apache-2.0 or MIT
astunparse
----------------
@@ -27,7 +25,6 @@
* Homepage: https://github.com/simonpercivall/astunparse
* Usage: Unparsing Python ASTs for package hashes in Spack
* Version: 1.6.3 (plus modifications)
* License: PSF-2.0
* Note: This is in ``spack.util.unparse`` because it's very heavily
modified, and we want to track coverage for it.
Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -44,7 +41,6 @@
* Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema.
* Version: 22.1.0
* License: MIT
ctest_log_parser
----------------
@@ -52,7 +48,6 @@
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
* Usage: Functions to parse build logs and extract error messages.
* Version: Unversioned
* License: BSD-3-Clause
* Note: This is a homemade port of Kitware's CTest build handler.
distro
@@ -61,7 +56,6 @@
* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection.
* Version: 1.8.0
* License: Apache-2.0
jinja2
------
@@ -69,7 +63,6 @@
* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 3.0.3 (last version supporting Python 3.6)
* License: BSD-3-Clause
jsonschema
----------
@@ -77,7 +70,6 @@
* Homepage: https://pypi.python.org/pypi/jsonschema
* Usage: An implementation of JSON Schema for Python.
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
* License: MIT
* Note: We don't include tests or benchmarks; just what Spack needs.
macholib
@@ -86,7 +78,6 @@
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.16.2
* License: MIT
markupsafe
----------
@@ -94,7 +85,6 @@
* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
* Version: 2.0.1 (last version supporting Python 3.6)
* License: BSD-3-Clause
pyrsistent
----------
@@ -102,7 +92,6 @@
* Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema`
* Version: 0.18.0
* License: MIT
ruamel.yaml
------
@@ -112,7 +101,6 @@
actively maintained and has more features, including round-tripping
comments read from config files.
* Version: 0.17.21
* License: MIT
six
---
@@ -120,6 +108,5 @@
* Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities.
* Version: 1.16.0
* License: MIT
"""

View File

@@ -764,7 +764,7 @@ def copy_tree(
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)
raise OSError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
@@ -1030,6 +1030,9 @@ def replace_directory_transaction(directory_name):
Returns:
temporary directory where ``directory_name`` has been moved
"""
for a, b, c in os.walk(directory_name):
print("PRE", a, b, c)
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
directory_name = os.path.abspath(directory_name)
@@ -1048,6 +1051,7 @@ def replace_directory_transaction(directory_name):
try:
yield backup_dir
except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
print("hitting the proper exception block")
# Try to recover the original directory, if this fails, raise a
# composite exception.
try:
@@ -1056,11 +1060,15 @@ def replace_directory_transaction(directory_name):
shutil.rmtree(directory_name)
os.rename(backup_dir, directory_name)
except Exception as outer_exception:
print("CouldNOtRestoreDirectBackup")
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
for a, b, c in os.walk(directory_name):
print("RESTORED", a, b, c)
tty.debug("Directory recovered [{0}]".format(directory_name))
raise
else:
print("NO FAILURE")
# Otherwise delete the temporary directory
shutil.rmtree(tmpdir, ignore_errors=True)
tty.debug("Temporary directory deleted [{0}]".format(tmpdir))

View File

@@ -15,20 +15,7 @@
import typing
import warnings
from datetime import datetime, timedelta
from typing import (
Any,
Callable,
Dict,
Generic,
Iterable,
Iterator,
List,
Mapping,
Optional,
Tuple,
TypeVar,
Union,
)
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -437,39 +424,46 @@ def add_func_to_class(name, func):
return cls
K = TypeVar("K")
V = TypeVar("V")
@lazy_lexicographic_ordering
class HashableMap(typing.MutableMapping[K, V]):
class HashableMap(collections.abc.MutableMapping):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
__slots__ = ("dict",)
def __init__(self):
self.dict: Dict[K, V] = {}
self.dict = {}
def __getitem__(self, key: K) -> V:
def __getitem__(self, key):
return self.dict[key]
def __setitem__(self, key: K, value: V) -> None:
def __setitem__(self, key, value):
self.dict[key] = value
def __iter__(self) -> Iterator[K]:
def __iter__(self):
return iter(self.dict)
def __len__(self) -> int:
def __len__(self):
return len(self.dict)
def __delitem__(self, key: K) -> None:
def __delitem__(self, key):
del self.dict[key]
def _cmp_iter(self):
for _, v in sorted(self.items()):
yield v
def copy(self):
"""Type-agnostic clone method. Preserves subclass type."""
# Construct a new dict of my type
self_type = type(self)
clone = self_type()
# Copy everything from this dict into it.
for key in self:
clone[key] = self[key].copy()
return clone
def match_predicate(*args):
"""Utility function for making string matching predicates.
@@ -1053,28 +1047,19 @@ def __exit__(self, exc_type, exc_value, tb):
return True
ClassPropertyType = TypeVar("ClassPropertyType")
class classproperty(Generic[ClassPropertyType]):
class classproperty:
"""Non-data descriptor to evaluate a class-level property. The function that performs
the evaluation is injected at creation time and takes an owner (i.e., the class that
originated the instance).
the evaluation is injected at creation time and takes an instance (could be None) and
an owner (i.e., the class that originated the instance)
"""
def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
def __init__(self, callback):
self.callback = callback
def __get__(self, instance, owner) -> ClassPropertyType:
def __get__(self, instance, owner):
return self.callback(owner)
#: A type alias that represents either a classproperty descriptor or a constant value of the same
#: type. This allows derived classes to override a computed class-level property with a constant
#: value while retaining type compatibility.
ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
class DeprecatedProperty:
"""Data descriptor to error or warn when a deprecated property is accessed.

View File

@@ -18,7 +18,7 @@
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (2, 0)
package_api_version = (1, 0)
#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies

View File

@@ -92,7 +92,7 @@ def update_installations(self) -> None:
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
self.write(regenerate=False)
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
self.install_all()
self.install_all(fail_fast=True)
self.write(regenerate=True)
def load(self) -> None:

View File

@@ -31,12 +31,14 @@
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
import signal
import stat
import sys
import traceback
import types
@@ -46,6 +48,7 @@
from itertools import chain
from multiprocessing.connection import Connection
from typing import (
Any,
Callable,
Dict,
List,
@@ -452,6 +455,35 @@ def optimization_flags(compiler, target):
return result
class FilterDefaultDynamicLinkerSearchPaths:
"""Remove rpaths to directories that are default search paths of the dynamic linker."""
def __init__(self, dynamic_linker: Optional[str]) -> None:
# Identify directories by (inode, device) tuple, which handles symlinks too.
self.default_path_identifiers: Set[Tuple[int, int]] = set()
if not dynamic_linker:
return
for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
try:
s = os.stat(path)
if stat.S_ISDIR(s.st_mode):
self.default_path_identifiers.add((s.st_ino, s.st_dev))
except OSError:
continue
def is_dynamic_loader_default_path(self, p: str) -> bool:
try:
s = os.stat(p)
return (s.st_ino, s.st_dev) in self.default_path_identifiers
except OSError:
return False
def __call__(self, dirs: List[str]) -> List[str]:
if not self.default_path_identifiers:
return dirs
return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
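The filter above identifies directories by their ``(st_ino, st_dev)`` pair rather than by path string, so symlinked aliases of a default search path are caught as well. A minimal standalone sketch of that property (hypothetical paths, POSIX only):

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    real = os.path.join(d, "lib")
    os.mkdir(real)
    link = os.path.join(d, "lib-link")
    os.symlink(real, link)  # a symlinked alias of the same directory

    def identity(path):
        s = os.stat(path)  # follows symlinks, like the class above
        return (s.st_ino, s.st_dev)

    # Different path strings, same (inode, device) identity:
    assert identity(real) == identity(link)
```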
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
@@ -574,10 +606,12 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
module.nmake = Executable("nmake")
module.msbuild = Executable("msbuild")
# analog to configure for win32
module.cscript = Executable("cscript")
@@ -1091,6 +1125,52 @@ def load_external_modules(context: SetupContext) -> None:
load_module(external_module)
class ProcessHandle:
"""Manages and monitors the state of a child process for package installation."""
def __init__(
self,
pkg: "spack.package_base.PackageBase",
process: multiprocessing.Process,
read_pipe: multiprocessing.connection.Connection,
timeout: int,
):
"""
Parameters:
pkg: The package to be built and installed by the child process.
process: The child process instance being managed/monitored.
read_pipe: The pipe used for receiving information from the child process.
"""
self.pkg = pkg
self.process = process
self.read_pipe = read_pipe
self.timeout = timeout
def poll(self) -> bool:
"""Check if there is data available to receive from the read pipe."""
return self.read_pipe.poll()
def complete(self):
"""Wait (if needed) for child process to complete
and return its exit status.
See ``complete_build_process()``.
"""
return complete_build_process(self)
def terminate_processes(self):
"""Terminate the active child processes if installation failure/error"""
if self.process.is_alive():
# opportunity for graceful termination
self.process.terminate()
self.process.join(timeout=1)
# if the process didn't gracefully terminate, forcefully kill
if self.process.is_alive():
os.kill(self.process.pid, signal.SIGKILL)
self.process.join()
def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable,
@@ -1104,7 +1184,7 @@ def _setup_pkg_and_run(
``_setup_pkg_and_run`` is called by the child process created in
``start_build_process()``, and its main job is to run ``function()`` on behalf of
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
some Spack installation (see :ref:`spack.installer.PackageInstaller._complete_task`).
The child process is passed a ``write_pipe``, on which it's expected to send one of
the following:
@@ -1241,19 +1321,30 @@ def terminate(self):
os.kill(self.p.pid, signal.SIGKILL)
self.p.join()
@property
def pid(self):
return self.p.pid
@property
def exitcode(self):
return self.p.exitcode
def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
def start_build_process(
pkg: "spack.package_base.PackageBase",
function: Callable,
kwargs: Dict[str, Any],
*,
timeout: Optional[int] = None,
):
"""Create a child process to do part of a spack build.
Args:
pkg (spack.package_base.PackageBase): package whose environment we should set up the
pkg: package whose environment we should set up the
child process for.
function (typing.Callable): argless function to run in the child process.
function: argless function to run in the child
process.
kwargs: additional keyword arguments to pass to ``function()``
timeout: maximum time allowed to finish the execution of function
Usage::
@@ -1267,9 +1358,6 @@ def child_fun():
control over the environment, etc. without affecting other builds
that might be executed in the same spack call.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
input_fd = None
@@ -1319,10 +1407,27 @@ def child_fun():
if input_fd is not None:
input_fd.close()
def exitcode_msg(p):
typ = "exit" if p.exitcode >= 0 else "signal"
return f"{typ} {abs(p.exitcode)}"
# Create a ProcessHandle that the caller can use to track
# and complete the process started by this function.
process_handle = ProcessHandle(pkg, p, read_pipe, timeout=timeout)
return process_handle
def complete_build_process(handle: ProcessHandle):
"""
Waits for the child process to complete and handles its exit status.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
def exitcode_msg(process):
typ = "exit" if handle.process.exitcode >= 0 else "signal"
return f"{typ} {abs(handle.process.exitcode)}"
p = handle.process
timeout = handle.timeout
p.join(timeout=timeout)
if p.is_alive():
warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
@@ -1330,18 +1435,23 @@ def exitcode_msg(p):
p.join()
try:
child_result = read_pipe.recv()
# Receive the result the child process sent over the read pipe.
child_result = handle.read_pipe.recv()
except EOFError:
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
handle.process.join()
raise InstallError(
f"The process has stopped unexpectedly ({exitcode_msg(handle.process)})"
)
handle.process.join()
# If the child returned a StopPhase, raise it
if isinstance(child_result, spack.error.StopPhase):
# do not print
raise child_result
# let the caller know which package went wrong.
if isinstance(child_result, InstallError):
child_result.pkg = pkg
child_result.pkg = handle.pkg
if isinstance(child_result, ChildError):
# If the child process raised an error, print its output here rather
@@ -1352,13 +1462,13 @@ def exitcode_msg(p):
raise child_result
# Fallback. Usually caught beforehand in EOFError above.
if p.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
if handle.process.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(handle.process)})")
return child_result
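On the caller side, the error types above warrant different handling. A sketch (assuming, as the ordering of the checks above suggests, that ChildError is a subclass of InstallError; `handle_failure` is a hypothetical helper):

    try:
        result = handle.complete()
    except spack.error.StopPhase:
        pass  # a requested phase stop: not an error, intentionally not printed
    except ChildError:
        raise  # the child's build output was already shown; just re-raise
    except InstallError as err:
        handle_failure(err.pkg)  # err.pkg identifies the package that failed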
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.BaseBuilder)
def get_package_context(traceback, context=3):
@@ -16,7 +16,6 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
@@ -847,9 +846,7 @@ def _remove_libtool_archives(self) -> None:
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
f.write("\n".join(libtool_files))
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
@@ -8,7 +8,6 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when
@@ -87,9 +86,7 @@ def check_args(self):
"""Argument for ``cargo test`` during check phase"""
return []
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
env.set("CARGO_HOME", self.stage.path)
def build(
@@ -47,11 +47,6 @@ class CompilerPackage(spack.package_base.PackageBase):
#: Relative path to compiler wrappers
compiler_wrapper_link_paths: Dict[str, str] = {}
#: Optimization flags
opt_flags: Sequence[str] = []
#: Flags for generating debug information
debug_flags: Sequence[str] = []
def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -8,7 +8,6 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when
@@ -69,9 +68,7 @@ class GoBuilder(BuilderWithDefaults):
#: Callback names for install-time test
install_time_test_callbacks = ["check"]
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
env.set("GO111MODULE", "on")
env.set("GOTOOLCHAIN", "local")
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
@@ -23,7 +23,6 @@
import spack.error
import spack.phase_callbacks
import spack.spec
from spack.build_environment import dso_suffix
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications
@@ -1017,7 +1016,7 @@ def libs(self):
debug_print(result)
return result
def setup_run_environment(self, env: EnvironmentModifications) -> None:
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
These environment variables come from running:
@@ -1050,13 +1049,11 @@ def setup_run_environment(self, env: EnvironmentModifications) -> None:
env.set("F77", self.prefix.bin.ifort)
env.set("F90", self.prefix.bin.ifort)
def setup_dependent_build_environment(
self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
) -> None:
def setup_dependent_build_environment(self, env, dependent_spec):
# NB: This function is overwritten by 'mpi' provider packages:
#
# var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
# var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
# var/spack/repos/builtin/packages/intel-mpi/package.py
# var/spack/repos/builtin/packages/intel-parallel-studio/package.py
#
# They call _setup_dependent_env_callback() as well, but with the
# dictionary kwarg compilers_of_client{} present and populated.
@@ -1064,12 +1061,7 @@ def setup_dependent_build_environment(
# Handle everything in a callback version.
self._setup_dependent_env_callback(env, dependent_spec)
def _setup_dependent_env_callback(
self,
env: EnvironmentModifications,
dependent_spec: spack.spec.Spec,
compilers_of_client={},
) -> None:
def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
# Expected to be called from a client's
# setup_dependent_build_environment(),
# with args extended to convey the client's compilers as needed.
@@ -8,7 +8,6 @@
import spack.builder
import spack.package_base
import spack.spec
import spack.util.environment
import spack.util.executable
import spack.util.prefix
from spack.directives import build_system, depends_on, extends
@@ -115,7 +114,5 @@ def install(
def _luarocks_config_path(self):
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
@@ -4,7 +4,6 @@
import spack.builder
import spack.package_base
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, extends
from spack.multimethod import when
@@ -58,9 +57,7 @@ def install(
"pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
)
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
# octave does not like those environment variables to be set:
env.unset("CC")
env.unset("CXX")
@@ -106,8 +106,8 @@ def install_component(self, installer_path):
bash = Executable("bash")
# Installer writes files in ~/intel set HOME so it goes to staging directory
bash.add_default_env("HOME", join_path(self.stage.path, "home"))
# Installer writes files in ~/intel set HOME so it goes to prefix
bash.add_default_env("HOME", self.prefix)
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))
@@ -132,7 +132,7 @@ def install_component(self, installer_path):
if not isdir(install_dir):
raise RuntimeError("install failed to directory: {0}".format(install_dir))
def setup_run_environment(self, env: EnvironmentModifications) -> None:
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
These environment variables come from running:
@@ -13,9 +13,9 @@
import archspec
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList, join_path
from llnl.util.lang import ClassProperty, classproperty, match_predicate
import spack.builder
import spack.config
@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
namespaces = set(x.package.py_namespace for x in ext_map.values())
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
find_namespace = match_predicate(namespace_re)
find_namespace = lang.match_predicate(namespace_re)
if self.py_namespace in namespaces:
conflicts = list(x for x in conflicts if not find_namespace(x))
@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
)
if self.py_namespace in remaining_namespaces:
namespace_init = match_predicate(
namespace_init = lang.match_predicate(
r"site-packages/{0}/__init__.py".format(self.py_namespace)
)
ignore_namespace = True
@@ -324,27 +324,6 @@ def get_external_python_for_prefix(self):
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
def _homepage(cls: "PythonPackage") -> Optional[str]:
"""Get the homepage from PyPI if available."""
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/project/{name}/"
return None
def _url(cls: "PythonPackage") -> Optional[str]:
if cls.pypi:
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
return None
def _list_url(cls: "PythonPackage") -> Optional[str]:
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/simple/{name}/"
return None
class PythonPackage(PythonExtension):
"""Specialized class for packages that are built using pip."""
@@ -372,9 +351,25 @@ class PythonPackage(PythonExtension):
py_namespace: Optional[str] = None
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
url: ClassProperty[Optional[str]] = classproperty(_url)
list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
@lang.classproperty
def homepage(cls) -> Optional[str]: # type: ignore[override]
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/project/{name}/"
return None
@lang.classproperty
def url(cls) -> Optional[str]:
if cls.pypi:
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
return None
@lang.classproperty
def list_url(cls) -> Optional[str]: # type: ignore[override]
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/simple/{name}/"
return None
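To illustrate the classproperty pattern, a hypothetical package (the class name and `pypi` value are made up; the URLs follow mechanically from the properties above):

    class PyExample(PythonPackage):
        pypi = "example/example-1.0.tar.gz"

    PyExample.homepage  # "https://pypi.org/project/example/"
    PyExample.url       # "https://files.pythonhosted.org/packages/source/e/example/example-1.0.tar.gz"
    PyExample.list_url  # "https://pypi.org/simple/example/"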
@property
def python_spec(self) -> Spec:
@@ -3,8 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional, Tuple
import llnl.util.lang as lang
from llnl.util.filesystem import mkdirp
from llnl.util.lang import ClassProperty, classproperty
from spack.directives import extends
@@ -54,32 +54,6 @@ def install(self, pkg, spec, prefix):
pkg.module.R(*args)
def _homepage(cls: "RPackage") -> Optional[str]:
if cls.cran:
return f"https://cloud.r-project.org/package={cls.cran}"
elif cls.bioc:
return f"https://bioconductor.org/packages/{cls.bioc}"
return None
def _url(cls: "RPackage") -> Optional[str]:
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
return None
def _list_url(cls: "RPackage") -> Optional[str]:
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
return None
def _git(cls: "RPackage") -> Optional[str]:
if cls.bioc:
return f"https://git.bioconductor.org/packages/{cls.bioc}"
return None
class RPackage(Package):
"""Specialized class for packages that are built using R.
@@ -103,7 +77,24 @@ class RPackage(Package):
extends("r")
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
url: ClassProperty[Optional[str]] = classproperty(_url)
list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
git: ClassProperty[Optional[str]] = classproperty(_git)
@lang.classproperty
def homepage(cls):
if cls.cran:
return f"https://cloud.r-project.org/package={cls.cran}"
elif cls.bioc:
return f"https://bioconductor.org/packages/{cls.bioc}"
@lang.classproperty
def url(cls):
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
@lang.classproperty
def list_url(cls):
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
@lang.classproperty
def git(cls):
if cls.bioc:
return f"https://git.bioconductor.org/packages/{cls.bioc}"
@@ -5,8 +5,8 @@
from typing import Optional, Tuple
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.lang import ClassProperty, classproperty
import spack.builder
import spack.spec
@@ -19,12 +19,6 @@
from spack.util.executable import Executable, ProcessError
def _homepage(cls: "RacketPackage") -> Optional[str]:
if cls.racket_name:
return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
return None
class RacketPackage(PackageBase):
"""Specialized class for packages that are built using Racket's
`raco pkg install` and `raco setup` commands.
@@ -43,7 +37,13 @@ class RacketPackage(PackageBase):
extends("racket", when="build_system=racket")
racket_name: Optional[str] = None
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
parallel = True
@lang.classproperty
def homepage(cls):
if cls.racket_name:
return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
return None
@spack.builder.builder("racket")
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
"""Return the builder class if a package module defines it."""
cls = getattr(pkg.module, name, None)
if cls and spack.repo.is_package_module(cls.__module__):
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
return cls
return None
@@ -121,7 +121,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
new_cls_name,
bases,
{
"__module__": package_cls.__module__,
"run_tests": property(lambda x: x.wrapped_package_object.run_tests),
"test_requires_compiler": property(
lambda x: x.wrapped_package_object.test_requires_compiler
@@ -130,6 +129,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
"tester": property(lambda x: x.wrapped_package_object.tester),
},
)
new_cls.__module__ = package_cls.__module__
self.__class__ = new_cls
self.__dict__.update(wrapped_pkg_object.__dict__)
@@ -185,16 +185,10 @@ def __init__(self, pkg):
# These two methods don't follow the (self, spec, prefix) signature of phases nor
# the (self) signature of methods, so they are added explicitly to avoid using a
# catch-all (*args, **kwargs)
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
def setup_build_environment(self, env):
return self.pkg_with_dispatcher.setup_build_environment(env)
def setup_dependent_build_environment(
self,
env: spack.util.environment.EnvironmentModifications,
dependent_spec: spack.spec.Spec,
) -> None:
def setup_dependent_build_environment(self, env, dependent_spec):
return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
return Adapter(pkg)
@@ -408,7 +402,7 @@ def fixup_install(self):
# do something after the package is installed
pass
def setup_build_environment(self, env: EnvironmentModifications) -> None:
def setup_build_environment(self, env):
env.set("MY_ENV_VAR", "my_value")
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
@@ -24,7 +24,6 @@
import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.config as cfg
import spack.environment as ev
import spack.error
@@ -150,10 +149,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
return False
def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
"""Determine which packages were added, removed or changed
between rev1 and rev2, and return the names as a set"""
return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
@@ -614,40 +613,32 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_spec, and attempts to copy the files into the directory given
by job_log_dir.
Parameters:
Args:
job_spec: spec associated with spack install log
job_log_dir: path into which build log should be copied
"""
tty.debug(f"job spec: {job_spec}")
if not job_spec.concrete:
tty.warn("Cannot copy artifacts for non-concrete specs")
try:
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
except spack.error.SpackError as e:
tty.error(f"Cannot copy logs: {str(e)}")
return
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
if not os.path.isdir(package_metadata_root):
# Fallback to using the stage directory
job_pkg = job_spec.package
package_metadata_root = pathlib.Path(job_pkg.stage.path)
archive_files = spack.builder.create(job_pkg).archive_files
tty.warn("Package not installed, falling back to use stage dir")
tty.debug(f"stage dir: {package_metadata_root}")
# Get the package's archived files
archive_files = []
archive_root = package_metadata_root / "archived-files"
if archive_root.is_dir():
archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
else:
# Get the package's archived files
archive_files = []
archive_root = package_metadata_root / "archived-files"
if os.path.isdir(archive_root):
archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
else:
tty.debug(f"No archived files detected at {archive_root}")
msg = "Cannot copy package archived files: archived-files must be a directory"
tty.warn(msg)
# Try zipped and unzipped versions of the build log
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
build_log = package_metadata_root / "spack-build-out.txt"
build_env_mods = package_metadata_root / "spack-build-env.txt"
for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)
for f in [build_log_zipped, build_env_mods, *archive_files]:
copy_files_to_artifacts(str(f), job_log_dir)
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -660,12 +651,11 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
"""
tty.debug(f"test stage: {test_stage}")
if not os.path.exists(test_stage):
tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
tty.error(msg)
return
copy_files_to_artifacts(
os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
)
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def download_and_extract_artifacts(url, work_dir) -> str:
@@ -2,13 +2,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import errno
import glob
import gzip
import json
import os
import re
import shutil
import sys
import time
from collections import deque
@@ -29,7 +25,6 @@
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.compression as compression
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
@@ -45,67 +40,22 @@
_urlopen = web_util.urlopen
def copy_gzipped(glob_or_path: str, dest: str) -> None:
"""Copy all of the files in the source glob/path to the destination.
Args:
glob_or_path: path to file to test
dest: destination path to copy to
"""
files = glob.glob(glob_or_path)
if not files:
raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
if len(files) > 1 and not os.path.isdir(dest):
raise ValueError(
"'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
)
def is_gzipped(path):
with open(path, "rb") as fd:
return compression.GZipFileType().matches_magic(fd)
for src in files:
if is_gzipped(src):
fs.copy(src, dest)
else:
# Compress and copy in one step
src_name = os.path.basename(src)
if os.path.isdir(dest):
zipped = os.path.join(dest, f"{src_name}.gz")
elif not dest.endswith(".gz"):
zipped = f"{dest}.gz"
else:
zipped = dest
with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
shutil.copyfileobj(fin, fout)
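For reference, a usage sketch of the helper removed here (paths are hypothetical, and `artifacts/` is assumed to be an existing directory): a plain-text source is compressed on the fly, while an already-gzipped source is copied as-is.

    # "spack-build-out.txt" is not gzipped, so it is written out as
    # artifacts/spack-build-out.txt.gz
    copy_gzipped("/tmp/stage/spack-build-out.txt", "artifacts/")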
def copy_files_to_artifacts(
src: str, artifacts_dir: str, *, compress_artifacts: bool = False
) -> None:
def copy_files_to_artifacts(src, artifacts_dir):
"""
Copy file(s) to the given artifacts directory
Args:
Parameters:
src (str): the glob-friendly path expression for the file(s) to copy
artifacts_dir (str): the destination directory
compress_artifacts (bool): option to compress copied artifacts using Gzip
"""
try:
if compress_artifacts:
copy_gzipped(src, artifacts_dir)
else:
fs.copy(src, artifacts_dir)
fs.copy(src, artifacts_dir)
except Exception as err:
tty.warn(
(
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
f"exception: {str(err)}"
)
msg = (
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
f"exception: {str(err)}"
)
tty.warn(msg)
def win_quote(quote_str: str) -> str:
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
all_headers (bool): show headers even when arch/compiler aren't defined
status_fn (typing.Callable): if provided, prepend install-status info
output (typing.IO): A file object to write to. Default is ``sys.stdout``
specfile_format (bool): if True, show the specfile format version of each spec
"""
def get_arg(name, default=None):
@@ -458,7 +458,6 @@ def get_arg(name, default=None):
all_headers = get_arg("all_headers", False)
output = get_arg("output", sys.stdout)
status_fn = get_arg("status_fn", None)
specfile_format = get_arg("specfile_format", False)
decorator = get_arg("decorator", None)
if decorator is None:
@@ -480,9 +479,6 @@ def get_arg(name, default=None):
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + vfmt + ffmt
if specfile_format:
format_string = "[{specfile_version}] " + format_string
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
@@ -76,6 +76,9 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=False,
help="regenerate buildcache index after building package(s)",
)
push.add_argument(
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
)
push.add_argument(
"--only",
default="package,dependencies",
@@ -189,14 +192,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=lambda: spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
# Unfortunately there are 3 ways to do the same thing here:
check_specs = check.add_mutually_exclusive_group()
check_specs.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)
check_specs.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
)
arguments.add_common_arguments(check, ["specs"])
check.set_defaults(func=check_fn)
# Download tarball and specfile
download = subparsers.add_parser("download", help=download_fn.__doc__)
download.add_argument("-s", "--spec", help="download built tarball for spec from mirror")
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
download_spec_or_specfile.add_argument(
"-s", "--spec", help="download built tarball for spec from mirror"
)
download_spec_or_specfile.add_argument(
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
)
download.add_argument(
"-p",
"--path",
@@ -206,10 +223,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
)
download.set_defaults(func=download_fn)
# Get buildcache name
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
)
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
)
savespecfile.add_argument(
"-s",
"--specs",
@@ -345,8 +380,14 @@ def _specs_to_be_packaged(
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.specs:
roots = _matching_specs(spack.cmd.parse_specs(args.specs))
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)
if args.specs or args.spec_file:
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
@@ -488,7 +529,22 @@ def check_fn(args: argparse.Namespace):
this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
specs_arg = args.specs
if args.spec_file:
specs_arg = (
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
)
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
f"Use `spack buildcache check {specs_arg}` instead."
)
elif args.spec:
specs_arg = args.spec
tty.warn(
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
f"Use `spack buildcache check {specs_arg}` instead."
)
else:
specs_arg = args.specs
if specs_arg:
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
@@ -522,7 +578,13 @@ def download_fn(args):
code indicates that the command failed to download at least one of the required buildcache
components
"""
specs = _matching_specs(spack.cmd.parse_specs(args.spec))
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache")
@@ -531,6 +593,15 @@ def download_fn(args):
sys.exit(1)
def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))
def save_specfile_fn(args):
"""get full spec for dependencies and write them to files in the specified output directory
@@ -538,7 +609,13 @@ def save_specfile_fn(args):
successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero
"""
specs = spack.cmd.parse_specs(args.root_spec)
if args.root_specfile:
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)
specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
if len(specs) != 1:
tty.die("a single spec argument is required to save specfile")
@@ -453,7 +453,7 @@ def ci_rebuild(args):
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--keep-stage", "--only=package"]
root_install_args = install_args + ["--only=package"]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
@@ -493,9 +493,6 @@ def ci_rebuild(args):
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# Clear the stage directory
spack.stage.purge()
# If the installation succeeded and we're running stand-alone tests for
# the package, run them and copy the output. Failures of any kind should
# *not* terminate the build process or preclude creating the build cache.
@@ -791,9 +788,7 @@ def ci_verify_versions(args):
"""
# Get a list of all packages that have been changed or added
# between from_ref and to_ref
pkgs = spack.repo.get_all_package_diffs(
"AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
)
pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)
failed_version = False
for pkg_name in pkgs:
@@ -97,7 +97,7 @@ def _specs(self, **kwargs):
class SetParallelJobs(argparse.Action):
"""Sets the correct value for parallel build jobs.
The value is is set in the command line configuration scope so that
The value is set in the command line configuration scope so that
it can be retrieved using the spack.config API.
"""
@@ -113,6 +113,23 @@ def __call__(self, parser, namespace, jobs, option_string):
setattr(namespace, "jobs", jobs)
class SetConcurrentPackages(argparse.Action):
"""Sets the value for maximum number of concurrent package builds
The value is set in the command line configuration scope so that
it can be retrieved using the spack.config API.
"""
def __call__(self, parser, namespace, concurrent_packages, option_string):
if concurrent_packages < 1:
msg = 'invalid value for argument "{0}" [expected a positive integer, got "{1}"]'
raise ValueError(msg.format(option_string, concurrent_packages))
spack.config.set("config:concurrent_packages", concurrent_packages, scope="command_line")
setattr(namespace, "concurrent_packages", concurrent_packages)
class DeptypeAction(argparse.Action):
"""Creates a flag of valid dependency types from a deptype argument."""
@@ -377,6 +394,18 @@ def jobs():
)
@arg
def concurrent_packages():
return Args(
"-p",
"--concurrent-packages",
action=SetConcurrentPackages,
type=int,
default=4,
help="maximum number of packages to build concurrently",
)
@arg
def install_status():
return Args(
@@ -63,7 +63,7 @@ def setup_parser(subparser):
)
# List
list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
list_parser = sp.add_parser("list", help="list available compilers")
list_parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
)
@@ -216,6 +216,5 @@ def compiler(parser, args):
"rm": compiler_remove,
"info": compiler_info,
"list": compiler_list,
"ls": compiler_list,
}
action[args.compiler_command](args)
@@ -23,7 +23,7 @@
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.format import get_version_lines
from spack.util.naming import pkg_name_to_class_name, simplify_name
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
description = "create a new package file"
section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:
def __init__(self, name: str, versions, languages: List[str]):
self.name = name
self.class_name = pkg_name_to_class_name(name)
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages
@@ -874,7 +874,7 @@ def get_name(name, url):
result = simplify_name(result)
if not re.match(r"^[a-z0-9-]+$", result):
if not valid_fully_qualified_module_name(result):
tty.die("Package name can only contain a-z, 0-9, and '-'")
return result
@@ -102,7 +102,7 @@ def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Sp
)
else:
# look up the maximum version so infinity versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
@@ -62,7 +62,7 @@ def setup_parser(subparser):
"package Spack knows how to find."
)
sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")
sp.add_parser("list", help="list detectable packages, by repository and name")
read_cray_manifest = sp.add_parser(
"read-cray-manifest",
@@ -259,7 +259,6 @@ def external(parser, args):
action = {
"find": external_find,
"list": external_list,
"ls": external_list,
"read-cray-manifest": external_read_cray_manifest,
}
action[args.external_command](args)
@@ -51,12 +51,6 @@ def setup_parser(subparser):
"-I", "--install-status", action="store_true", help="show install status of packages"
)
subparser.add_argument(
"--specfile-format",
action="store_true",
help="show the specfile format for installed deps ",
)
subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
)
@@ -286,7 +280,6 @@ def root_decorator(spec, string):
show_flags=True,
decorator=root_decorator,
variants=True,
specfile_format=args.specfile_format,
)
print()
@@ -308,7 +301,6 @@ def root_decorator(spec, string):
namespace=True,
show_flags=True,
variants=True,
specfile_format=args.specfile_format,
)
print()
@@ -398,12 +390,7 @@ def find(parser, args):
if args.show_concretized:
display_results += concretized_but_not_installed
cmd.display_specs(
display_results,
args,
decorator=decorator,
all_headers=True,
status_fn=status_fn,
specfile_format=args.specfile_format,
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
)
# print number of installed packages last (as the list may be long)
@@ -63,6 +63,7 @@ def install_kwargs_from_args(args):
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),
"install_package": ("package" in args.things_to_install),
"concurrent_packages": args.concurrent_packages,
}
@@ -84,6 +85,7 @@ def setup_parser(subparser):
default=None,
help="phase to stop after when installing (default None)",
)
arguments.add_common_arguments(subparser, ["concurrent_packages"])
arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
"--overwrite",
@@ -329,16 +331,8 @@ def install(parser, args):
arguments.sanitize_reporter_options(args)
def reporter_factory(specs):
if args.log_format is None:
return lang.nullcontext()
return spack.report.build_context_manager(
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
)
reporter = args.reporter() if args.log_format else None
install_kwargs = install_kwargs_from_args(args)
env = ev.active_environment()
if not env and not args.spec and not args.specfiles:
@@ -346,9 +340,9 @@ def reporter_factory(specs):
try:
if env:
install_with_active_env(env, args, install_kwargs, reporter_factory)
install_with_active_env(env, args, install_kwargs, reporter)
else:
install_without_active_env(args, install_kwargs, reporter_factory)
install_without_active_env(args, install_kwargs, reporter)
except InstallError as e:
if args.show_log_on_error:
_dump_log_on_error(e)
@@ -382,7 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
env.write(regenerate=False)
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter):
specs = spack.cmd.parse_specs(args.spec)
# The following two commands are equivalent:
@@ -416,8 +410,10 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]
try:
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
report_file = report_filename(args, specs_to_install)
install_kwargs["report_file"] = report_file
install_kwargs["reporter"] = reporter
env.install_specs(specs_to_install, **install_kwargs)
finally:
if env.views:
with env.write_transaction():
@@ -461,18 +457,23 @@ def concrete_specs_from_file(args):
return result
def install_without_active_env(args, install_kwargs, reporter_factory):
def install_without_active_env(args, install_kwargs, reporter):
concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)
if len(concrete_specs) == 0:
tty.die("The `spack install` command requires a spec to install.")
with reporter_factory(concrete_specs):
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
try:
builder = PackageInstaller(installs, **install_kwargs)
builder.install()
finally:
if reporter:
report_file = report_filename(args, concrete_specs)
reporter.build_report(report_file, list(builder.reports.values()))
@@ -10,13 +10,11 @@
import re
import sys
from html import escape
from typing import Type
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack.deptypes as dt
import spack.package_base
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList
@@ -141,10 +139,10 @@ def name_only(pkgs, out):
tty.msg("%d packages" % len(pkgs))
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
def github_url(pkg):
"""Link to a package file on github."""
mod_path = pkg.__module__.replace(".", "/")
return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"
url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
return url.format(pkg.name)
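The module-path form removed here derives the link purely from the package's Python module name; for a hypothetical module "a.b.c.package":

    # "a.b.c.package".replace(".", "/") == "a/b/c/package", so the link is
    # https://github.com/spack/spack/blob/develop/var/spack/a/b/c/package.py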
def rows_for_ncols(elts, ncols):
@@ -89,17 +89,17 @@ def setup_parser(subparser):
def pkg_add(args):
"""add a package to the git stage with `git add`"""
spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())
spack.repo.add_package_to_git_stage(args.packages)
def pkg_list(args):
"""list packages associated with a particular spack git revision"""
colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))
colify(spack.repo.list_packages(args.rev))
def pkg_diff(args):
"""compare packages available in two different git revisions"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u1:
print("%s:" % args.rev1)
@@ -114,23 +114,21 @@ def pkg_diff(args):
def pkg_removed(args):
"""show packages removed since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u1:
colify(sorted(u1))
def pkg_added(args):
"""show packages added since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u2:
colify(sorted(u2))
def pkg_changed(args):
"""show packages changed since a commit"""
packages = spack.repo.get_all_package_diffs(
args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
)
packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
if packages:
colify(sorted(packages))
@@ -4,7 +4,6 @@
import os
import sys
from typing import List
import llnl.util.tty as tty
@@ -25,7 +24,9 @@ def setup_parser(subparser):
create_parser = sp.add_parser("create", help=repo_create.__doc__)
create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument(
"namespace", help="name or namespace to identify packages in the repository"
"namespace",
help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?",
)
create_parser.add_argument(
"-d",
@@ -137,7 +138,7 @@ def repo_remove(args):
def repo_list(args):
"""show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope)
repos: List[spack.repo.Repo] = []
repos = []
for r in roots:
try:
repos.append(spack.repo.from_path(r))
@@ -145,14 +146,17 @@ def repo_list(args):
continue
if sys.stdout.isatty():
tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))
msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
if not repos:
return
max_ns_len = max(len(r.namespace) for r in repos)
for repo in repos:
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")
fmt = "%%-%ds%%s" % (max_ns_len + 4)
print(fmt % (repo.namespace, repo.root))
def repo(parser, args):
@@ -136,7 +136,20 @@ def solve(parser, args):
setup_only = set(show) == {"asp"}
unify = spack.config.get("concretizer:unify")
allow_deprecated = spack.config.get("config:deprecated", False)
if unify == "when_possible":
if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate(
solver.solve_in_rounds(
specs,
@@ -153,29 +166,3 @@ def solve(parser, args):
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)
elif unify:
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for spec in specs:
tty.msg("SOLVING SPEC:", spec)
result = solver.solve(
[spec],
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
@@ -59,7 +59,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals
into packages.
"""
return f.startswith("var/spack/") and f.endswith("package.py")
return f.startswith("var/spack/repos/") and f.endswith("package.py")
#: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
def _module_part(root: str, expr: str):
parts = expr.split(".")
# spack.pkg is for repositories, don't try to resolve it here.
if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
return None
while parts:
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
@@ -18,10 +18,6 @@ class Languages(enum.Enum):
class CompilerAdaptor:
"""Provides access to compiler attributes via `Package.compiler`. Useful for
packages which do not yet access compiler properties via `self.spec[language]`.
"""
def __init__(
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
) -> None:
@@ -83,14 +79,6 @@ def implicit_rpaths(self) -> List[str]:
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
return result
@property
def opt_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.opt_flags
@property
def debug_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.debug_flags
@property
def openmp_flag(self) -> str:
return next(iter(self.compilers.values())).package.openmp_flag
@@ -152,7 +140,7 @@ def c17_flag(self) -> str:
@property
def c23_flag(self) -> str:
return self.compilers[Languages.C].package.standard_flag(
language=Languages.C.value, standard="23"
language=Languages.C.value, standard="17"
)
@property
@@ -1638,7 +1638,7 @@ def determine_number_of_jobs(
except ValueError:
pass
return min(max_cpus, cfg.get("config:build_jobs", 16))
return min(max_cpus, cfg.get("config:build_jobs", 4))
class ConfigSectionError(spack.error.ConfigError):
@@ -149,12 +149,12 @@ def _getfqdn():
return socket.getfqdn()
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = {
vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1,
vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3,
vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4,
vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5,
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
vn.Version("7"): spack.spec.SpecfileV4,
vn.Version("8"): spack.spec.SpecfileV5,
}
return reader_cls[version]
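In other words, each specfile format version maps to a reader class; tracing the mapping above:

    cls = reader(vn.Version("8"))  # spack.spec.SpecfileV5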
@@ -824,7 +824,7 @@ def check(cond, msg):
db = fdata["database"]
check("version" in db, "no 'version' in JSON DB.")
self.db_version = vn.StandardVersion.from_string(db["version"])
self.db_version = vn.Version(db["version"])
if self.db_version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
elif self.db_version < _DB_VERSION:
@@ -20,7 +20,7 @@
import sys
from typing import Dict, List, Optional, Set, Tuple, Union
from llnl.util import tty
import llnl.util.tty
import spack.config
import spack.error
@@ -93,13 +93,14 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details
tty.warn(f"Constructed spec for {spec.name} does not have a string representation")
msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
return False
try:
spack.spec.Spec(str(spec))
except spack.error.SpackError:
tty.warn(
llnl.util.tty.warn(
"Constructed spec has a string representation but the string"
" representation does not evaluate to a valid spec: {0}".format(str(spec))
)
@@ -108,24 +109,20 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True
def path_to_dict(search_paths: List[str]) -> Dict[str, str]:
def path_to_dict(search_paths: List[str]):
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib: Dict[str, str] = {}
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
# entry overrides later entries
for search_path in reversed(search_paths):
try:
dir_iter = os.scandir(search_path)
with os.scandir(search_path) as entries:
path_to_lib.update(
{entry.path: entry.name for entry in entries if entry.is_file()}
)
except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
continue
with dir_iter as entries:
for entry in entries:
try:
if entry.is_file():
path_to_lib[entry.path] = entry.name
except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
msg = f"cannot scan '{search_path}' for external software: {str(e)}"
llnl.util.tty.debug(msg)
return path_to_lib
@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if spack.repo.is_package_module(cls.__module__):
if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
@@ -144,6 +144,7 @@ class Foo(Package):
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
@@ -284,7 +284,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
Raised RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)
assert path.startswith(self.root), f"PATH: {path}, ROOT: {self.root}"
if deprecated:
if os.path.exists(path):
@@ -31,6 +31,7 @@
import spack.repo
import spack.schema.env
import spack.spec
import spack.spec_list
import spack.store
import spack.user_environment as uenv
import spack.util.environment
@@ -43,10 +44,10 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables
from ..enums import ConfigScopePriority
from .list import SpecList, SpecListError, SpecListParser
SpecPair = spack.concretize.SpecPair
@@ -931,10 +932,8 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self.new_specs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {}
#: Parser for spec lists
self._spec_lists_parser = SpecListParser()
#: Specs from "spack.yaml"
self.spec_lists: Dict[str, SpecList] = {}
self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
#: User specs from the last concretization
self.concretized_user_specs: List[Spec] = []
#: Roots associated with the last concretization, in order
@@ -1002,6 +1001,26 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)
def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True
assert len(entry) == 1
if when:
for name, spec_list in entry.items():
if name == "when":
continue
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs
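The entries this method consumes look like the following once parsed from the manifest (a hypothetical fragment, written as Python data rather than YAML): exactly one named list per entry, plus an optional "when" condition string.

    entry = {"when": "platform == 'linux'", "mylist": ["zlib", "hdf5"]}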
def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.
@@ -1063,24 +1082,21 @@ def _process_concrete_includes(self):
def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
self.views = {}
self._sync_speclists()
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()
def _sync_speclists(self):
self.spec_lists = {}
self.spec_lists.update(
self._spec_lists_parser.parse_definitions(
data=spack.config.CONFIG.get("definitions", [])
)
)
for item in spack.config.get("definitions", []):
self._process_definition(item)
env_configuration = self.manifest[TOP_LEVEL_KEY]
spec_list = env_configuration.get(user_speclist_name, [])
self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs(
name=user_speclist_name, yaml_list=spec_list
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
)
self.spec_lists[user_speclist_name] = user_specs
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()
def all_concretized_user_specs(self) -> List[Spec]:
"""Returns all of the concretized user specs of the environment and
@@ -1151,7 +1167,9 @@ def clear(self, re_read=False):
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
and needs to be maintained when re-reading an existing environment.
"""
self.spec_lists = {}
self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()
self._dev_specs = {}
self.concretized_order = [] # roots of last concretize, in order
self.concretized_user_specs = [] # user specs from last concretize
@@ -1258,6 +1276,22 @@ def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)
def update_stale_references(self, from_list=None):
"""Iterate over spec lists updating references."""
if not from_list:
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)
# spec_lists is an OrderedDict to ensure lists read from the manifest
# are maintained in order, hence, all list entries after the modified
# list may refer to the modified list requiring stale references to be
# updated.
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1 :], index + 1
):
new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
speclist.update_reference(new_reference)
def add(self, user_spec, list_name=user_speclist_name):
"""Add a single user_spec (non-concretized) to the Environment
@@ -1277,17 +1311,18 @@ def add(self, user_spec, list_name=user_speclist_name):
elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
virtuals = spack.repo.PATH.provider_index.providers.keys()
if spec.name not in virtuals:
raise SpackEnvironmentError(f"no such package: {spec.name}")
msg = "no such package: %s" % spec.name
raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list
if not existing:
list_to_change.add(str(spec))
self.update_stale_references(list_name)
if list_name == user_speclist_name:
self.manifest.add_user_spec(str(user_spec))
else:
self.manifest.add_definition(str(user_spec), list_name=list_name)
self._sync_speclists()
return bool(not existing)
@@ -1331,17 +1366,18 @@ def change_existing_spec(
"There are no specs named {0} in {1}".format(match_spec.name, list_name)
)
elif len(matches) > 1 and not allow_changing_multiple_specs:
raise ValueError(f"{str(match_spec)} matches multiple specs")
raise ValueError("{0} matches multiple specs".format(str(match_spec)))
for idx, spec in matches:
override_spec = Spec.override(spec, change_spec)
self.spec_lists[list_name].replace(idx, str(override_spec))
if list_name == user_speclist_name:
self.manifest.override_user_spec(str(override_spec), idx=idx)
else:
self.manifest.override_definition(
str(spec), override=str(override_spec), list_name=list_name
)
self._sync_speclists()
self.update_stale_references(from_list=list_name)
def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""
@@ -1369,17 +1405,22 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")
old_specs = set(self.user_specs)
# Remove specs from the appropriate spec list
new_specs = set()
for spec in matches:
if spec not in list_to_change:
continue
try:
list_to_change.remove(spec)
except SpecListError as e:
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError as e:
# define new specs list
new_specs = set(self.user_specs)
msg = str(e)
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
else:
if list_name == user_speclist_name:
@@ -1387,11 +1428,7 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
else:
self.manifest.remove_definition(str(spec), list_name=list_name)
# Recompute "definitions" and user specs
self._sync_speclists()
new_specs = set(self.user_specs)
# If 'force', update stale concretized specs
# If force, update stale concretized specs
for spec in old_specs - new_specs:
if force and spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec)
@@ -1605,6 +1642,23 @@ def _concretize_separately(self, tests=False):
# Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict())
# Re-attach information on test dependencies
if tests:
# This is slow, but the information on test dependencies is lost
# after unification or when reading from a lockfile.
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
)
return concretized_specs
@property
@@ -1852,6 +1906,10 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
roots = self.concrete_roots()
specs = specs if specs is not None else roots
# Extract reporter arguments
reporter = install_args.pop("reporter", None)
report_file = install_args.pop("report_file", None)
# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = {
*install_args.get("overwrite", ()),
@@ -1864,7 +1922,12 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
*(s.dag_hash() for s in roots),
}
PackageInstaller([spec.package for spec in specs], **install_args).install()
try:
builder = PackageInstaller([spec.package for spec in specs], **install_args)
builder.install()
finally:
if reporter:
reporter.build_report(report_file, list(builder.reports.values()))
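As a hedged usage sketch (the reporter here is a stand-in; nothing beyond the build_report call shown above is assumed of the PR's reporter API):

# sketch: any object with a build_report(filename, records) method would do
class StubReporter:
    def build_report(self, filename, records):
        print(f"writing {len(records)} record(s) to {filename}")

# env is assumed to be an active spack.environment.Environment
env.install_specs(reporter=StubReporter(), report_file="install-report.txt")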
def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""
@@ -2312,12 +2375,8 @@ def update_environment_repository(self) -> None:
def _add_to_environment_repository(self, spec_node: Spec) -> None:
"""Add the root node of the spec to the environment repository"""
namespace: str = spec_node.namespace
repository = spack.repo.create_or_construct(
root=os.path.join(self.repos_path, namespace),
namespace=namespace,
package_api=spack.repo.PATH.get_repo(namespace).package_api,
)
repository_dir = os.path.join(self.repos_path, spec_node.namespace)
repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
pkg_dir = repository.dirname_for_package_name(spec_node.name)
fs.mkdirp(pkg_dir)
spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
@@ -2777,8 +2836,6 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
item[list_name].append(user_spec)
break
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True
def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2805,8 +2862,6 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2832,8 +2887,6 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):

View File

@@ -1,286 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import Any, Dict, List, NamedTuple, Optional, Union
import spack.spec
import spack.util.spack_yaml
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
class SpecList:
def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
self.name = name
self.yaml_list = yaml_list[:] if yaml_list is not None else []
# Expansions can be expensive to compute and difficult to keep updated
# We cache results and invalidate when self.yaml_list changes
self.specs_as_yaml_list = expanded_list or []
self._constraints = None
self._specs: Optional[List[Spec]] = None
@property
def is_matrix(self):
for item in self.specs_as_yaml_list:
if isinstance(item, dict):
return True
return False
@property
def specs_as_constraints(self):
if self._constraints is None:
constraints = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict): # matrix of specs
constraints.extend(_expand_matrix_constraints(item))
else: # individual spec
constraints.append([Spec(item)])
self._constraints = constraints
return self._constraints
@property
def specs(self) -> List[Spec]:
if self._specs is None:
specs: List[Spec] = []
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:
spec = constraint_list[0].copy()
for const in constraint_list[1:]:
spec.constrain(const)
specs.append(spec)
self._specs = specs
return self._specs
def add(self, spec: Spec):
spec_str = str(spec)
self.yaml_list.append(spec_str)
# expanded list can be updated without invalidation
if self.specs_as_yaml_list is not None:
self.specs_as_yaml_list.append(spec_str)
# Invalidate cache variables when we change the list
self._constraints = None
self._specs = None
def remove(self, spec):
# Get spec to remove from list
remove = [
s
for s in self.yaml_list
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
msg += f"Either {spec} is not in {self.name} or {spec} is "
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
self.specs_as_yaml_list.remove(item)
# invalidate cache variables when we change the list
self._constraints = None
self._specs = None
def extend(self, other: "SpecList", copy_reference=True) -> None:
self.yaml_list.extend(other.yaml_list)
self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
self._constraints = None
self._specs = None
def __len__(self):
return len(self.specs)
def __getitem__(self, key):
return self.specs[key]
def __iter__(self):
return iter(self.specs)
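A short usage sketch of SpecList, assuming it runs inside a Spack session where Spec parsing is available:

# sketch: SpecList caches parsed Specs and invalidates the cache on mutation
sl = SpecList(name="demo", yaml_list=["zlib", "hdf5+mpi"],
              expanded_list=["zlib", "hdf5+mpi"])
print(len(sl))           # 2 parsed Spec objects
sl.add(Spec("cmake"))    # appends to both the yaml and expanded lists
sl.remove("zlib")        # raises SpecListError for matrix-expanded entries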
def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
for row in matrix_config["matrix"]:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
[
[" ".join([str(c) for c in expanded_constraint_list])]
for expanded_constraint_list in _expand_matrix_constraints(r)
]
)
else:
new_row.append([r])
expanded_rows.append(new_row)
excludes = matrix_config.get("exclude", []) # only compute once
sigil = matrix_config.get("sigil", "")
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.spec.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
# Resolve abstract hashes for exclusion criteria
if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
continue
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
# Add to list of constraints
results.append(flat_combo)
return results
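Concretely, a two-row matrix expands to the cross product of its rows minus the excludes; a worked example, assuming the helper is called directly in a Spack session:

# worked example: 2 x 2 matrix -> 4 combinations, 1 excluded
matrix = {
    "matrix": [["zlib", "hdf5"], ["+shared", "~shared"]],
    "exclude": ["hdf5~shared"],
}
for combo in _expand_matrix_constraints(matrix):
    print([str(c) for c in combo])   # three rows survive the exclude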
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:
item["sigil"] = sigil
return item
else:
return sigil + item
class Definition(NamedTuple):
name: str
yaml_list: List[Union[str, Dict]]
when: Optional[str]
class SpecListParser:
"""Parse definitions and user specs from data in environments"""
def __init__(self):
self.definitions: Dict[str, SpecList] = {}
def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
definitions_from_yaml: Dict[str, List[Definition]] = {}
for item in data:
value = self._parse_yaml_definition(item)
definitions_from_yaml.setdefault(value.name, []).append(value)
self.definitions = {}
self._build_definitions(definitions_from_yaml)
return self.definitions
def parse_user_specs(self, *, name, yaml_list) -> SpecList:
definition = Definition(name=name, yaml_list=yaml_list, when=None)
return self._speclist_from_definitions(name, [definition])
def _parse_yaml_definition(self, yaml_entry) -> Definition:
when_string = yaml_entry.get("when")
if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
attributes = ", ".join(x for x in yaml_entry if x != "when")
error_msg = f"definition must have a single attribute, got many: {attributes}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
for name, yaml_list in yaml_entry.items():
if name == "when":
continue
return Definition(name=name, yaml_list=yaml_list, when=when_string)
# If we are here, it means only "when" is in the entry
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
error_msg = "definition must have a single attribute, got none"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
for name, definitions in definitions_from_yaml.items():
self.definitions[name] = self._speclist_from_definitions(name, definitions)
def _speclist_from_definitions(self, name, definitions) -> SpecList:
combined_yaml_list = []
for def_part in definitions:
if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
continue
combined_yaml_list.extend(def_part.yaml_list)
expanded_list = self._expand_yaml_list(combined_yaml_list)
return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)
def _expand_yaml_list(self, raw_yaml_list):
result = []
for item in raw_yaml_list:
if isinstance(item, str) and item.startswith("$"):
result.extend(self._expand_reference(item))
continue
value = item
if isinstance(item, dict):
value = self._expand_yaml_matrix(item)
result.append(value)
return result
def _expand_reference(self, item: str):
sigil, name = "", item[1:]
if name.startswith("^") or name.startswith("%"):
sigil, name = name[0], name[1:]
if name not in self.definitions:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
error_msg = f"trying to expand the name '{name}', which is not defined yet"
raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")
value = self.definitions[name].specs_as_yaml_list
if not sigil:
return value
return [_sigilify(x, sigil) for x in value]
def _expand_yaml_matrix(self, matrix_yaml):
extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
if extra_attributes:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
if "matrix" not in matrix_yaml:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = "matrix is missing the 'matrix' attribute"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
# Assume data has been validated against the YAML schema
result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
if "exclude" in matrix_yaml:
result["exclude"] = matrix_yaml["exclude"]
return result
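A sketch of the parser in use, with plain Python data standing in for YAML-loaded content (the when-expression follows the conditional syntax used by environment definitions):

parser = SpecListParser()
parser.parse_definitions(
    data=[
        {"compilers": ["gcc", "clang"]},
        {"packages": ["zlib", "hdf5"], "when": "platform == 'linux'"},
    ]
)
user = parser.parse_user_specs(name="specs", yaml_list=["$packages", "$compilers"])
print(user.specs_as_yaml_list)   # references expanded in definition order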
class SpecListError(SpackError):
"""Error class for all errors related to SpecList objects."""
class UndefinedReferenceError(SpecListError):
"""Error class for undefined references in Spack stacks."""
class InvalidSpecConstraintError(SpecListError):
"""Error class for invalid spec constraints at concretize time."""

View File

@@ -27,14 +27,11 @@
import os
import re
import shutil
import sys
import time
import urllib.error
import urllib.parse
import urllib.request
import urllib.response
from pathlib import PurePath
from typing import Callable, List, Mapping, Optional
from typing import List, Optional
import llnl.url
import llnl.util
@@ -222,114 +219,6 @@ def mirror_id(self):
"""BundlePackages don't have a mirror id."""
def _format_speed(total_bytes: int, elapsed: float) -> str:
"""Return a human-readable average download speed string."""
elapsed = 1 if elapsed <= 0 else elapsed # avoid divide by zero
speed = total_bytes / elapsed
if speed >= 1e9:
return f"{speed / 1e9:6.1f} GB/s"
elif speed >= 1e6:
return f"{speed / 1e6:6.1f} MB/s"
elif speed >= 1e3:
return f"{speed / 1e3:6.1f} KB/s"
return f"{speed:6.1f} B/s"
def _format_bytes(total_bytes: int) -> str:
"""Return a human-readable total bytes string."""
if total_bytes >= 1e9:
return f"{total_bytes / 1e9:7.2f} GB"
elif total_bytes >= 1e6:
return f"{total_bytes / 1e6:7.2f} MB"
elif total_bytes >= 1e3:
return f"{total_bytes / 1e3:7.2f} KB"
return f"{total_bytes:7.2f} B"
class FetchProgress:
#: Characters to rotate in the spinner.
spinner = ["|", "/", "-", "\\"]
def __init__(
self,
total_bytes: Optional[int] = None,
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> None:
"""Initialize a FetchProgress instance.
Args:
total_bytes: Total number of bytes to download, if known.
enabled: Whether to print progress information.
get_time: Function to get the current time."""
#: Number of bytes downloaded so far.
self.current_bytes = 0
#: Delta time between progress prints
self.delta = 0.1
#: Whether to print progress information.
self.enabled = enabled
#: Function to get the current time.
self.get_time = get_time
#: Time of last progress print to limit output
self.last_printed = 0.0
#: Time of start of download
self.start_time = get_time() if enabled else 0.0
#: Total number of bytes to download, if known.
self.total_bytes = total_bytes if total_bytes and total_bytes > 0 else 0
#: Index of spinner character to print (used if total bytes is unknown)
self.index = 0
@classmethod
def from_headers(
cls,
headers: Mapping[str, str],
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> "FetchProgress":
"""Create a FetchProgress instance from HTTP headers."""
# headers.get is case-insensitive if it comes from an HTTPResponse object.
content_length = headers.get("Content-Length")
try:
total_bytes = int(content_length) if content_length else None
except ValueError:
total_bytes = None
return cls(total_bytes=total_bytes, enabled=enabled, get_time=get_time)
def advance(self, num_bytes: int, out=sys.stdout) -> None:
if not self.enabled:
return
self.current_bytes += num_bytes
self.print(out=out)
def print(self, final: bool = False, out=sys.stdout) -> None:
if not self.enabled:
return
current_time = self.get_time()
if self.last_printed + self.delta < current_time or final:
self.last_printed = current_time
# print a newline if this is the final update
maybe_newline = "\n" if final else ""
# if we know the total bytes, show a percentage, otherwise a spinner
if self.total_bytes > 0:
percentage = min(100 * self.current_bytes / self.total_bytes, 100.0)
percent_or_spinner = f"[{percentage:3.0f}%] "
else:
# only show the spinner if we are not at 100%
if final:
percent_or_spinner = "[100%] "
else:
percent_or_spinner = f"[ {self.spinner[self.index]} ] "
self.index = (self.index + 1) % len(self.spinner)
print(
f"\r {percent_or_spinner}{_format_bytes(self.current_bytes)} "
f"@ {_format_speed(self.current_bytes, current_time - self.start_time)}"
f"{maybe_newline}",
end="",
flush=True,
file=out,
)
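A sketch of how this class was driven before the refactor: a chunked read loop advances the counter, and a final print emits the closing newline (this mirrors the fetch loop removed below):

progress = FetchProgress(total_bytes=1024, enabled=True)
for _ in range(4):
    progress.advance(256)        # rate-limited progress line on stdout
progress.print(final=True)       # "[100%]    1.02 KB @ ..." plus newline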
@fetcher
class URLFetchStrategy(FetchStrategy):
"""URLFetchStrategy pulls source code from a URL for an archive, check the
@@ -427,7 +316,7 @@ def _check_headers(self, headers):
tty.warn(msg)
@_needs_stage
def _fetch_urllib(self, url, chunk_size=65536):
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
@@ -438,15 +327,8 @@ def _fetch_urllib(self, url, chunk_size=65536):
try:
response = web_util.urlopen(request)
tty.msg(f"Fetching {url}")
progress = FetchProgress.from_headers(response.headers, enabled=sys.stdout.isatty())
with open(save_file, "wb") as f:
while True:
chunk = response.read(chunk_size)
if not chunk:
break
f.write(chunk)
progress.advance(len(chunk))
progress.print(final=True)
shutil.copyfileobj(response, f)
except OSError as e:
# clean up archive on failure.
if self.archive_file:

View File

@@ -399,9 +399,10 @@ def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
"""
import spack.build_environment # avoid circular dependency
spack.build_environment.start_build_process(
ph = spack.build_environment.start_build_process(
self.pkg, test_process, kwargs, timeout=timeout
)
spack.build_environment.ProcessHandle.complete(ph)
def parts(self) -> int:
"""The total number of (checked) test parts."""

File diff suppressed because it is too large

View File

@@ -20,7 +20,6 @@
import signal
import subprocess as sp
import sys
import tempfile
import traceback
import warnings
from typing import List, Tuple
@@ -42,7 +41,6 @@
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.spec
import spack.store
import spack.util.debug
@@ -1048,10 +1046,6 @@ def main(argv=None):
try:
return _main(argv)
except spack.solver.asp.OutputDoesNotSatisfyInputError as e:
_handle_solver_bug(e)
return 1
except spack.error.SpackError as e:
tty.debug(e)
e.die() # gracefully die on any SpackErrors
@@ -1075,45 +1069,5 @@ def main(argv=None):
return 3
def _handle_solver_bug(
e: spack.solver.asp.OutputDoesNotSatisfyInputError, out=sys.stderr, root=None
) -> None:
# when the solver outputs specs that do not satisfy the input and spack is used as a command
# line tool, we dump the incorrect output specs to json so users can upload them in bug reports
wrong_output = [(input, output) for input, output in e.input_to_output if output is not None]
no_output = [input for input, output in e.input_to_output if output is None]
if no_output:
tty.error(
"internal solver error: the following specs were not solved:\n - "
+ "\n - ".join(str(s) for s in no_output),
stream=out,
)
if wrong_output:
msg = (
"internal solver error: the following specs were concretized, but do not satisfy the "
"input:\n - "
+ "\n - ".join(str(s) for s, _ in wrong_output)
+ "\n Please report a bug at https://github.com/spack/spack/issues"
)
# try to write the input/output specs to a temporary directory for bug reports
try:
tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
files = []
for i, (input, output) in enumerate(wrong_output, start=1):
in_file = os.path.join(tmpdir, f"input-{i}.json")
out_file = os.path.join(tmpdir, f"output-{i}.json")
files.append(in_file)
files.append(out_file)
with open(in_file, "w", encoding="utf-8") as f:
input.to_json(f)
with open(out_file, "w", encoding="utf-8") as f:
output.to_json(f)
msg += " and attach the following files:\n - " + "\n - ".join(files)
except Exception:
msg += "."
tty.error(msg, stream=out)
class SpackCommandError(Exception):
"""Raised when SpackCommand execution fails."""

View File

@@ -162,7 +162,6 @@ class tty:
configure: Executable
make_jobs: int
make: MakeExecutable
nmake: Executable
ninja: MakeExecutable
python_include: str
python_platlib: str

View File

@@ -14,6 +14,7 @@
import functools
import glob
import hashlib
import importlib
import io
import os
import re
@@ -27,7 +28,7 @@
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import ClassProperty, classproperty, memoized
from llnl.util.lang import classproperty, memoized
import spack.config
import spack.dependency
@@ -47,7 +48,6 @@
import spack.url
import spack.util.environment
import spack.util.executable
import spack.util.naming
import spack.util.path
import spack.util.web
import spack.variant
@@ -701,10 +701,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
_verbose = None
#: Package homepage where users can find more information about the package
homepage: ClassProperty[Optional[str]] = None
homepage: Optional[str] = None
#: Default list URL (place to find available versions)
list_url: ClassProperty[Optional[str]] = None
list_url: Optional[str] = None
#: Link depth to which list_url should be searched for new versions
list_depth = 0
@@ -818,12 +818,12 @@ def package_dir(cls):
@classproperty
def module(cls):
"""Module instance that this package class is defined in.
"""Module object (not just the name) that this package is defined in.
We use this to add variables to package modules. This makes
install() methods easier to write (e.g., can call configure())
"""
return sys.modules[cls.__module__]
return importlib.import_module(cls.__module__)
@classproperty
def namespace(cls):
@@ -839,36 +839,26 @@ def fullname(cls):
def fullnames(cls):
"""Fullnames for this package and any packages from which it inherits."""
fullnames = []
for base in cls.__mro__:
if not spack.repo.is_package_module(base.__module__):
for cls in cls.__mro__:
namespace = getattr(cls, "namespace", None)
if namespace:
fullnames.append("%s.%s" % (namespace, cls.name))
if namespace == "builtin":
# builtin packages cannot inherit from other repos
break
fullnames.append(base.fullname)
return fullnames
@classproperty
def name(cls):
"""The name of this package."""
"""The name of this package.
The name of a package is the name of its Python module, without
the containing module names.
"""
if cls._name is None:
# We cannot know the exact package API version, but we can distinguish between v1
# v2 based on the module. We don't want to figure out the exact package API version
# since it requires parsing the repo.yaml.
module = cls.__module__
if module.startswith(spack.repo.PKG_MODULE_PREFIX_V1):
version = (1, 0)
elif module.startswith(spack.repo.PKG_MODULE_PREFIX_V2):
version = (2, 0)
else:
raise ValueError(f"Package {cls.__qualname__} is not a known Spack package")
if version < (2, 0):
# spack.pkg.builtin.package_name.
_, _, pkg_module = module.rpartition(".")
else:
# spack_repo.builtin.packages.package_name.package
pkg_module = module.rsplit(".", 2)[-2]
cls._name = spack.util.naming.pkg_dir_to_pkg_name(pkg_module, version)
cls._name = cls.module.__name__
if "." in cls._name:
cls._name = cls._name[cls._name.rindex(".") + 1 :]
return cls._name
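For orientation, the branch above implements the following module-name to package-name mappings (module paths illustrative):

# API v1: "spack.pkg.builtin.hdf5"                       -> "hdf5"
# API v2: "spack_repo.builtin.packages.py_numpy.package" -> "py-numpy"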
@classproperty

View File

@@ -56,9 +56,8 @@
# read-only things in $spack/var/spack
repos_path = os.path.join(var_path, "repos")
test_repos_path = os.path.join(var_path, "test_repos")
packages_path = os.path.join(repos_path, "spack_repo", "builtin")
mock_packages_path = os.path.join(test_repos_path, "builtin.mock")
packages_path = os.path.join(repos_path, "builtin")
mock_packages_path = os.path.join(repos_path, "builtin.mock")
#
# Writable things in $spack/var/spack

View File

@@ -47,34 +47,40 @@
import spack.util.path
import spack.util.spack_yaml as syaml
PKG_MODULE_PREFIX_V1 = "spack.pkg."
PKG_MODULE_PREFIX_V2 = "spack_repo."
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
ROOT_PYTHON_NAMESPACE = "spack.pkg"
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
def is_package_module(fullname: str) -> bool:
"""Check if the given module is a package module."""
return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)
def python_package_for_repo(namespace):
"""Returns the full namespace of a repository, given its relative one
For instance:
python_package_for_repo('builtin') == 'spack.pkg.builtin'
Args:
namespace (str): repo namespace
"""
return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
def namespace_from_fullname(fullname: str) -> str:
def namespace_from_fullname(fullname):
"""Return the repository namespace only for the full module name.
For instance:
namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"
namespace_from_fullname('spack.pkg.builtin.hdf5') == 'builtin'
Args:
fullname: full name for the Python module
fullname (str): full name for the Python module
"""
if fullname.startswith(PKG_MODULE_PREFIX_V1):
namespace, _, _ = fullname.rpartition(".")
return namespace[len(PKG_MODULE_PREFIX_V1) :]
elif fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package"):
return ".".join(fullname.split(".")[1:-3])
return fullname
namespace, dot, module = fullname.rpartition(".")
prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
if namespace.startswith(prefix_and_dot):
namespace = namespace[len(prefix_and_dot) :]
return namespace
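Worked examples for the v2-aware variant shown first, matching its docstring:

assert namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
assert namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"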
class SpackNamespaceLoader:
@@ -86,14 +92,14 @@ def exec_module(self, module):
class ReposFinder:
"""MetaPathFinder class that loads a Python module corresponding to an API v1 Spack package.
"""MetaPathFinder class that loads a Python module corresponding to a Spack package.
Returns a loader based on the inspection of the current repository list.
"""
def __init__(self):
self._repo_init = _path
self._repo: Optional[RepoType] = None
self._repo = None
@property
def current_repository(self):
@@ -121,7 +127,7 @@ def find_spec(self, fullname, python_path, target=None):
raise RuntimeError('cannot reload module "{0}"'.format(fullname))
# Preferred API from https://peps.python.org/pep-0451/
if not fullname.startswith(PKG_MODULE_PREFIX_V1) and fullname != "spack.pkg":
if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
return None
loader = self.compute_loader(fullname)
@@ -129,17 +135,16 @@ def find_spec(self, fullname, python_path, target=None):
return None
return importlib.util.spec_from_loader(fullname, loader)
def compute_loader(self, fullname: str):
def compute_loader(self, fullname):
# namespaces are added to repo, and package modules are leaves.
namespace, dot, module_name = fullname.rpartition(".")
# If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
current_repo = self.current_repository
is_repo_path = isinstance(current_repo, RepoPath)
is_repo_path = isinstance(self.current_repository, RepoPath)
if is_repo_path:
repos = current_repo.repos
repos = self.current_repository.repos
else:
repos = [current_repo]
repos = [self.current_repository]
for repo in repos:
# We are using the namespace of the repo and the repo contains the package
@@ -156,9 +161,7 @@ def compute_loader(self, fullname: str):
# No repo provides the namespace, but it is a valid prefix of
# something in the RepoPath.
if is_repo_path and current_repo.by_namespace.is_prefix(
fullname[len(PKG_MODULE_PREFIX_V1) :]
):
if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
return SpackNamespaceLoader()
return None
@@ -176,12 +179,12 @@ def compute_loader(self, fullname: str):
NOT_PROVIDED = object()
def builtin_repo() -> "Repo":
def packages_path():
"""Get the test repo if it is active, otherwise the builtin repo."""
try:
return PATH.get_repo("builtin.mock")
return PATH.get_repo("builtin.mock").packages_path
except UnknownNamespaceError:
return PATH.get_repo("builtin")
return PATH.get_repo("builtin").packages_path
class GitExe:
@@ -189,25 +192,24 @@ class GitExe:
# invocations.
#
# Not using -C as that is not supported for git < 1.8.5.
def __init__(self, packages_path: str):
def __init__(self):
self._git_cmd = spack.util.git.git(required=True)
self.packages_dir = packages_path
def __call__(self, *args, **kwargs) -> str:
with working_dir(self.packages_dir):
return self._git_cmd(*args, **kwargs, output=str)
def __call__(self, *args, **kwargs):
with working_dir(packages_path()):
return self._git_cmd(*args, **kwargs)
def list_packages(rev: str, repo: "Repo") -> List[str]:
def list_packages(rev):
"""List all packages associated with the given revision"""
git = GitExe(repo.packages_path)
git = GitExe()
# git ls-tree does not support ... merge-base syntax, so do it manually
if rev.endswith("..."):
ref = rev.replace("...", "")
rev = git("merge-base", ref, "HEAD").strip()
rev = git("merge-base", ref, "HEAD", output=str).strip()
output = git("ls-tree", "-r", "--name-only", rev)
output = git("ls-tree", "-r", "--name-only", rev, output=str)
# recursively list the packages directory
package_paths = [
@@ -215,54 +217,54 @@ def list_packages(rev: str, repo: "Repo") -> List[str]:
]
# take the directory names with one-level-deep package files
package_names = [
nm.pkg_dir_to_pkg_name(line[0], repo.package_api)
for line in package_paths
if len(line) == 2
]
package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
return sorted(set(package_names))
return package_names
def diff_packages(rev1: str, rev2: str, repo: "Repo") -> Tuple[Set[str], Set[str]]:
def diff_packages(rev1, rev2):
"""Compute packages lists for the two revisions and return a tuple
containing all the packages in rev1 but not in rev2 and all the
packages in rev2 but not in rev1."""
p1 = set(list_packages(rev1, repo))
p2 = set(list_packages(rev2, repo))
p1 = set(list_packages(rev1))
p2 = set(list_packages(rev2))
return p1.difference(p2), p2.difference(p1)
def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -> Set[str]:
"""Get packages changed, added, or removed (or any combination of those) since a commit.
def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
"""Show packages changed, added, or removed (or any combination of those)
since a commit.
Arguments:
type: String containing one or more of 'A', 'R', 'C'
rev1: Revision to compare against, default is 'HEAD^'
rev2: Revision to compare to rev1, default is 'HEAD'
type (str): String containing one or more of 'A', 'R', 'C'
rev1 (str): Revision to compare against, default is 'HEAD^'
rev2 (str): Revision to compare to rev1, default is 'HEAD'
Returns:
A set containing the names of affected packages.
"""
lower_type = type.lower()
if not re.match("^[arc]*$", lower_type):
tty.die(
f"Invalid change type: '{type}'. "
"Can contain only A (added), R (removed), or C (changed)"
"Invald change type: '%s'." % type,
"Can contain only A (added), R (removed), or C (changed)",
)
removed, added = diff_packages(rev1, rev2, repo)
removed, added = diff_packages(rev1, rev2)
git = GitExe(repo.packages_path)
out = git("diff", "--relative", "--name-only", rev1, rev2).strip()
git = GitExe()
out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
lines = [] if not out else re.split(r"\s+", out)
changed: Set[str] = set()
changed = set()
for path in lines:
dir_name, _, _ = path.partition("/")
pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api)
pkg_name, _, _ = path.partition("/")
if pkg_name not in added and pkg_name not in removed:
changed.add(pkg_name)
packages: Set[str] = set()
packages = set()
if "a" in lower_type:
packages |= added
if "r" in lower_type:
@@ -273,14 +275,14 @@ def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -
return packages
def add_package_to_git_stage(packages: List[str], repo: "Repo") -> None:
def add_package_to_git_stage(packages):
"""add a package to the git stage with `git add`"""
git = GitExe(repo.packages_path)
git = GitExe()
for pkg_name in packages:
filename = PATH.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die(f"No such package: {pkg_name}. Path does not exist:", filename)
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
git("add", filename)
@@ -350,10 +352,9 @@ class FastPackageChecker(collections.abc.Mapping):
#: Global cache, reused by every instance
_paths_cache: Dict[str, Dict[str, os.stat_result]] = {}
def __init__(self, packages_path: str, package_api: Tuple[int, int]):
def __init__(self, packages_path):
# The path of the repository managed by this instance
self.packages_path = packages_path
self.package_api = package_api
# If the cache we need is not there yet, then build it appropriately
if packages_path not in self._paths_cache:
@@ -378,38 +379,41 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
# Create a dictionary that will store the mapping between a
# package name and its stat info
cache: Dict[str, os.stat_result] = {}
with os.scandir(self.packages_path) as entries:
for entry in entries:
# Construct the file name from the directory
pkg_file = os.path.join(entry.path, package_file_name)
for pkg_name in os.listdir(self.packages_path):
# Skip non-directories in the package root.
pkg_dir = os.path.join(self.packages_path, pkg_name)
try:
sinfo = os.stat(pkg_file)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn(f"Can't read package file {pkg_file}.")
continue
raise e
# If it's not a file, skip it.
if not stat.S_ISREG(sinfo.st_mode):
continue
# Only consider package.py files in directories that are valid module names under
# the current package API
if not nm.valid_module_name(entry.name, self.package_api):
x, y = self.package_api
# Warn about invalid names that look like packages.
if not nm.valid_module_name(pkg_name):
if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
tty.warn(
f"Package {pkg_file} cannot be used because `{entry.name}` is not a valid "
f"Spack package module name for Package API v{x}.{y}."
'Skipping package at {0}. "{1}" is not '
"a valid Spack module name.".format(pkg_dir, pkg_name)
)
continue
continue
# Store the stat info by package name.
cache[nm.pkg_dir_to_pkg_name(entry.name, self.package_api)] = sinfo
# Construct the file name from the directory
pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)
# Use stat here to avoid lots of calls to the filesystem.
try:
sinfo = os.stat(pkg_file)
except OSError as e:
if e.errno == errno.ENOENT:
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn("Can't read package file %s." % pkg_file)
continue
raise e
# If it's not a file, skip it.
if stat.S_ISDIR(sinfo.st_mode):
continue
# If it is a file, then save the stats under the
# appropriate key
cache[pkg_name] = sinfo
return cache
@@ -684,7 +688,7 @@ def put_first(self, repo: "Repo") -> None:
return
self.repos.insert(0, repo)
self.by_namespace[repo.namespace] = repo
self.by_namespace[repo.full_namespace] = repo
def put_last(self, repo):
"""Add repo last in the search path."""
@@ -696,8 +700,8 @@ def put_last(self, repo):
self.repos.append(repo)
# don't mask any higher-precedence repos with same namespace
if repo.namespace not in self.by_namespace:
self.by_namespace[repo.namespace] = repo
if repo.full_namespace not in self.by_namespace:
self.by_namespace[repo.full_namespace] = repo
def remove(self, repo):
"""Remove a repo from the search path."""
@@ -706,9 +710,10 @@ def remove(self, repo):
def get_repo(self, namespace: str) -> "Repo":
"""Get a repository by namespace."""
if namespace not in self.by_namespace:
full_namespace = python_package_for_repo(namespace)
if full_namespace not in self.by_namespace:
raise UnknownNamespaceError(namespace)
return self.by_namespace[namespace]
return self.by_namespace[full_namespace]
def first_repo(self) -> Optional["Repo"]:
"""Get the first repo in precedence order."""
@@ -816,9 +821,10 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
# If the spec already has a namespace, then return the
# corresponding repo if we know about it.
if namespace:
if namespace not in self.by_namespace:
fullspace = python_package_for_repo(namespace)
if fullspace not in self.by_namespace:
raise UnknownNamespaceError(namespace, name=name)
return self.by_namespace[namespace]
return self.by_namespace[fullspace]
# If there's no namespace, search in the RepoPath.
for repo in self.repos:
@@ -839,15 +845,8 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
return self.repo_for_pkg(spec).get(spec)
def python_paths(self) -> List[str]:
"""Return a list of all the Python paths in the repos."""
return [repo.python_path for repo in self.repos if repo.python_path]
def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
"""Find a class for the spec's package and return the class object."""
for p in self.python_paths():
if p not in sys.path:
sys.path.insert(0, p)
return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
@autospec
@@ -943,30 +942,6 @@ def _parse_package_api_version(
)
def _validate_and_normalize_subdir(subdir: Any, root: str, package_api: Tuple[int, int]) -> str:
if not isinstance(subdir, str):
raise BadRepoError(f"Invalid subdirectory '{subdir}' in '{root}'. Must be a string")
if package_api < (2, 0):
return subdir # In v1.x we did not validate subdir names
if subdir in (".", ""):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Use a symlink packages -> . instead"
)
# Otherwise we expect a directory name (not path) that can be used as a Python module.
if os.sep in subdir:
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Expected a directory name, not a path"
)
if not nm.valid_module_name(subdir, package_api):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Must be a valid Python module name"
)
return subdir
class Repo:
"""Class representing a package repository in the filesystem.
@@ -987,8 +962,6 @@ class Repo:
:py:data:`spack.package_api_version`.
"""
namespace: str
def __init__(
self,
root: str,
@@ -1018,79 +991,32 @@ def check(condition, msg):
# Read configuration and validate namespace
config = self._read_config()
self.package_api = _parse_package_api_version(config)
self.subdirectory = _validate_and_normalize_subdir(
config.get("subdirectory", packages_dir_name), root, self.package_api
)
self.packages_path = os.path.join(self.root, self.subdirectory)
check(
os.path.isdir(self.packages_path),
f"No directory '{self.subdirectory}' found in '{root}'",
"namespace" in config,
f"{os.path.join(root, repo_config_name)} must define a namespace.",
)
# The parent dir of spack_repo/ which should be added to sys.path for api v2.x
self.python_path: Optional[str] = None
if self.package_api < (2, 0):
check(
"namespace" in config,
f"{os.path.join(root, repo_config_name)} must define a namespace.",
)
self.namespace = config["namespace"]
# Note: for Package API v1.x the namespace validation always had bugs, which won't be
# fixed for compatibility reasons. The regex is missing "$" at the end, and it claims
# to test for valid identifiers, but fails to split on `.` first.
check(
isinstance(self.namespace, str)
and re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
"Namespaces must be valid python identifiers separated by '.'",
)
else:
# From Package API v2.0 the namespace follows from the directory structure.
check(
f"{os.sep}spack_repo{os.sep}" in self.root,
f"Invalid repository path '{self.root}'. "
f"Path must contain 'spack_repo{os.sep}'",
)
derived_namespace = self.root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
if "namespace" in config:
self.namespace = config["namespace"]
check(
isinstance(self.namespace, str) and self.namespace == derived_namespace,
f"Namespace '{self.namespace}' should be {derived_namespace} or omitted in "
f"{os.path.join(root, repo_config_name)}",
)
else:
self.namespace = derived_namespace
# strip the namespace directories from the root path to get the python path
# e.g. /my/pythonpath/spack_repo/x/y/z -> /my/pythonpath
python_path = self.root
for _ in self.namespace.split("."):
python_path = os.path.dirname(python_path)
self.python_path = os.path.dirname(python_path)
# check that all subdirectories are valid module names
check(
all(nm.valid_module_name(x, self.package_api) for x in self.namespace.split(".")),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'",
)
self.namespace: str = config["namespace"]
check(
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
"Namespaces must be valid python identifiers separated by '.'",
)
# Set up 'full_namespace' to include the super-namespace
if self.package_api < (2, 0):
self.full_namespace = f"{PKG_MODULE_PREFIX_V1}{self.namespace}"
elif self.subdirectory == ".":
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}"
else:
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}.{self.subdirectory}"
self.full_namespace = python_package_for_repo(self.namespace)
# Keep name components around for checking prefixes.
self._names = self.full_namespace.split(".")
packages_dir: str = config.get("subdirectory", packages_dir_name)
self.packages_path = os.path.join(self.root, packages_dir)
check(
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
)
self.package_api = _parse_package_api_version(config)
# Class attribute overrides by package name
self.overrides = overrides or {}
@@ -1104,36 +1030,27 @@ def check(condition, msg):
self._repo_index: Optional[RepoIndex] = None
self._cache = cache
@property
def package_api_str(self) -> str:
return f"v{self.package_api[0]}.{self.package_api[1]}"
def finder(self, value: RepoPath) -> None:
self._finder = value
def real_name(self, import_name: str) -> Optional[str]:
"""Allow users to import Spack packages using Python identifiers.
In Package API v1.x, there was no canonical module name for a package, and a package's
directory was not necessarily a valid Python module name. In that case we have to guess the
package directory. From Package API v2.0 there is a one-to-one mapping between Spack
package names and Python module names, so there is no guessing.
A python identifier might map to many different Spack package
names due to hyphen/underscore ambiguity.
For Package API v1.x we support the following one-to-many mappings:
num3proxy -> 3proxy
Easy example:
num3proxy -> 3proxy
Ambiguous:
foo_bar -> foo_bar, foo-bar
More ambiguous:
foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
"""
if self.package_api >= (2, 0):
if nm.pkg_dir_to_pkg_name(import_name, package_api=self.package_api) in self:
return import_name
return None
if import_name in self:
return import_name
# For v1 generate the possible package names from a module name, and return the first
# package name that exists in this repo.
options = nm.possible_spack_module_names(import_name)
try:
options.remove(import_name)
@@ -1266,9 +1183,7 @@ def extensions_for(
def dirname_for_package_name(self, pkg_name: str) -> str:
"""Given a package name, get the directory containing its package.py file."""
_, unqualified_name = self.partition_package_name(pkg_name)
return os.path.join(
self.packages_path, nm.pkg_name_to_pkg_dir(unqualified_name, self.package_api)
)
return os.path.join(self.packages_path, unqualified_name)
def filename_for_package_name(self, pkg_name: str) -> str:
"""Get the filename for the module we should load for a particular
@@ -1285,7 +1200,7 @@ def filename_for_package_name(self, pkg_name: str) -> str:
@property
def _pkg_checker(self) -> FastPackageChecker:
if self._fast_package_checker is None:
self._fast_package_checker = FastPackageChecker(self.packages_path, self.package_api)
self._fast_package_checker = FastPackageChecker(self.packages_path)
return self._fast_package_checker
def all_package_names(self, include_virtuals: bool = False) -> List[str]:
@@ -1297,9 +1212,7 @@ def all_package_names(self, include_virtuals: bool = False) -> List[str]:
def package_path(self, name: str) -> str:
"""Get path to package.py file for this repo."""
return os.path.join(
self.packages_path, nm.pkg_name_to_pkg_dir(name, self.package_api), package_file_name
)
return os.path.join(self.packages_path, name, package_file_name)
def all_package_paths(self) -> Generator[str, None, None]:
for name in self.all_package_names():
@@ -1357,19 +1270,15 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
package. Then extracts the package class from the module
according to Spack's naming convention.
"""
_, pkg_name = self.partition_package_name(pkg_name)
fullname = f"{self.full_namespace}.{nm.pkg_name_to_pkg_dir(pkg_name, self.package_api)}"
if self.package_api >= (2, 0):
fullname += ".package"
namespace, pkg_name = self.partition_package_name(pkg_name)
class_name = nm.mod_to_class(pkg_name)
fullname = f"{self.full_namespace}.{pkg_name}"
class_name = nm.pkg_name_to_class_name(pkg_name)
if self.python_path and self.python_path not in sys.path:
sys.path.insert(0, self.python_path)
try:
with REPOS_FINDER.switch_repo(self._finder or self):
module = importlib.import_module(fullname)
except ImportError as e:
raise UnknownPackageError(fullname) from e
except ImportError:
raise UnknownPackageError(fullname)
except Exception as e:
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
raise RepoError(msg) from e
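A hedged usage sketch of the entry point this feeds (package name illustrative):

# resolve a package class through the active repository path
cls = spack.repo.PATH.get_pkg_class("zlib")
print(cls.__name__)   # -> "Zlib"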
@@ -1460,71 +1369,46 @@ def partition_package_name(pkg_name: str) -> Tuple[str, str]:
return namespace, pkg_name
def get_repo_yaml_dir(
root: str, namespace: Optional[str], package_api: Tuple[int, int]
) -> Tuple[str, str]:
"""Returns the directory where repo.yaml is located and the effective namespace."""
if package_api < (2, 0):
namespace = namespace or os.path.basename(root)
# This ad-hoc regex is kept for historical reasons and must not be changed in a breaking way.
if not re.match(r"\w[\.\w-]*", namespace):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
return root, namespace
# Package API v2 has <root>/spack_repo/<namespace>/<subdir> structure and requires a namespace
if namespace is None:
raise InvalidNamespaceError("Namespace must be provided.")
# if namespace has dots those translate to subdirs of further namespace packages.
namespace_components = namespace.split(".")
if not all(nm.valid_module_name(n, package_api=package_api) for n in namespace_components):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace." % namespace)
return os.path.join(root, "spack_repo", *namespace_components), namespace
def create_repo(
root,
namespace: Optional[str] = None,
subdir: str = packages_dir_name,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[str, str]:
def create_repo(root, namespace=None, subdir=packages_dir_name):
"""Create a new repository in root with the specified namespace.
If the namespace is not provided, use basename of root.
Return the canonicalized path and namespace of the created repository.
"""
root = spack.util.path.canonicalize_path(root)
repo_yaml_dir, namespace = get_repo_yaml_dir(os.path.abspath(root), namespace, package_api)
if not namespace:
namespace = os.path.basename(root)
existed = True
try:
dir_entry = next(os.scandir(repo_yaml_dir), None)
except OSError as e:
if e.errno == errno.ENOENT:
existed = False
dir_entry = None
else:
raise BadRepoError(f"Cannot create new repo in {root}: {e}")
if not re.match(r"\w[\.\w-]*", namespace):
raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
if dir_entry is not None:
raise BadRepoError(f"Cannot create new repo in {root}: directory is not empty.")
existed = False
if os.path.exists(root):
if os.path.isfile(root):
raise BadRepoError("File %s already exists and is not a directory" % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
if os.listdir(root):
raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
existed = True
config_path = os.path.join(repo_yaml_dir, repo_config_name)
subdir = _validate_and_normalize_subdir(subdir, root, package_api)
packages_path = os.path.join(repo_yaml_dir, subdir)
full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
if not os.access(parent, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
try:
config_path = os.path.join(root, repo_config_name)
packages_path = os.path.join(root, subdir)
fs.mkdirp(packages_path)
with open(config_path, "w", encoding="utf-8") as config:
config.write("repo:\n")
config.write(f" namespace: '{namespace}'\n")
if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n")
x, y = package_api
x, y = spack.package_api_version
config.write(f" api: v{x}.{y}\n")
except OSError as e:
@@ -1537,27 +1421,22 @@ def create_repo(
raise BadRepoError(
"Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
) from e
)
return repo_yaml_dir, namespace
return full_path, namespace
def from_path(path: str) -> Repo:
def from_path(path: str) -> "Repo":
"""Returns a repository from the path passed as input. Injects the global misc cache."""
return Repo(path, cache=spack.caches.MISC_CACHE)
def create_or_construct(
root: str,
namespace: Optional[str] = None,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Repo:
def create_or_construct(path, namespace=None):
"""Create a repository, or just return a Repo if it already exists."""
repo_yaml_dir, _ = get_repo_yaml_dir(root, namespace, package_api)
if not os.path.exists(repo_yaml_dir):
fs.mkdirp(root)
create_repo(root, namespace=namespace, package_api=package_api)
return from_path(repo_yaml_dir)
if not os.path.exists(path):
fs.mkdirp(path)
create_repo(path, namespace)
return from_path(path)
def _path(configuration=None):
@@ -1635,10 +1514,8 @@ class MockRepositoryBuilder:
"""Build a mock repository in a directory"""
def __init__(self, root_directory, namespace=None):
namespace = namespace or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
repo_root = os.path.join(root_directory, namespace)
os.mkdir(repo_root)
self.root, self.namespace = create_repo(repo_root, namespace)
namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
self.root, self.namespace = create_repo(str(root_directory), namespace)
def add_package(self, name, dependencies=None):
"""Create a mock package in the repository, using a Jinja2 template.
@@ -1650,7 +1527,7 @@ def add_package(self, name, dependencies=None):
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
"""
dependencies = dependencies or []
context = {"cls_name": nm.pkg_name_to_class_name(name), "dependencies": dependencies}
context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
text = template.render(context)
package_py = self.recipe_filename(name)
@@ -1662,10 +1539,8 @@ def remove(self, name):
package_py = self.recipe_filename(name)
shutil.rmtree(os.path.dirname(package_py))
def recipe_filename(self, name: str):
return os.path.join(
self.root, "packages", nm.pkg_name_to_pkg_dir(name, package_api=(2, 0)), "package.py"
)
def recipe_filename(self, name):
return os.path.join(self.root, "packages", name, "package.py")
class RepoError(spack.error.SpackError):
@@ -1715,10 +1590,7 @@ def __init__(self, name, repo=None):
# We need to compare the base package name
pkg_name = name.rsplit(".", 1)[-1]
try:
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
except Exception:
similar = []
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
if 1 <= len(similar) <= 5:
long_msg += "\n\nDid you mean one of the following packages?\n "

View File

@@ -1,276 +1,131 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tools to produce reports of spec installations"""
"""Hooks to produce reports of spec installations"""
import collections
import contextlib
import functools
import gzip
import os
import time
import traceback
from typing import Any, Callable, Dict, List, Type
import llnl.util.lang
import llnl.util.filesystem as fs
import spack.build_environment
import spack.install_test
import spack.installer
import spack.package_base
import spack.reporters
import spack.spec
import spack.util.spack_json as sjson
reporter = None
report_file = None
Property = collections.namedtuple("Property", ["name", "value"])
class InfoCollector:
"""Base class for context manager objects that collect information during the execution of
certain package functions.
class Record(dict):
def __getattr__(self, name):
# only called if no attribute exists
if name in self:
return self[name]
raise AttributeError(f"RequestRecord for {self.name} has no attribute {name}")
The data collected is available through the ``specs`` attribute once exited, and it's
organized as a list where each item represents the installation of one spec.
"""
wrap_class: Type
do_fn: str
_backup_do_fn: Callable
input_specs: List[spack.spec.Spec]
specs: List[Dict[str, Any]]
def __init__(self, wrap_class: Type, do_fn: str, specs: List[spack.spec.Spec]):
#: Class for which to wrap a function
self.wrap_class = wrap_class
#: Action to be reported on
self.do_fn = do_fn
#: Backup of the wrapped class function
self._backup_do_fn = getattr(self.wrap_class, do_fn)
#: Specs that will be acted on
self.input_specs = specs
#: This is where we record the data that will be included in our report
self.specs: List[Dict[str, Any]] = []
def fetch_log(self, pkg: spack.package_base.PackageBase) -> str:
"""Return the stdout log associated with the function being monitored
Args:
pkg: package under consideration
"""
raise NotImplementedError("must be implemented by derived classes")
def extract_package_from_signature(self, instance, *args, **kwargs):
"""Return the package instance, given the signature of the wrapped function."""
raise NotImplementedError("must be implemented by derived classes")
def __enter__(self):
# Initialize the spec report with the data that is available upfront.
Property = collections.namedtuple("Property", ["name", "value"])
for input_spec in self.input_specs:
name_fmt = "{0}_{1}"
name = name_fmt.format(input_spec.name, input_spec.dag_hash(length=7))
spec_record = {
"name": name,
"nerrors": None,
"nfailures": None,
"npackages": None,
"time": None,
"timestamp": time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()),
"properties": [],
"packages": [],
}
spec_record["properties"].append(Property("architecture", input_spec.architecture))
self.init_spec_record(input_spec, spec_record)
self.specs.append(spec_record)
def gather_info(wrapped_fn):
"""Decorates a function to gather useful information for a CI report."""
@functools.wraps(wrapped_fn)
def wrapper(instance, *args, **kwargs):
pkg = self.extract_package_from_signature(instance, *args, **kwargs)
package = {
"name": pkg.name,
"id": pkg.spec.dag_hash(),
"elapsed_time": None,
"result": None,
"message": None,
"installed_from_binary_cache": False,
}
# Append the package to the correct spec report. In some
# cases it may happen that a spec that is asked to be
# installed explicitly will also be installed as a
# dependency of another spec. In this case append to both
# spec reports.
for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7))
try:
item = next((x for x in self.specs if x["name"] == name))
item["packages"].append(package)
except StopIteration:
pass
start_time = time.time()
try:
value = wrapped_fn(instance, *args, **kwargs)
package["stdout"] = self.fetch_log(pkg)
package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
self.on_success(pkg, kwargs, package)
return value
except spack.build_environment.InstallError as exc:
# An InstallError is considered a failure (the recipe
# didn't work correctly)
package["result"] = "failure"
package["message"] = exc.message or "Installation failure"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = exc.traceback
raise
except (Exception, BaseException) as exc:
# Everything else is an error (the installation
# failed outside of the child process)
package["result"] = "error"
package["message"] = str(exc) or "Unknown error"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = traceback.format_exc()
raise
finally:
package["elapsed_time"] = time.time() - start_time
return wrapper
setattr(self.wrap_class, self.do_fn, gather_info(getattr(self.wrap_class, self.do_fn)))
def on_success(self, pkg: spack.package_base.PackageBase, kwargs, package_record):
"""Add additional properties on function call success."""
raise NotImplementedError("must be implemented by derived classes")
def init_spec_record(self, input_spec: spack.spec.Spec, record):
"""Add additional entries to a spec record when entering the collection context."""
def __exit__(self, exc_type, exc_val, exc_tb):
# Restore the original method in PackageBase
setattr(self.wrap_class, self.do_fn, self._backup_do_fn)
for spec in self.specs:
spec["npackages"] = len(spec["packages"])
spec["nfailures"] = len([x for x in spec["packages"] if x["result"] == "failure"])
spec["nerrors"] = len([x for x in spec["packages"] if x["result"] == "error"])
spec["time"] = sum(float(x["elapsed_time"]) for x in spec["packages"])
def __setattr__(self, name, value):
if name.startswith("_"):
super().__setattr__(name, value)
else:
self[name] = value
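For orientation, the collector pattern above reduces to swapping a method on a class for a wrapped version in __enter__ and restoring the backup in __exit__. A minimal sketch of that pattern; MethodRecorder and its names are hypothetical, not Spack code:

import functools

class MethodRecorder:
    """Record calls to cls.method_name while the context is active."""
    def __init__(self, cls, method_name):
        self.cls, self.method_name = cls, method_name
        self.calls = []
        self._backup = getattr(cls, method_name)

    def __enter__(self):
        @functools.wraps(self._backup)
        def wrapper(instance, *args, **kwargs):
            self.calls.append((args, kwargs))  # gather info, then delegate
            return self._backup(instance, *args, **kwargs)
        setattr(self.cls, self.method_name, wrapper)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore the original method even if the wrapped call raised
        setattr(self.cls, self.method_name, self._backup)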
class BuildInfoCollector(InfoCollector):
"""Collect information for the PackageInstaller._install_task method.
Args:
specs: specs whose install information will be recorded
"""
class RequestRecord(Record):
def __init__(self, spec):
super().__init__()
self._spec = spec
self.name = spec.name
self.nerrors = None
self.nfailures = None
self.npackages = None
self.time = None
self.timestamp = time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime())
self.properties = [
Property("architecture", spec.architecture),
# Property("compiler", spec.compiler),
]
self.packages = []
def skip_installed(self):
for dep in filter(lambda x: x.installed, self._spec.traverse()):
record = InstallRecord(dep)
record.skip(msg="Spec already installed")
self.packages.append(record)
def __init__(self, specs: List[spack.spec.Spec]):
super().__init__(spack.installer.PackageInstaller, "_install_task", specs)
def append_record(self, record):
self.packages.append(record)
def init_spec_record(self, input_spec, record):
# Check which specs are already installed and mark them as skipped
for dep in filter(lambda x: x.installed, input_spec.traverse()):
package = {
"name": dep.name,
"id": dep.dag_hash(),
"elapsed_time": "0.0",
"result": "skipped",
"message": "Spec already installed",
}
record["packages"].append(package)
def summarize(self):
self.npackages = len(self.packages)
self.nfailures = len([r for r in self.packages if r.result == "failure"])
self.nerrors = len([r for r in self.packages if r.result == "error"])
self.time = sum(float(r.elapsed_time or 0.0) for r in self.packages)
def on_success(self, pkg, kwargs, package_record):
package_record["result"] = "success"
def fetch_log(self, pkg):
try:
if os.path.exists(pkg.install_log_path):
stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
else:
stream = open(pkg.log_path, encoding="utf-8")
with stream as f:
return f.read()
except OSError:
return f"Cannot open log for {pkg.spec.cshort_spec}"
class SpecRecord(Record):
def __init__(self, spec):
super().__init__()
self._spec = spec
self._package = spec.package
self._start_time = None
self.name = spec.name
self.id = spec.dag_hash()
self.elapsed_time = None
def start(self):
self._start_time = time.time()
def skip(self, msg):
self.result = "skipped"
self.elapsed_time = 0.0
self.message = msg
class InstallRecord(SpecRecord):
def __init__(self, spec):
super().__init__(spec)
self.result = None
self.message = None
self.installed_from_binary_cache = None
def fetch_log(self):
try:
if os.path.exists(self._package.install_log_path):
stream = gzip.open(self._package.install_log_path, "rt", encoding="utf-8")
else:
stream = open(self._package.log_path, encoding="utf-8")
with stream as f:
return f.read()
except OSError:
return f"Cannot open log for {self._spec.cshort_spec}"
def extract_package_from_signature(self, instance, *args, **kwargs):
return args[0].pkg
class TestInfoCollector(InfoCollector):
"""Collect information for the PackageBase.do_test method.
Args:
specs: specs whose test information will be recorded
record_directory: record directory for test log paths
"""
dir: str
def __init__(self, specs: List[spack.spec.Spec], record_directory: str):
super().__init__(spack.package_base.PackageBase, "do_test", specs)
self.dir = record_directory
def on_success(self, pkg, kwargs, package_record):
externals = kwargs.get("externals", False)
skip_externals = pkg.spec.external and not externals
if skip_externals:
package_record["result"] = "skipped"
package_record["result"] = "success"
def fetch_log(self, pkg: spack.package_base.PackageBase):
log_file = os.path.join(self.dir, spack.install_test.TestSuite.test_log_name(pkg.spec))
try:
with open(log_file, "r", encoding="utf-8") as stream:
return "".join(stream.readlines())
except Exception:
return f"Cannot open log for {pkg.spec.cshort_spec}"
def fetch_time(self):
try:
with open(self._package.times_log_path, "r", encoding="utf-8") as f:
data = sjson.load(f.read())
return data["total"]
except Exception:
return None
def extract_package_from_signature(self, instance, *args, **kwargs):
return instance
def succeed(self):
self.result = "success"
self.stdout = self.fetch_log()
self.installed_from_binary_cache = self._package.installed_from_binary_cache
assert self._start_time, "Start time is None"
self.elapsed_time = time.time() - self._start_time
@contextlib.contextmanager
def build_context_manager(
reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
):
"""Decorate a package to generate a report after the installation function is executed.
Args:
reporter: object that generates the report
filename: filename for the report
specs: specs that need reporting
"""
collector = BuildInfoCollector(specs)
try:
with collector:
yield
finally:
reporter.build_report(filename, specs=collector.specs)
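A usage sketch for the context manager above; the reporter, specs, and install call are placeholders rather than a verbatim Spack invocation:

# reporter: any spack.reporters.Reporter; specs: the specs being installed
with build_context_manager(reporter, "install-report.xml", specs):
    installer.install()  # hypothetical driver; the report is written in the
                         # finally block whether or not this raises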
@contextlib.contextmanager
def test_context_manager(
reporter: spack.reporters.Reporter,
filename: str,
specs: List[spack.spec.Spec],
raw_logs_dir: str,
):
"""Decorate a package to generate a report after the test function is executed.
Args:
reporter: object that generates the report
filename: filename for the report
specs: specs that need reporting
raw_logs_dir: record directory for test log paths
"""
collector = TestInfoCollector(specs, raw_logs_dir)
try:
with collector:
yield
finally:
reporter.test_report(filename, specs=collector.specs)
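The test-report variant is driven the same way; a sketch with illustrative names:

with test_context_manager(reporter, "test-report.xml", specs, raw_logs_dir="./test-logs"):
    run_test_suite()  # hypothetical stand-in for the real do_test driver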
def fail(self, exc):
if isinstance(exc, spack.build_environment.InstallError):
self.result = "failure"
self.message = exc.message or "Installation failure"
self.exception = exc.traceback
else:
self.result = "error"
self.message = str(exc) or "Unknown error"
self.exception = traceback.format_exc()
self.stdout = self.fetch_log() + self.message
assert self._start_time, "Start time is None"
self.elapsed_time = time.time() - self._start_time
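Taken together, the record classes form a small state machine: start() stamps the clock, then exactly one of succeed(), fail(), or skip() finalizes the record. A hedged sketch of the intended call sequence; the install step and the enclosing request record are illustrative:

record = InstallRecord(spec)  # spec: a concrete Spec with a package
record.start()
try:
    do_install(spec)          # placeholder for the real install step
    record.succeed()
except BaseException as exc:
    record.fail(exc)
    raise
finally:
    request_record.append_record(record)  # assuming an enclosing RequestRecord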

View File

@@ -278,6 +278,8 @@ def build_report(self, report_dir, specs):
self.multiple_packages = False
num_packages = 0
for spec in specs:
spec.summarize()
# Do not generate reports for packages that were installed
# from the binary cache.
spec["packages"] = [
@@ -362,6 +364,8 @@ def test_report(self, report_dir, specs):
"""Generate reports for each package in each spec."""
tty.debug("Processing test report")
for spec in specs:
spec.summarize()
duration = 0
if "time" in spec:
duration = int(spec["time"])

View File

@@ -17,12 +17,16 @@ def concretization_report(self, filename, msg):
pass
def build_report(self, filename, specs):
for spec in specs:
spec.summarize()
if not (os.path.splitext(filename))[1]:
# Ensure the report name will end with the proper extension;
# otherwise, it currently defaults to the "directory" name.
filename = filename + ".xml"
report_data = {"specs": specs}
with open(filename, "w", encoding="utf-8") as f:
env = spack.tengine.make_environment()
t = env.get_template(self._jinja_template)

View File

@@ -287,33 +287,9 @@ def specify(spec):
return spack.spec.Spec(spec)
def remove_facts(
*to_be_removed: str,
) -> Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]:
"""Returns a transformation function that removes facts from the input list of facts."""
def _remove(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
return list(filter(lambda x: x.args[0] not in to_be_removed, facts))
return _remove
def remove_build_deps(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
result = []
for x in facts:
current_name = x.args[1]
if current_name in build_deps:
x.name = "build_requirement"
result.append(fn.attr("build_requirement", build_deps[current_name], x))
continue
if x.args[0] == "depends_on":
continue
result.append(x)
return result
def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
"""Transformation that removes all "node" and "virtual_node" from the input list of facts."""
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
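To make the fact transformations concrete, here is a self-contained illustration that uses a namedtuple as a stand-in for AspFunction (the real class is defined elsewhere in the solver): both remove_facts and remove_node drop facts by their head symbol.

from collections import namedtuple

Fact = namedtuple("Fact", ["args"])  # stand-in for AspFunction

facts = [
    Fact(("node", "hdf5")),
    Fact(("virtual_node", "mpi")),
    Fact(("version", "hdf5", "1.14")),
]

def remove_by_head(*heads):
    return lambda spec, fs: [f for f in fs if f.args[0] not in heads]

kept = remove_by_head("node", "virtual_node")(None, facts)
assert [f.args[0] for f in kept] == ["version"]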
def all_libcs() -> Set[spack.spec.Spec]:
@@ -1311,8 +1287,12 @@ def on_model(model):
result.raise_if_unsat()
if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
raise OutputDoesNotSatisfyInputError(result.unsolved_specs)
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
)
if conc_cache_enabled:
CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
concretization_stats = self.control.statistics
@@ -1755,17 +1735,15 @@ def define_variant(
pkg_fact(fn.variant_condition(name, vid, cond_id))
# record type so we can construct the variant when we read it back in
self.gen.fact(fn.variant_type(vid, variant_def.variant_type.string))
self.gen.fact(fn.variant_type(vid, variant_def.variant_type.value))
if variant_def.sticky:
pkg_fact(fn.variant_sticky(vid))
# define defaults for this variant definition
if variant_def.multi:
for val in sorted(variant_def.make_default().values):
pkg_fact(fn.variant_default_value_from_package_py(vid, val))
else:
pkg_fact(fn.variant_default_value_from_package_py(vid, variant_def.default))
defaults = variant_def.make_default().value if variant_def.multi else [variant_def.default]
for val in sorted(defaults):
pkg_fact(fn.variant_default_value_from_package_py(vid, val))
# define possible values for this variant definition
values = variant_def.values
@@ -1793,9 +1771,7 @@ def define_variant(
# make a spec indicating whether the variant has this conditional value
variant_has_value = spack.spec.Spec()
variant_has_value.variants[name] = vt.VariantValue(
vt.VariantType.MULTI, name, (value.value,)
)
variant_has_value.variants[name] = vt.VariantBase(name, value.value)
if value.when:
# the conditional value is always "possible", but it imposes its when condition as
@@ -1908,7 +1884,7 @@ def condition(
if not context:
context = ConditionContext()
context.transform_imposed = remove_facts("node", "virtual_node")
context.transform_imposed = remove_node
if imposed_spec:
imposed_name = imposed_spec.name or imposed_name
@@ -2008,7 +1984,7 @@ def track_dependencies(input_spec, requirements):
return requirements + [fn.attr("track_dependencies", input_spec.name)]
def dependency_holds(input_spec, requirements):
result = remove_facts("node", "virtual_node")(input_spec, requirements) + [
result = remove_node(input_spec, requirements) + [
fn.attr(
"dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
)
@@ -2198,10 +2174,7 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
pkg_name, ConstraintOrigin.REQUIRE
)
if not virtual:
context.transform_required = remove_build_deps
context.transform_imposed = remove_facts(
"node", "virtual_node", "depends_on"
)
context.transform_imposed = remove_node
# else: for virtuals we want to emit "node" and
# "virtual_node" in imposed specs
@@ -2263,18 +2236,16 @@ def external_packages(self):
if pkg_name not in self.pkgs:
continue
self.gen.h2(f"External package: {pkg_name}")
# Check if the external package is buildable. If it is
# not then "external(<pkg>)" is a fact, unless we can
# reuse an already installed spec.
external_buildable = data.get("buildable", True)
externals = data.get("externals", [])
if not external_buildable or externals:
self.gen.h2(f"External package: {pkg_name}")
if not external_buildable:
self.gen.fact(fn.buildable_false(pkg_name))
# Read a list of all the specs for this package
externals = data.get("externals", [])
candidate_specs = [
spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
]
@@ -2377,7 +2348,7 @@ def preferred_variants(self, pkg_name):
)
continue
for value in variant.values:
for value in variant.value_as_tuple:
for variant_def in variant_defs:
self.variant_values_from_specs.add((pkg_name, id(variant_def), value))
self.gen.fact(
@@ -2492,10 +2463,10 @@ def _spec_clauses(
# TODO: variant="*" means 'variant is defined to something', which used to
# be meaningless in concretization, as all variants had to be defined. But
# now that variants can be conditional, it should force a variant to exist.
if not variant.values:
if variant.value == ("*",):
continue
for value in variant.values:
for value in variant.value_as_tuple:
# ensure that the value *can* be valid for the spec
if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
variant_defs = vt.prevalidate_variant_value(
@@ -2605,16 +2576,6 @@ def _spec_clauses(
# already-installed concrete specs.
if concrete_build_deps or dspec.depflag != dt.BUILD:
clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
elif not concrete_build_deps and dspec.depflag:
clauses.append(
fn.attr(
"concrete_build_dependency", spec.name, dep.name, dep.dag_hash()
)
)
for virtual_name in dspec.virtuals:
clauses.append(
fn.attr("virtual_on_build_edge", spec.name, dep.name, virtual_name)
)
# if the spec is abstract, descend into dependencies.
# if it's concrete, then the hashes above take care of dependency
@@ -3308,13 +3269,15 @@ def literal_specs(self, specs):
# These facts are needed to compute the "condition_set" of the root
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
elif clause_name == "depends_on":
pkg_name = clause.args[2]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
requirements.append(
fn.attr(
"virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
)
)
requirements = [x for x in requirements if x.args[0] != "depends_on"]
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
@@ -3832,13 +3795,13 @@ def node_os(self, node, os):
def node_target(self, node, target):
self._arch(node).target = target
def variant_selected(self, node, name: str, value: str, variant_type: str, variant_id):
def variant_selected(self, node, name, value, variant_type, variant_id):
spec = self._specs[node]
variant = spec.variants.get(name)
if not variant:
spec.variants[name] = vt.VariantValue.from_concretizer(name, value, variant_type)
spec.variants[name] = vt.VariantType(variant_type).variant_class(name, value)
else:
assert variant_type == "multi", (
assert variant_type == vt.VariantType.MULTI.value, (
f"Can't have multiple values for single-valued variant: "
f"{node}, {name}, {value}, {variant_type}, {variant_id}"
)
@@ -3862,17 +3825,6 @@ def external_spec_selected(self, node, idx):
)
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
# Annotate compiler specs from externals
external_spec = spack.spec.Spec(spec_info["spec"])
external_spec_deps = external_spec.dependencies()
if len(external_spec_deps) > 1:
raise InvalidExternalError(
f"external spec {spec_info['spec']} cannot have more than one dependency"
)
elif len(external_spec_deps) == 1:
compiler_str = external_spec_deps[0]
self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))
# If this is an extension, update the dependencies to include the extendee
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
extendee_spec = package.extendee_spec
@@ -4228,10 +4180,10 @@ def _inject_patches_variant(root: spack.spec.Spec) -> None:
continue
patches = list(spec_to_patches[id(spec)])
variant: vt.VariantValue = spec.variants.setdefault(
variant: vt.MultiValuedVariant = spec.variants.setdefault(
"patches", vt.MultiValuedVariant("patches", ())
)
variant.set(*(p.sha256 for p in patches))
variant.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey patches variant to store patches order
ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
ordered_hashes.sort()
@@ -4699,9 +4651,13 @@ def solve_in_rounds(
break
if not result.specs:
# This is also a problem: no specs were solved for, which means we would be in a
# loop if we tried again
raise OutputDoesNotSatisfyInputError(result.unsolved_specs)
# This is also a problem: no specs were solved for, which
# means we would be in a loop if we tried again
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: a subset of input specs could not"
f" be solved for.\n\t{unsolved_str}"
)
input_specs = list(x for (x, y) in result.unsolved_specs)
for spec in result.specs:
@@ -4731,19 +4687,6 @@ def __init__(self, msg):
self.constraint_type = None
class OutputDoesNotSatisfyInputError(InternalConcretizerError):
def __init__(
self, input_to_output: List[Tuple[spack.spec.Spec, Optional[spack.spec.Spec]]]
) -> None:
self.input_to_output = input_to_output
super().__init__(
"internal solver error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{Result.format_unsolved(input_to_output)}"
)
class SolverError(InternalConcretizerError):
"""For cases where the solver is unable to produce a solution.
@@ -4776,7 +4719,3 @@ class InvalidSpliceError(spack.error.SpackError):
class NoCompilerFoundError(spack.error.SpackError):
"""Raised when there is no possible compiler"""
class InvalidExternalError(spack.error.SpackError):
"""Raised when there is no possible compiler"""

View File

@@ -175,24 +175,12 @@ trigger_node(TriggerID, Node, Node) :-
% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
1 { literal_node(Root, node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
mentioned_in_literal(Root, Mentioned), Mentioned != Root,
internal_error("must have exactly one condition_set for literals").
1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
literal_node(Root, LiteralNode),
build(LiteralNode),
not external(LiteralNode),
attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).
condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
condition_set(LiteralNode, BuildNode) :- build_dependency_of_literal_node(LiteralNode, BuildNode).
:- build_dependency_of_literal_node(LiteralNode, BuildNode),
not attr("depends_on", LiteralNode, BuildNode, "build").
% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
solve_literal(TriggerID),
@@ -484,53 +472,10 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
imposed_constraint(ID, "depends_on", A1, A2, A3),
internal_error("Build deps must land in exactly one duplicate").
% If the parent is built, then we have a build_requirement on another node. For concrete nodes,
% or external nodes, we don't since we are trimming their build dependencies.
1 { attr("depends_on", node(X, Parent), node(0..Y-1, BuildDependency), "build") : max_dupes(BuildDependency, Y) } 1
1 { build_requirement(node(X, Parent), node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
build(node(X, Parent)),
not external(node(X, Parent)).
% Concrete nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
concrete(ParentNode),
not attr("concrete_build_dependency", ParentNode, BuildDependency, _).
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
:- attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
% External nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
external(ParentNode),
not attr("external_build_requirement", ParentNode, build_requirement("node", BuildDependency)).
candidate_external_version(Constraint, BuildDependency, Version)
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
external(ParentNode),
pkg_fact(BuildDependency, version_satisfies(Constraint, Version)).
error(100, "External {0} cannot satisfy both {1} and {2}", BuildDependency, LiteralConstraint, ExternalConstraint)
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, LiteralConstraint)),
external(ParentNode),
attr("external_build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, ExternalConstraint)),
not 1 { pkg_fact(BuildDependency, version_satisfies(ExternalConstraint, Version)) : candidate_external_version(LiteralConstraint, BuildDependency, Version) }.
% Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
% we have on externals
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
Parent == BuildDependency,
external(node(X, Parent)).
build_requirement(node(X, Parent), node(Y, BuildDependency)) :-
attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build"),
attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)).
impose(ID, node(X, Parent)),
imposed_constraint(ID,"build_requirement",Parent,_).
1 { virtual_build_requirement(ParentNode, node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1
:- attr("dependency_holds", ParentNode, Virtual, "build"),
@@ -551,6 +496,7 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
build_requirement(ParentNode, node(X, BuildDependency)).
attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).
1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
@@ -936,12 +882,6 @@ requirement_weight(node(ID, Package), Group, W) :-
requirement_policy(Package, Group, "one_of"),
requirement_group_satisfied(node(ID, Package), Group).
{ attr("build_requirement", node(ID, Package), BuildRequirement) : condition_requirement(TriggerID, "build_requirement", Package, BuildRequirement) } :-
pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
requirement_group_member(ConditionID, Package, Group),
activate_requirement(node(ID, Package), Group),
requirement_group(Package, Group).
requirement_group_satisfied(node(ID, Package), X) :-
1 { condition_holds(Y, node(ID, Package)) : requirement_group_member(Y, Package, X) } ,
requirement_policy(Package, X, "any_of"),

View File

@@ -85,10 +85,8 @@ def is_virtual(self, name: str) -> bool:
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
no_condition = spack.spec.Spec()
for when_spec, conditions in pkg_cls.requirements.items():
# Restrict analysis to unconditional requirements
if when_spec != no_condition:
if not when_spec.intersects(self._platform_condition):
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):

View File

@@ -111,14 +111,22 @@
__all__ = [
"CompilerSpec",
"Spec",
"SpecParseError",
"UnsupportedPropagationError",
"DuplicateDependencyError",
"DuplicateCompilerSpecError",
"UnsupportedCompilerError",
"DuplicateArchitectureError",
"InconsistentSpecError",
"InvalidDependencyError",
"NoProviderError",
"MultipleProviderError",
"UnsatisfiableSpecNameError",
"UnsatisfiableVersionSpecError",
"UnsatisfiableCompilerSpecError",
"UnsatisfiableCompilerFlagSpecError",
"UnsatisfiableArchitectureSpecError",
"UnsatisfiableProviderSpecError",
"UnsatisfiableDependencySpecError",
"AmbiguousHashError",
"InvalidHashError",
@@ -837,7 +845,7 @@ def _shared_subset_pair_iterate(container1, container2):
b_idx += 1
class FlagMap(lang.HashableMap[str, List[CompilerFlag]]):
class FlagMap(lang.HashableMap):
__slots__ = ("spec",)
def __init__(self, spec):
@@ -1429,7 +1437,7 @@ def with_compiler(self, compiler: "Spec") -> "SpecAnnotations":
def __repr__(self) -> str:
result = f"SpecAnnotations().with_spec_format({self.original_spec_format})"
if self.compiler_node_attribute:
result += f".with_compiler({str(self.compiler_node_attribute)})"
result += f"with_compiler({str(self.compiler_node_attribute)})"
return result
@@ -1698,9 +1706,7 @@ def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
result[key] = list(group)
return result
def _add_flag(
self, name: str, value: Union[str, bool], propagate: bool, concrete: bool
) -> None:
def _add_flag(self, name: str, value: str, propagate: bool, concrete: bool) -> None:
"""Called by the parser to add a known flag"""
if propagate and name in vt.RESERVED_NAMES:
@@ -1710,7 +1716,6 @@ def _add_flag(
valid_flags = FlagMap.valid_compiler_flags()
if name == "arch" or name == "architecture":
assert type(value) is str, "architecture have a string value"
parts = tuple(value.split("-"))
plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
self._set_architecture(platform=plat, os=os, target=tgt)
@@ -1724,15 +1729,17 @@ def _add_flag(
self.namespace = value
elif name in valid_flags:
assert self.compiler_flags is not None
assert type(value) is str, f"{name} must have a string value"
flags_and_propagation = spack.compilers.flags.tokenize_flags(value, propagate)
flag_group = " ".join(x for (x, y) in flags_and_propagation)
for flag, propagation in flags_and_propagation:
self.compiler_flags.add_flag(name, flag, propagation, flag_group)
else:
self.variants[name] = vt.VariantValue.from_string_or_bool(
name, value, propagate=propagate, concrete=concrete
)
if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
self.variants[name] = vt.BoolValuedVariant(name, value, propagate)
elif concrete:
self.variants[name] = vt.MultiValuedVariant(name, value, propagate)
else:
self.variants[name] = vt.VariantBase(name, value, propagate)
def _set_architecture(self, **kwargs):
"""Called by the parser to set the architecture."""
@@ -1861,7 +1868,9 @@ def add_dependency_edge(
@property
def fullname(self):
return (
f"{self.namespace}.{self.name}" if self.namespace else (self.name if self.name else "")
("%s.%s" % (self.namespace, self.name))
if self.namespace
else (self.name if self.name else "")
)
@property
@@ -3392,7 +3401,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return True
# If we have no dependencies, we can't satisfy any constraints.
if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
if not self._dependencies:
return False
# If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
@@ -3403,7 +3412,6 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# verify the edge properties, cause everything is encoded in the hash of the nodes that
# will be verified later.
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
mock_nodes_from_old_specfiles = set()
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
@@ -3425,27 +3433,13 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
except KeyError:
return False
if current_node.original_spec_format() < 5 or (
current_node.original_spec_format() >= 5 and current_node.external
):
compiler_spec = current_node.annotations.compiler_node_attribute
if compiler_spec is None:
return False
mock_nodes_from_old_specfiles.add(compiler_spec)
# This checks that the single node compiler spec satisfies the request
# of a direct dependency. The check is not perfect, but based on heuristic.
if not compiler_spec.satisfies(rhs_edge.spec):
return False
else:
candidates = current_node.dependencies(
name=rhs_edge.spec.name,
deptype=rhs_edge.depflag,
virtuals=rhs_edge.virtuals or None,
)
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
return False
candidates = current_node.dependencies(
name=rhs_edge.spec.name,
deptype=rhs_edge.depflag,
virtuals=rhs_edge.virtuals or None,
)
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
return False
continue
@@ -3485,9 +3479,8 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return False
# Edges have been checked above already, hence deps=False
lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
return all(
any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
for rhs in other.traverse(root=False)
)
@@ -3961,8 +3954,6 @@ def format_attribute(match_object: Match) -> str:
except AttributeError:
if part == "compiler":
return "none"
elif part == "specfile_version":
return f"v{current.original_spec_format()}"
raise SpecFormatStringError(
f"Attempted to format attribute {attribute}. "
@@ -4488,7 +4479,7 @@ def has_virtual_dependency(self, virtual: str) -> bool:
return bool(self.dependencies(virtuals=(virtual,)))
class VariantMap(lang.HashableMap[str, vt.VariantValue]):
class VariantMap(lang.HashableMap):
"""Map containing variant instances. New values can be added only
if the key is not already present."""
@@ -4498,7 +4489,7 @@ def __init__(self, spec: Spec):
def __setitem__(self, name, vspec):
# Raise a TypeError if vspec is not of the right type
if not isinstance(vspec, vt.VariantValue):
if not isinstance(vspec, vt.VariantBase):
raise TypeError(
"VariantMap accepts only values of variant types "
f"[got {type(vspec).__name__} instead]"
@@ -4638,7 +4629,7 @@ def __str__(self):
bool_keys = []
kv_keys = []
for key in sorted_keys:
if self[key].type == vt.VariantType.BOOL:
if isinstance(self[key].value, bool):
bool_keys.append(key)
else:
kv_keys.append(key)
@@ -4671,8 +4662,7 @@ def substitute_abstract_variants(spec: Spec):
unknown = []
for name, v in spec.variants.items():
if name == "dev_path":
v.type = vt.VariantType.SINGLE
v.concrete = True
spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
continue
elif name in vt.RESERVED_NAMES:
continue
@@ -4695,7 +4685,7 @@ def substitute_abstract_variants(spec: Spec):
if rest:
continue
new_variant = pkg_variant.make_variant(*v.values)
new_variant = pkg_variant.make_variant(v._original_value)
pkg_variant.validate_or_raise(new_variant, spec.name)
spec.variants.substitute(new_variant)
@@ -4821,7 +4811,7 @@ def from_node_dict(cls, node):
for val in values:
spec.compiler_flags.add_flag(name, val, propagate)
else:
spec.variants[name] = vt.VariantValue.from_node_dict(
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
name, values, propagate=propagate, abstract=name in abstract_variants
)
@@ -4847,7 +4837,7 @@ def from_node_dict(cls, node):
patches = node["patches"]
if len(patches) > 0:
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.set(*patches)
mvar.value = patches
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
@@ -5172,6 +5162,25 @@ def eval_conditional(string):
return eval(string, valid_variables)
class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs."""
def __init__(self, parse_error):
super().__init__(parse_error.message)
self.string = parse_error.string
self.pos = parse_error.pos
@property
def long_message(self):
return "\n".join(
[
" Encountered when parsing spec:",
" %s" % self.string,
" %s^" % (" " * self.pos),
]
)
class InvalidVariantForSpecError(spack.error.SpecError):
"""Raised when an invalid conditional variant is specified."""
@@ -5189,6 +5198,14 @@ class DuplicateDependencyError(spack.error.SpecError):
"""Raised when the same dependency occurs in a spec twice."""
class MultipleVersionError(spack.error.SpecError):
"""Raised when version constraints occur in a spec twice."""
class DuplicateCompilerSpecError(spack.error.SpecError):
"""Raised when the same compiler occurs in a spec twice."""
class UnsupportedCompilerError(spack.error.SpecError):
"""Raised when the user asks for a compiler spack doesn't know about."""
@@ -5197,6 +5214,11 @@ class DuplicateArchitectureError(spack.error.SpecError):
"""Raised when the same architecture occurs in a spec twice."""
class InconsistentSpecError(spack.error.SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
constraints."""
class InvalidDependencyError(spack.error.SpecError):
"""Raised when a dependency in a spec is not actually a dependency
of the package."""
@@ -5208,6 +5230,30 @@ def __init__(self, pkg, deps):
)
class NoProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
virtual dependency.
"""
def __init__(self, vpkg):
super().__init__("No providers found for virtual package: '%s'" % vpkg)
self.vpkg = vpkg
class MultipleProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
virtual dependency.
"""
def __init__(self, vpkg, providers):
"""Takes the name of the vpkg"""
super().__init__(
"Multiple providers found for '%s': %s" % (vpkg, [str(s) for s in providers])
)
self.vpkg = vpkg
self.providers = providers
class UnsatisfiableSpecNameError(spack.error.UnsatisfiableSpecError):
"""Raised when two specs aren't even for the same package."""
@@ -5222,6 +5268,20 @@ def __init__(self, provided, required):
super().__init__(provided, required, "version")
class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec compiler conflicts with package constraints."""
def __init__(self, provided, required):
super().__init__(provided, required, "compiler")
class UnsatisfiableCompilerFlagSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
def __init__(self, provided, required):
super().__init__(provided, required, "compiler_flags")
class UnsatisfiableArchitectureSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
@@ -5229,6 +5289,14 @@ def __init__(self, provided, required):
super().__init__(provided, required, "architecture")
class UnsatisfiableProviderSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a provider is supplied but constraints don't match
a vpkg requirement"""
def __init__(self, provided, required):
super().__init__(provided, required, "provider")
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(spack.error.UnsatisfiableSpecError):

View File

@@ -0,0 +1,263 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import List
import spack.spec
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
class SpecList:
def __init__(self, name="specs", yaml_list=None, reference=None):
# Normalize input arguments
yaml_list = yaml_list or []
reference = reference or {}
self.name = name
self._reference = reference # TODO: Do we need defensive copy here?
# Validate yaml_list before assigning
if not all(isinstance(s, (str, list, dict)) for s in yaml_list):
raise ValueError(
"yaml_list can contain only valid YAML types! Found:\n %s"
% [type(s) for s in yaml_list]
)
self.yaml_list = yaml_list[:]
# Expansions can be expensive to compute and difficult to keep updated
# We cache results and invalidate when self.yaml_list changes
self._expanded_list = None
self._constraints = None
self._specs = None
@property
def is_matrix(self):
for item in self.specs_as_yaml_list:
if isinstance(item, dict):
return True
return False
@property
def specs_as_yaml_list(self):
if self._expanded_list is None:
self._expanded_list = self._expand_references(self.yaml_list)
return self._expanded_list
@property
def specs_as_constraints(self):
if self._constraints is None:
constraints = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict): # matrix of specs
constraints.extend(_expand_matrix_constraints(item))
else: # individual spec
constraints.append([Spec(item)])
self._constraints = constraints
return self._constraints
@property
def specs(self) -> List[Spec]:
if self._specs is None:
specs = []
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:
spec = constraint_list[0].copy()
for const in constraint_list[1:]:
spec.constrain(const)
specs.append(spec)
self._specs = specs
return self._specs
def add(self, spec):
self.yaml_list.append(str(spec))
# expanded list can be updated without invalidation
if self._expanded_list is not None:
self._expanded_list.append(str(spec))
# Invalidate cache variables when we change the list
self._constraints = None
self._specs = None
def remove(self, spec):
# Get spec to remove from list
remove = [
s
for s in self.yaml_list
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
msg += f"Either {spec} is not in {self.name} or {spec} is "
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
# invalidate cache variables when we change the list
self._expanded_list = None
self._constraints = None
self._specs = None
def replace(self, idx: int, spec: str):
"""Replace the existing spec at the index with the new one.
Args:
idx: index of the spec to replace in the speclist
spec: new spec
"""
self.yaml_list[idx] = spec
# invalidate cache variables when we change the list
self._expanded_list = None
self._constraints = None
self._specs = None
def extend(self, other, copy_reference=True):
self.yaml_list.extend(other.yaml_list)
self._expanded_list = None
self._constraints = None
self._specs = None
if copy_reference:
self._reference = other._reference
def update_reference(self, reference):
self._reference = reference
self._expanded_list = None
self._constraints = None
self._specs = None
def _parse_reference(self, name):
sigil = ""
name = name[1:]
# Parse specs as constraints
if name.startswith("^") or name.startswith("%"):
sigil = name[0]
name = name[1:]
# Make sure the reference is valid
if name not in self._reference:
msg = f"SpecList '{self.name}' refers to named list '{name}'"
msg += " which does not appear in its reference dict."
raise UndefinedReferenceError(msg)
return (name, sigil)
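For example, under the parsing rules above (assuming a reference dict that defines "mpis" and "gccs"):

# self._parse_reference("$mpis")   -> ("mpis", "")
# self._parse_reference("$^mpis")  -> ("mpis", "^")
# self._parse_reference("$%gccs")  -> ("gccs", "%")
# self._parse_reference("$nope")   -> raises UndefinedReferenceError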
def _expand_references(self, yaml):
if isinstance(yaml, list):
ret = []
for item in yaml:
# if it's a reference, expand it
if isinstance(item, str) and item.startswith("$"):
# replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item)
referent = [
_sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
]
ret.extend(referent)
else:
# else just recurse
ret.append(self._expand_references(item))
return ret
elif isinstance(yaml, dict):
# There can't be expansions in dicts
return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
else:
# Strings are just returned
return yaml
def __len__(self):
return len(self.specs)
def __getitem__(self, key):
return self.specs[key]
def __iter__(self):
return iter(self.specs)
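A short usage sketch of the class (list contents are illustrative): "$"-prefixed items are expanded against the reference dict, and the cached expansion stays valid across add():

mpis = SpecList("mpis", ["mpich", "openmpi"])
specs = SpecList("specs", ["hdf5", "$mpis"], reference={"mpis": mpis})
print(specs.specs_as_yaml_list)  # ["hdf5", "mpich", "openmpi"]
specs.add("zlib")                # appends and updates the expanded cache in place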
def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
for row in matrix_config["matrix"]:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
[
[" ".join([str(c) for c in expanded_constraint_list])]
for expanded_constraint_list in _expand_matrix_constraints(r)
]
)
else:
new_row.append([r])
expanded_rows.append(new_row)
excludes = matrix_config.get("exclude", []) # only compute once
sigil = matrix_config.get("sigil", "")
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.spec.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
# Resolve abstract hashes for exclusion criteria
if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
continue
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
# Add to list of constraints
results.append(flat_combo)
return results
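Concretely, a two-row matrix expands to the cross product of its rows, minus any excluded combinations; a sketch of the expected behavior with illustrative spec names:

config = {
    "matrix": [["hdf5", "zlib"], ["%gcc", "%clang"]],
    "exclude": ["zlib%clang"],
}
# itertools.product over the rows yields 4 combinations; the exclude entry
# filters out the (zlib, %clang) pair, leaving 3 constraint lists:
#   [Spec("hdf5"), Spec("%gcc")], [Spec("hdf5"), Spec("%clang")],
#   [Spec("zlib"), Spec("%gcc")]
rows = _expand_matrix_constraints(config)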
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:
item["sigil"] = sigil
return item
else:
return sigil + item
class SpecListError(SpackError):
"""Error class for all errors related to SpecList objects."""
class UndefinedReferenceError(SpecListError):
"""Error class for undefined references in Spack stacks."""
class InvalidSpecConstraintError(SpecListError):
"""Error class for invalid spec constraints at concretize time."""

View File

@@ -62,7 +62,7 @@
import sys
import traceback
import warnings
from typing import Iterator, List, Optional, Tuple, Union
from typing import Iterator, List, Optional, Tuple
from llnl.util.tty import color
@@ -369,7 +369,7 @@ def raise_parsing_error(string: str, cause: Optional[Exception] = None):
"""Raise a spec parsing error with token context."""
raise SpecParsingError(string, self.ctx.current_token, self.literal_str) from cause
def add_flag(name: str, value: Union[str, bool], propagate: bool, concrete: bool):
def add_flag(name: str, value: str, propagate: bool, concrete: bool):
"""Wrapper around ``Spec._add_flag()`` that adds parser context to errors raised."""
try:
initial_spec._add_flag(name, value, propagate, concrete)

View File

@@ -93,7 +93,6 @@ def test_package_audits(packages, expected_errors, mock_packages):
]
# TODO/RepoSplit: Should this not rely on mock packages post split?
@pytest.mark.parametrize(
"config_section,data,failing_check",
[
@@ -114,7 +113,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
),
],
)
def test_config_audits(config_section, data, failing_check, mock_packages):
def test_config_audits(config_section, data, failing_check):
with spack.config.override(config_section, data):
reports = spack.audit.run_group("configs")
assert any((check == failing_check) and errors for check, errors in reports)

View File

@@ -15,7 +15,7 @@
@pytest.fixture()
def builder_test_repository(config):
builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
builder_test_path = os.path.join(spack.paths.repos_path, "builder.test")
with spack.repo.use_repositories(builder_test_path) as mock_repo:
yield mock_repo

View File

@@ -873,6 +873,10 @@ def test_push_to_build_cache(
ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
assert "spack-build-out.txt.gz" in os.listdir(logs_dir)
dl_dir = scratch / "download_dir"
buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))
assert len(os.listdir(dl_dir)) == 2
def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
def push_or_raise(*args, **kwargs):
@@ -2032,7 +2036,7 @@ def test_ci_verify_versions_valid(
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
out = ci_cmd("verify-versions", commits[-1], commits[-3])
assert "Validated diff-test@2.1.5" in out
@@ -2049,7 +2053,7 @@ def test_ci_verify_versions_standard_invalid(
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
assert "Invalid checksum found diff-test@2.1.5" in out
@@ -2060,7 +2064,7 @@ def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
pkg_class = spack.spec.Spec("diff-test").package_class
monkeypatch.setattr(pkg_class, "manual_download", True)

View File

@@ -20,8 +20,6 @@
config = spack.main.SpackCommand("config")
env = spack.main.SpackCommand("env")
pytestmark = pytest.mark.usefixtures("mock_packages")
def _create_config(scope=None, data={}, section="packages"):
scope = scope or spack.config.default_modify_scope()

View File

@@ -1829,7 +1829,7 @@ def test_indirect_build_dep(tmp_path):
build-only dep. Make sure this concrete DAG is preserved when writing the
environment out and reading it back.
"""
builder = spack.repo.MockRepositoryBuilder(tmp_path)
builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None)])
@@ -1862,7 +1862,7 @@ def test_store_different_build_deps(tmp_path):
z1
"""
builder = spack.repo.MockRepositoryBuilder(tmp_path)
builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])

View File

@@ -9,8 +9,6 @@
import spack.cmd.info
from spack.main import SpackCommand
pytestmark = [pytest.mark.usefixtures("mock_packages")]
info = SpackCommand("info")
@@ -33,12 +31,15 @@ def _print(*args, **kwargs):
return buffer
@pytest.mark.parametrize(
"pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"]
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_it_just_runs(extra_args):
info("vtk-m", *extra_args)
def test_it_just_runs(pkg, extra_args):
info(pkg, *extra_args)
def test_info_noversion(print_buffer):
def test_info_noversion(mock_packages, print_buffer):
"""Check that a mock package with no versions outputs None."""
info("noversion")
@@ -57,7 +58,7 @@ def test_info_noversion(print_buffer):
@pytest.mark.parametrize(
"pkg_query,expected", [("zlib", "False"), ("find-externals1", "True (version)")]
)
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):
args = parser.parse_args(["--detectable", pkg_query])
spack.cmd.info.info(parser, args)
@@ -69,7 +70,13 @@ def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
@pytest.mark.parametrize(
"pkg_query", ["vtk-m", "gcc"] # This should ensure --test's c_names processing loop covered
"pkg_query",
[
"hdf5",
"cloverleaf3d",
"trilinos",
"gcc", # This should ensure --test's c_names processing loop covered
],
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_info_fields(pkg_query, extra_args, parser, print_buffer):

View File

@@ -66,30 +66,33 @@ def test_install_package_and_dependency(
assert 'errors="0"' in content
def _check_runtests_none(pkg):
assert not pkg.run_tests
def _check_runtests_dttop(pkg):
assert pkg.run_tests == (pkg.name == "dttop")
def _check_runtests_all(pkg):
assert pkg.run_tests
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert not pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_none)
install("-v", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_root(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests == (pkg.name == "dttop")
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_dttop)
install("--test=root", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_all(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_all)
install("--test=all", "pkg-a")
@@ -377,6 +380,7 @@ def test_install_from_file(spec, concretize, error_code, tmpdir):
def test_junit_output_with_failures(tmpdir, exc_typename, msg):
with tmpdir.as_cwd():
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"raiser",
@@ -409,6 +413,21 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
assert msg in content
def _throw(task, exc_typename, exc_type, msg):
# Self is a spack.installer.Task
exc_type = getattr(builtins, exc_typename)
exc = exc_type(msg)
task.fail(exc)
def _runtime_error(task, *args, **kwargs):
_throw(task, "RuntimeError", spack.error.InstallError, "something weird happened")
def _keyboard_error(task, *args, **kwargs):
_throw(task, "KeyboardInterrupt", KeyboardInterrupt, "Ctrl-C strikes again")
@pytest.mark.disable_clean_stage_check
@pytest.mark.parametrize(
"exc_typename,expected_exc,msg",
@@ -428,14 +447,17 @@ def test_junit_output_with_errors(
tmpdir,
monkeypatch,
):
def just_throw(*args, **kwargs):
exc_type = getattr(builtins, exc_typename)
raise exc_type(msg)
monkeypatch.setattr(spack.installer.PackageInstaller, "_install_task", just_throw)
throw = _keyboard_error if expected_exc == KeyboardInterrupt else _runtime_error
monkeypatch.setattr(spack.installer.BuildTask, "complete", throw)
with tmpdir.as_cwd():
install("--log-format=junit", "--log-file=test.xml", "libdwarf", fail_on_error=False)
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"trivial-install-test-dependent",
fail_on_error=False,
)
assert isinstance(install.error, expected_exc)
@@ -445,7 +467,7 @@ def just_throw(*args, **kwargs):
content = filename.open().read()
# Only libelf error is reported (through libdwarf root spec). libdwarf
# Only original error is reported, dependent
# install is skipped and it is not an error.
assert 'tests="0"' not in content
assert 'failures="0"' in content
@@ -1079,7 +1101,10 @@ def install_use_buildcache(opt):
@pytest.mark.disable_clean_stage_check
def test_padded_install_runtests_root(install_mockery, mock_fetch):
spack.config.set("config:install_tree:padded_length", 255)
output = install("--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False)
output = install(
"--verbose", "--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False
)
print(output)
assert output.count("method not implemented") == 1

View File

@@ -6,20 +6,16 @@
import sys
from textwrap import dedent
import pytest
import spack.paths
import spack.repo
from spack.main import SpackCommand
pytestmark = [pytest.mark.usefixtures("mock_packages")]
list = SpackCommand("list")
def test_list():
output = list()
assert "bzip2" in output
assert "cloverleaf3d" in output
assert "hdf5" in output
@@ -45,7 +41,7 @@ def test_list_cli_output_format(mock_tty_stdout):
assert out == out_str
def test_list_filter():
def test_list_filter(mock_packages):
output = list("py-*")
assert "py-extension1" in output
assert "py-extension2" in output
@@ -61,18 +57,18 @@ def test_list_filter():
assert "mpich" not in output
def test_list_search_description():
def test_list_search_description(mock_packages):
output = list("--search-description", "one build dependency")
assert "depb" in output
def test_list_format_name_only():
def test_list_format_name_only(mock_packages):
output = list("--format", "name_only")
assert "zmpi" in output
assert "hdf5" in output
def test_list_format_version_json():
def test_list_format_version_json(mock_packages):
output = list("--format", "version_json")
assert '{"name": "zmpi",' in output
assert '{"name": "dyninst",' in output
@@ -81,7 +77,7 @@ def test_list_format_version_json():
json.loads(output)
def test_list_format_html():
def test_list_format_html(mock_packages):
output = list("--format", "html")
assert '<div class="section" id="zmpi">' in output
assert "<h1>zmpi" in output
@@ -90,7 +86,7 @@ def test_list_format_html():
assert "<h1>hdf5" in output
def test_list_update(tmpdir):
def test_list_update(tmpdir, mock_packages):
update_file = tmpdir.join("output")
# not yet created when list is run
@@ -117,7 +113,7 @@ def test_list_update(tmpdir):
assert f.read() == "empty\n"
def test_list_tags():
def test_list_tags(mock_packages):
output = list("--tag", "tag1")
assert "mpich" in output
assert "mpich2" in output
@@ -131,7 +127,7 @@ def test_list_tags():
assert "mpich2" in output
def test_list_count():
def test_list_count(mock_packages):
output = list("--count")
assert int(output.strip()) == len(spack.repo.all_package_names())
@@ -141,10 +137,11 @@ def test_list_count():
)
# def test_list_repos(mock_packages, builder_test_repository):
def test_list_repos():
with spack.repo.use_repositories(
os.path.join(spack.paths.test_repos_path, "builtin.mock"),
os.path.join(spack.paths.test_repos_path, "builder.test"),
os.path.join(spack.paths.repos_path, "builtin.mock"),
os.path.join(spack.paths.repos_path, "builder.test"),
):
total_pkgs = len(list().strip().split())
mock_pkgs = len(list("-r", "builtin.mock").strip().split())

View File

@@ -111,12 +111,12 @@ def split(output):
pkg = spack.main.SpackCommand("pkg")
def test_builtin_repo():
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin")
def test_packages_path():
assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin").packages_path
def test_mock_builtin_repo(mock_packages):
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")
def test_mock_packages_path(mock_packages):
assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin.mock").packages_path
def test_pkg_add(git, mock_pkg_git_repo):

Some files were not shown because too many files have changed in this diff