Compare commits

..

11 Commits

Author SHA1 Message Date
Gregory Becker
d87158a80f more robust buildcache tests for concretization to non-default arch
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-28 13:42:40 -05:00
Gregory Becker
441efad2d5 fix count test now that builtin.mock.mpileaks has build deps
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-28 13:42:14 -05:00
Gregory Becker
b6f394ed00 fixup after rebase
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-26 12:12:29 -07:00
Tamara Dahlgren
a8a62e8f5a Installer: update installation progress tracking
- test/installer: use existing inst for spack.installer
- remove install status from Installing message
- Add specs count visitor
- Report status on installed plus minor refactor
- Add the "+" to the tracker; include one experimental dynamic calculation
- tweak status reporting to ensure the numerator is unique across installed packages
- _print_installed_pkg -> InstallStatus.print_installed()
- move set_term_title outside of InstallStatus
- InstallStatus: remove unnecessary next_pkg
- InstallStatus: class and method name changes
  * changed InstallStatus to InstallerStatus since the former already exists in
    database.py and spec.py
  * changed print_installed to set_installed since it does more than print now
- InstallerStatus -> InstallerProgress, install_status -> progress
- InstallerProgress: cache config:install_status
- InstallerProgress: restore get_progress and set_term_title methods (w/ tweaks)
- Task execute(): added returns to docstrings
- Don't pass progress to build_process or Installer.run, but set installed on successful return
- fix mypy issue with pkg.run_tests assignment
2025-03-26 09:29:07 -07:00
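The bullets above describe a counter that keeps a unique numerator across installed packages and reports status as each one lands. A minimal sketch of that bookkeeping, with hypothetical names (`InstallerProgress` and `set_installed` echo the commit message, but the signatures here are invented, not Spack's actual API):

```python
class InstallerProgress:
    """Track how many of the specs in an install request are installed."""

    def __init__(self, total: int) -> None:
        self.total = total           # denominator: all packages in the request
        self.installed: set = set()  # unique numerator across installed packages

    def set_installed(self, pkg_name: str) -> str:
        self.installed.add(pkg_name)
        return f"[{len(self.installed)}/{self.total}] Installed {pkg_name}"


progress = InstallerProgress(total=3)
for name in ["zlib", "mpileaks", "zlib"]:  # a duplicate report is counted once
    print(progress.set_installed(name))
```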
Gregory Becker
a3b6b873da rebase fixup 2025-03-26 09:22:56 -07:00
Gregory Becker
04f8ebd1eb PackageInstaller._install_task: fix type annotation 2025-03-26 09:22:56 -07:00
Gregory Becker
a3344c5672 refactor overwrite installs into main installer class 2025-03-26 09:22:52 -07:00
Gregory Becker
4f2f253bc3 update edges for existing tasks for build deps 2025-03-26 09:20:24 -07:00
Gregory Becker
78e39f2207 use db lookup that cannot return None 2025-03-26 09:20:24 -07:00
Tamara Dahlgren
eaf332c03e Resolve mypy issues 2025-03-26 09:20:24 -07:00
Gregory Becker
c74d6117e5 Installer: queue only link/run deps and requeue with build deps as needed
Refactors BuildTask into separate classes BuildTask and InstallTask.
Queues all packages as InstallTasks, with link/run deps only.
If an InstallTask fails to install from binary, a BuildTask is generated.
The BuildTask is queued with dependencies on new InstallTasks for its
build deps and their link/run dependencies.
The tasks telescope open to include build deps of build deps as needed.
2025-03-26 09:20:15 -07:00
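A rough sketch of the queueing strategy this commit describes, using a toy dependency graph in plain Python (the names and data structures are illustrative, not Spack's real installer API):

```python
from typing import Dict, List

# link/run and build dependency maps for a toy package graph (assumed data)
LINK_RUN: Dict[str, List[str]] = {"mpileaks": ["mpich"], "mpich": [], "cmake": []}
BUILD: Dict[str, List[str]] = {"mpileaks": ["cmake"], "mpich": [], "cmake": []}


def initial_queue(spec: str) -> List[tuple]:
    """Queue the spec and its link/run closure as ('install', pkg) tasks."""
    tasks, todo = [], [spec]
    while todo:
        pkg = todo.pop()
        tasks.append(("install", pkg))
        todo.extend(LINK_RUN[pkg])
    return tasks


def requeue_as_build(pkg: str) -> List[tuple]:
    """On binary-install failure, generate a ('build', pkg) task plus install
    tasks for its build deps and their link/run closures; build deps of those
    build deps telescope open the same way when *they* fail."""
    new_tasks = [("build", pkg)]
    for dep in BUILD[pkg]:
        new_tasks.extend(initial_queue(dep))
    return new_tasks


print(initial_queue("mpileaks"))     # [('install', 'mpileaks'), ('install', 'mpich')]
print(requeue_as_build("mpileaks"))  # [('build', 'mpileaks'), ('install', 'cmake')]
```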
13266 changed files with 247087 additions and 249990 deletions

@@ -28,7 +28,7 @@ max-line-length = 99
 # - F821: undefined name `name`
 #
 per-file-ignores =
-  var/spack/*/package.py:F403,F405,F821
+  var/spack/repos/*/package.py:F403,F405,F821
   *-ci-package.py:F403,F405,F821
 # exclude things we usually do not want linting for.

.gitattributes

@@ -1,3 +1,4 @@
 *.py diff=python
+*.lp linguist-language=Prolog
 lib/spack/external/* linguist-vendored
 *.bat text eol=crlf

@@ -59,6 +59,7 @@ jobs:
     - name: Package audits (without coverage)
       if: ${{ runner.os == 'Windows' }}
       run: |
+        . share/spack/setup-env.sh
         spack -d audit packages
         ./share/spack/qa/validate_last_exit.ps1
         spack -d audit configs

@@ -26,7 +26,7 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
               make patch unzip which xz python3 python3-devel tree \
-              cmake bison bison-devel libstdc++-static gawk
+              cmake bison bison-devel libstdc++-static
     - name: Setup OpenSUSE
       if: ${{ matrix.image == 'opensuse/leap:latest' }}
       run: |

@@ -42,17 +42,17 @@ jobs:
           # built-in repository or documentation
           filters: |
             bootstrap:
-              - 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/python/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/npth/**'
-              - 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
+              - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
+              - 'var/spack/repos/builtin/packages/clingo/**'
+              - 'var/spack/repos/builtin/packages/python/**'
+              - 'var/spack/repos/builtin/packages/re2c/**'
+              - 'var/spack/repos/builtin/packages/gnupg/**'
+              - 'var/spack/repos/builtin/packages/libassuan/**'
+              - 'var/spack/repos/builtin/packages/libgcrypt/**'
+              - 'var/spack/repos/builtin/packages/libgpg-error/**'
+              - 'var/spack/repos/builtin/packages/libksba/**'
+              - 'var/spack/repos/builtin/packages/npth/**'
+              - 'var/spack/repos/builtin/packages/pinentry/**'
               - 'lib/spack/**'
               - 'share/spack/**'
               - '.github/workflows/bootstrap.yml'

@@ -25,16 +25,14 @@ jobs:
       with:
         python-version: '3.13'
         cache: 'pip'
-        cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
     - name: Install Python Packages
       run: |
+        pip install --upgrade pip setuptools
         pip install -r .github/workflows/requirements/style/requirements.txt
     - name: vermin (Spack's Core)
-      run: |
-        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
-      run: |
-        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

 # Run style checks on the files that have been changed
 style:
@@ -42,20 +40,23 @@ jobs:
     steps:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
-        fetch-depth: 2
+        fetch-depth: 0
     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
        python-version: '3.13'
        cache: 'pip'
-       cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
    - name: Install Python packages
      run: |
+       pip install --upgrade pip setuptools
        pip install -r .github/workflows/requirements/style/requirements.txt
+   - name: Setup git configuration
+     run: |
+       # Need this for the git tests to succeed.
+       git --version
+       . .github/workflows/bin/setup_git.sh
    - name: Run style tests
      run: |
-       bin/spack style --base HEAD^1
-       bin/spack license verify
-       pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
+       share/spack/qa/run-style-tests
  audit:
    uses: ./.github/workflows/audit.yaml
@@ -65,11 +66,7 @@ jobs:
     python_version: '3.13'
   verify-checksums:
-    # do not run if the commit message or PR description contains [skip-verify-checksums]
-    if: >-
-      ${{ inputs.with_packages == 'true' &&
-          !contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
-          !contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
+    if: ${{ inputs.with_packages == 'true' }}
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
@@ -106,3 +103,21 @@ jobs:
         spack -d bootstrap now --dev
         spack -d style -t black
         spack unit-test -V
+  # Further style checks from pylint
+  pylint:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+      with:
+        python-version: '3.13'
+        cache: 'pip'
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip setuptools pylint
+    - name: Pylint (Spack Core)
+      run: |
+        pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

@@ -1,8 +1,7 @@
 black==25.1.0
-clingo==5.8.0
-flake8==7.2.0
+clingo==5.7.1
+flake8==7.1.2
 isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20250403
+types-six==1.17.0.20250304
 vermin==1.6.0
-pylint==3.3.7

@@ -19,6 +19,9 @@ jobs:
       on_develop:
       - ${{ github.ref == 'refs/heads/develop' }}
       include:
+      - python-version: '3.6'
+        os: ubuntu-20.04
+        on_develop: ${{ github.ref == 'refs/heads/develop' }}
      - python-version: '3.7'
        os: ubuntu-22.04
        on_develop: ${{ github.ref == 'refs/heads/develop' }}

@@ -46,42 +46,18 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-Installation
-----------------
-
-To install spack, first make sure you have Python & Git.
+To install spack and your first package, make sure you have Python & Git.
 Then:

-```bash
-git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
-```
-
-<details>
-<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
-<br>
-
-> [!TIP]
+    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+    $ cd spack/bin
+    $ ./spack install zlib
+
 > `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
 >
 > `--depth=2` prunes the git history to reduce the size of the Spack installation.
-
-</details>
-
-```bash
-# For bash/zsh/sh
-. spack/share/spack/setup-env.sh
-
-# For tcsh/csh
-source spack/share/spack/setup-env.csh
-
-# For fish
-. spack/share/spack/setup-env.fish
-```
-
-```bash
-# Now you're ready to install a package!
-spack install zlib-ng
-```

 Documentation
 ----------------

@@ -90,9 +90,10 @@ config:
   misc_cache: $user_cache_path/cache

-  # Abort downloads after this many seconds if no data is received.
-  # Setting this to 0 will disable the timeout.
-  connect_timeout: 30
+  # Timeout in seconds used for downloading sources etc. This only applies
+  # to the connection phase and can be increased for slow connections or
+  # servers. 0 means no timeout.
+  connect_timeout: 10

   # If this is false, tools like curl that use SSL will not verify

@@ -25,8 +25,6 @@ packages:
   glu: [apple-glu]
   unwind: [apple-libunwind]
   uuid: [apple-libuuid]
-  apple-clang:
-    buildable: false
   apple-gl:
     buildable: false
     externals:

@@ -72,8 +72,6 @@ packages:
   permissions:
     read: world
     write: user
-  cce:
-    buildable: false
   cray-fftw:
     buildable: false
   cray-libsci:
@@ -88,23 +86,13 @@ packages:
     buildable: false
   essl:
     buildable: false
-  fj:
-    buildable: false
   fujitsu-mpi:
     buildable: false
   fujitsu-ssl2:
     buildable: false
-  glibc:
-    buildable: false
   hpcx-mpi:
     buildable: false
-  iconv:
-    prefer: [libiconv]
   mpt:
     buildable: false
-  musl:
-    buildable: false
   spectrum-mpi:
     buildable: false
-  xl:
-    buildable: false

@@ -11,4 +11,4 @@
 # ~/.spack/repos.yaml
 # -------------------------------------------------------------------------
 repos:
-- $spack/var/spack/repos/spack_repo/builtin
+- $spack/var/spack/repos/builtin

@@ -20,8 +20,3 @@ packages:
   cxx: [msvc]
   mpi: [msmpi]
   gl: [wgl]
-  mpi:
-    require:
-    - one_of: [msmpi]
-  msvc:
-    buildable: false

@@ -1291,61 +1291,55 @@ based on site policies.
 Variants
 ^^^^^^^^

-Variants are named options associated with a particular package and are
-typically used to enable or disable certain features at build time. They
-are optional, as each package must provide default values for each variant
-it makes available.
-
-The names of variants available for a particular package depend on
+Variants are named options associated with a particular package. They are
+optional, as each package must provide default values for each variant it
+makes available. Variants can be specified using
+a flexible parameter syntax ``name=<value>``. For example,
+``spack install mercury debug=True`` will install mercury built with debug
+flags. The names of particular variants available for a package depend on
 what was provided by the package author. ``spack info <package>`` will
 provide information on what build variants are available.

-There are different types of variants:
-
-1. Boolean variants. Typically used to enable or disable a feature at
-   compile time. For example, a package might have a ``debug`` variant that
-   can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
-2. Single-valued variants. Often used to set defaults. For example, a package
-   might have a ``compression`` variant that determines the default
-   compression algorithm, which users could set to ``compression=gzip`` or
-   ``compression=zstd``.
-3. Multi-valued variants. A package might have a ``fabrics`` variant that
-   determines which network fabrics to support. Users could set this to
-   ``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
-   interfaces. The values are separated by commas.
-
-The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
-fabrics, but other fabrics may be enabled as well. If the intent is to
-enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
-syntax should be used with the ``:=`` operator.
-
-.. note::
-
-   In certain shells, the ``~`` character is expanded to the home
-   directory. To avoid these issues, avoid whitespace between the package
-   name and the variant:
-
-   .. code-block:: sh
-
-      mpileaks ~debug   # shell may try to substitute this!
-      mpileaks~debug    # use this instead
-
-   Alternatively, you can use the ``-`` character to disable a variant,
-   but be aware that this requires a space between the package name and
-   the variant:
-
-   .. code-block:: sh
-
-      mpileaks-debug     # wrong: refers to a package named "mpileaks-debug"
-      mpileaks -debug    # right: refers to a package named mpileaks with debug disabled
-
-   As a last resort, ``debug=False`` can also be used to disable a boolean variant.
-
-"""""""""""""""""""""""""""""""""""
-Variant propagation to dependencies
-"""""""""""""""""""""""""""""""""""
+For compatibility with earlier versions, variants which happen to be
+boolean in nature can be specified by a syntax that represents turning
+options on and off. For example, in the previous spec we could have
+supplied ``mercury +debug`` with the same effect of enabling the debug
+compile time option for the libelf package.
+
+Depending on the package a variant may have any default value. For
+``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
+with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
+you can turn it off by either adding ``-name`` or ``~name`` to the spec.
+
+There are two syntaxes here because, depending on context, ``~`` and
+``-`` may mean different things. In most shells, the following will
+result in the shell performing home directory substitution:
+
+.. code-block:: sh
+
+   mpileaks ~debug   # shell may try to substitute this!
+   mpileaks~debug    # use this instead
+
+If there is a user called ``debug``, the ``~`` will be incorrectly
+expanded. In this situation, you would want to write ``libelf
+-debug``. However, ``-`` can be ambiguous when included after a
+package name without spaces:
+
+.. code-block:: sh
+
+   mpileaks-debug     # wrong!
+   mpileaks -debug    # right
+
+Spack allows the ``-`` character to be part of package names, so the
+above will be interpreted as a request for the ``mpileaks-debug``
+package, not a request for ``mpileaks`` built without ``debug``
+options. In this scenario, you should write ``mpileaks~debug`` to
+avoid ambiguity.
+
+When spack normalizes specs, it prints them out with no spaces boolean
+variants using the backwards compatibility syntax and uses only ``~``
+for disabled boolean variants. The ``-`` and spaces on the command
+line are provided for convenience and legibility.

 Spack allows variants to propagate their value to the package's
 dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
@@ -1415,29 +1409,27 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.

 .. code-block:: yaml

-   packages:
-     gcc:
-       externals:
-       - spec: gcc@4.9.3
-         prefix: /opt/gcc
-         extra_attributes:
-           compilers:
-             c: /opt/gcc/bin/gcc
-             cxx: /opt/gcc/bin/g++
-             fortran: /opt/gcc/bin/gfortran
-           environment:
-             unset:
-             - BAD_VARIABLE
-             set:
-               GOOD_VARIABLE_NUM: 1
-               GOOD_VARIABLE_STR: good
-             prepend_path:
-               PATH: /path/to/binutils
-             append_path:
-               LD_LIBRARY_PATH: /opt/gcc/lib
-           extra_rpaths:
-           - /path/to/some/compiler/runtime/directory
-           - /path/to/some/other/compiler/runtime/directory
+   compilers:
+   - compiler:
+       spec: gcc@4.9.3
+       paths:
+         cc: /opt/gcc/bin/gcc
+         c++: /opt/gcc/bin/g++
+         f77: /opt/gcc/bin/gfortran
+         fc: /opt/gcc/bin/gfortran
+       environment:
+         unset:
+         - BAD_VARIABLE
+         set:
+           GOOD_VARIABLE_NUM: 1
+           GOOD_VARIABLE_STR: good
+         prepend_path:
+           PATH: /path/to/binutils
+         append_path:
+           LD_LIBRARY_PATH: /opt/gcc/lib
+       extra_rpaths:
+       - /path/to/some/compiler/runtime/directory
+       - /path/to/some/other/compiler/runtime/directory

 ^^^^^^^^^^^^^^^^^^^^^^^
@@ -1916,7 +1908,7 @@ diagnostics. Issues, if found, are reported to stdout:
    PKG-DIRECTIVES: 1 issue found
    1. lammps: wrong variant in "conflicts" directive
        the variant 'adios' does not exist
-       in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py
+       in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py

 ------------

@@ -63,6 +63,7 @@ on these ideas for each distinct build system that Spack supports:
    build_systems/cudapackage
    build_systems/custompackage
    build_systems/inteloneapipackage
+   build_systems/intelpackage
    build_systems/rocmpackage
    build_systems/sourceforgepackage

@@ -83,7 +84,7 @@ packages. You can quickly find examples by running:

 .. code-block:: console

-   $ cd var/spack/repos/spack_repo/builtin/packages
+   $ cd var/spack/repos/builtin/packages
    $ grep -l QMakePackage */package.py

@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:
 Spack has a number of built-in bundle packages, such as:

-* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
-* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
-* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
-* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_

 where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
 ``Libc`` is a virtual bundle package for the C standard library.

@@ -199,7 +199,7 @@ a variant to control this:
 However, not every CMake package accepts all four of these options.
 Grep the ``CMakeLists.txt`` file to see if the default values are
 missing or replaced. For example, the
-`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
+`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
 package overrides the default variant with:

 .. code-block:: python

@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
 focuses mostly on how Spack's build systems work.

 In this guide, we will be using the
-`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
-`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
+`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
+`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
 packages as examples. ``perl``'s build system is a hand-written
 ``Configure`` shell script, while ``cmake`` bootstraps itself during
 installation. Both of these packages require custom build systems.

@@ -33,6 +33,9 @@ For more information on a specific package, do::

    spack info --all <package-name>

+Intel no longer releases new versions of Parallel Studio, which can be
+used in Spack via the :ref:`intelpackage`. All of its components can
+now be found in oneAPI.

 Examples
 ========
@@ -47,8 +50,34 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

-To build the ``patchelf`` Spack package with ``icx``, do::
+Add the compilers to your ``compilers.yaml`` so spack can use them::
+
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
+
+Verify that the compilers are available::
+
+   spack compiler list
+
+Note that 2024 and later releases do not include ``icc``. Before 2024,
+the package layout was different::
+
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
+
+The ``intel-oneapi-compilers`` package includes 2 families of
+compilers:
+
+* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
+  compilers. 2024 and later releases contain ``ifort``, but not
+  ``icc`` and ``icpc``.
+* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
+  compilers based on LLVM.
+
+To build the ``patchelf`` Spack package with ``icc``, do::
+
+   spack install patchelf%intel
+
+To build with ``icx``, do ::

    spack install patchelf%oneapi
@@ -63,6 +92,15 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

+Add the compilers to your ``compilers.yaml`` so Spack can use them::
+
+   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
+
+Verify that the compilers are available::
+
+   spack compiler list
+
 Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::

    git clone https://github.com/spack/spack-configs
@@ -111,7 +149,7 @@ Compilers
 ---------

 To use the compilers, add some information about the installation to
-``packages.yaml``. For most users, it is sufficient to do::
+``compilers.yaml``. For most users, it is sufficient to do::

    spack compiler add /opt/intel/oneapi/compiler/latest/bin
@@ -119,7 +157,7 @@ Adapt the paths above if you did not install the tools in the default
 location. After adding the compilers, using them is the same
 as if you had installed the ``intel-oneapi-compilers`` package.
 Another option is to manually add the configuration to
-``packages.yaml`` as described in :ref:`Compiler configuration
+``compilers.yaml`` as described in :ref:`Compiler configuration
 <compiler-config>`.

 Before 2024, the directory structure was different::
@@ -162,5 +200,15 @@ You can also use Spack-installed libraries. For example::
 Will update your environment CPATH, LIBRARY_PATH, and other
 environment variables for building an application with oneMKL.

+More information
+================
+
+This section describes basic use of oneAPI, especially if it has
+changed compared to Parallel Studio. See :ref:`intelpackage` for more
+information on :ref:`intel-virtual-packages`,
+:ref:`intel-unrelated-packages`,
+:ref:`intel-integrating-external-libraries`, and
+:ref:`using-mkl-tips`.
+
 .. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html

(File diff suppressed because it is too large.)

@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the

 .. code-block:: python

-   def setup_build_environment(self, env: EnvironmentModifications) -> None:
+   def setup_build_environment(self, env):
        env.set("PREFIX", prefix)
        env.set("BLASLIB", spec["blas"].libs.ld_flags)

-`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
+`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
 is a good example of a simple package that does this, while
-`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
+`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
 is a good example of a more complex package.

 """"""""""""""""""""""
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
    ]

-`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
+`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
 is a good example of a package that uses this strategy.

 """""""""""""
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
    makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")

-`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
+`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
 is a good example of a package that involves editing a Makefile to set
 the appropriate variables.
@@ -192,7 +192,7 @@ well for storing variables:
       inc.write(f"{key} = {config[key]}\n")

-`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
+`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
 is a good example of a package that uses a dictionary to store
 configuration variables.
@@ -213,7 +213,7 @@ them in a list:
       inc.write(f"{var}\n")

-`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
+`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
 is a good example of a package that uses a list to store
 configuration variables.

@@ -12,7 +12,8 @@ The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
 it provides standard variants, dependencies, and conflicts to facilitate building
 packages using GPUs though for AMD in this case.

-You can find the source for this package (and suggestions for setting up your ``packages.yaml`` file) at
+You can find the source for this package (and suggestions for setting up your
+``compilers.yaml`` and ``packages.yaml`` files) at
 `<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.

 ^^^^^^^^

@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
 If it isn't on CRAN, try Bioconductor, another common R repository.

 For the purposes of this tutorial, we will be walking through
-`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
+`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
 as an example. If you search for "CRAN caret", you will quickly find what
 you are looking for at https://cran.r-project.org/package=caret.
 https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
 ^^^^^^^^^^^^^^^^^^

 Some packages depend on non-R libraries for linking. Check out the
-`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
+`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
 package for an example: https://cloud.r-project.org/package=stringi.
 If you search for the text "SystemRequirements", you will see:
@@ -352,7 +352,7 @@ Passing arguments to the installation
 Some R packages provide additional flags that can be passed to
 ``R CMD INSTALL``, often to locate non-R dependencies.
-`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
+`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
 is an example of this, and flags for linking to an MPI library. To pass
 these to the installation command, you can override ``configure_args``
 like so:

@@ -104,10 +104,10 @@ Finding available options
 The first place to start when looking for a list of valid options to
 build a package is ``scons --help``. Some packages like
-`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
+`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
 don't bother overwriting the default SCons help message, so this isn't
 very useful, but other packages like
-`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
+`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
 print a list of valid command-line variables:

 .. code-block:: console
@@ -177,7 +177,7 @@ print a list of valid command-line variables:
 More advanced packages like
-`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
+`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
 use ``scons --help`` to print a list of subcommands:

 .. code-block:: console

@@ -225,14 +225,8 @@ def setup(sphinx):
     ("py:class", "llnl.util.lang.T"),
     ("py:class", "llnl.util.lang.KT"),
     ("py:class", "llnl.util.lang.VT"),
-    ("py:class", "llnl.util.lang.K"),
-    ("py:class", "llnl.util.lang.V"),
-    ("py:class", "llnl.util.lang.ClassPropertyType"),
     ("py:obj", "llnl.util.lang.KT"),
     ("py:obj", "llnl.util.lang.VT"),
-    ("py:obj", "llnl.util.lang.ClassPropertyType"),
-    ("py:obj", "llnl.util.lang.K"),
-    ("py:obj", "llnl.util.lang.V"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.

@@ -148,16 +148,15 @@ this can expose you to attacks. Use at your own risk.
 ``ssl_certs``
 --------------------

 Path to custom certificates for SSL verification. The value can be a
 filesystem path, or an environment variable that expands to an absolute file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.

 When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
 a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
-in the subprocess calling ``curl``. If additional ``curl`` arguments are required,
-they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``.
+in the subprocess calling ``curl``.
 If ``url_fetch_method:urllib`` then files and directories are supported i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
 In all cases the expanded path must be absolute for Spack to use the certificates.
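A rough illustration of the curl behavior described above, assuming `config:ssl_certs` resolved to a single absolute file path (the download URL is hypothetical, and this is a sketch of the documented behavior, not Spack's actual implementation):

```python
import os
import subprocess

# Assumed: a single-file certificate bundle resolved from config:ssl_certs.
ssl_certs = os.path.expandvars("$SSL_CERT_FILE")

if os.path.isabs(ssl_certs) and os.path.isfile(ssl_certs):
    # Mirror the documented behavior: set CURL_CA_BUNDLE only for the
    # curl subprocess rather than for the whole shell session.
    env = dict(os.environ, CURL_CA_BUNDLE=ssl_certs)
    subprocess.run(
        ["curl", "-fsSL", "-O", "https://example.com/source.tar.gz"], env=env
    )
```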

@@ -11,7 +11,7 @@ Configuration Files
 Spack has many configuration files. Here is a quick list of them, in
 case you want to skip directly to specific docs:

-* :ref:`packages.yaml <compiler-config>`
+* :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
 * :ref:`include.yaml <include-yaml>`
@@ -46,12 +46,6 @@ Each Spack configuration file is nested under a top-level section
 corresponding to its name. So, ``config.yaml`` starts with ``config:``,
 ``mirrors.yaml`` starts with ``mirrors:``, etc.

-.. tip::
-
-   Validation and autocompletion of Spack config files can be enabled in
-   your editor with the YAML language server. See `spack/schemas
-   <https://github.com/spack/schemas>`_ for more information.
-
 .. _configuration-scopes:

 --------------------
@@ -101,7 +95,7 @@ are six configuration scopes. From lowest to highest:
   precedence over all other scopes.

 Each configuration directory may contain several configuration files,
-such as ``config.yaml``, ``packages.yaml``, or ``mirrors.yaml``. When
+such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
 configurations conflict, settings from higher-precedence scopes override
 lower-precedence settings.

@@ -226,9 +226,9 @@ If all is well, you'll see something like this:
   Modified files:

-    var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
-    var/spack/repos/spack_repo/builtin/packages/hdf/package.py
-    var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
+    var/spack/repos/builtin/packages/hdf5/package.py
+    var/spack/repos/builtin/packages/hdf/package.py
+    var/spack/repos/builtin/packages/netcdf/package.py

   =======================================================
   Flake8 checks were clean.
@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:

 .. code-block:: console

-   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
-   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
-   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
+   var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
+   var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
+   var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)

   Flake8 found errors.

 Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.

 .. warning::

-   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
+   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
    If you're using a ``python`` from Spack and you installed
    ``py-sphinx`` and friends, you need to make them available to your
    ``python``. The easiest way to do this is to run:

@@ -154,7 +154,9 @@ Package-related modules
 :mod:`spack.util.naming`
   Contains functions for mapping between Spack package names,
-  Python module names, and Python class names.
+  Python module names, and Python class names. Functions like
+  :func:`~spack.util.naming.mod_to_class` handle mapping package
+  module names to class names.

 :mod:`spack.directives`
   *Directives* are functions that can be called inside a package definition

@@ -1,34 +0,0 @@
-.. Copyright Spack Project Developers. See COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _env-vars-yaml:
-
-=============================================
-Environment Variable Settings (env_vars.yaml)
-=============================================
-
-Spack allows you to include shell environment variable modifications
-for a spack environment by including an ``env_vars.yaml``. Environment
-variables can be modified by setting, unsetting, appending, and prepending
-variables in the shell environment.
-The changes to the shell environment will take effect when the spack
-environment is activated.
-
-For example,
-
-.. code-block:: yaml
-
-   env_vars:
-     set:
-       ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
-     unset:
-       ENVAR_TO_UNSET_IN_ENV_LOAD:
-     prepend_path:
-       PATH_LIST: "path/to/prepend"
-     append_path:
-       PATH_LIST: "path/to/append"
-     remove_path:
-       PATH_LIST: "path/to/remove"

@@ -667,11 +667,11 @@ a ``packages.yaml`` file) could contain:
    # ...
    packages:
      all:
-       providers:
-         mpi: [openmpi]
+       compiler: [intel]
    # ...

-This configuration sets the default mpi provider to be openmpi.
+This configuration sets the default compiler for all packages to
+``intel``.

 ^^^^^^^^^^^^^^^^^^^^^^^
 Included configurations
@@ -686,8 +686,7 @@ the environment.

   spack:
     include:
     - environment/relative/path/to/config.yaml
-    - path: https://github.com/path/to/raw/config/compilers.yaml
-      sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
+    - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml
     - path: /path/to/$os/$target/environment
       optional: true
@@ -701,11 +700,11 @@ with the ``optional`` clause and conditional with the ``when`` clause. (See
 Files are listed using paths to individual files or directories containing them.
 Path entries may be absolute or relative to the environment or specified as
-URLs. URLs to individual files must link to the **raw** form of the file's
+URLs. URLs to individual files need to link to the **raw** form of the file's
 contents (e.g., `GitHub
 <https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
 or `GitLab
-<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file.
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
 Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
 supported. Spack-specific, environment and user path variables can be used.
 (See :ref:`config-file-variables` for more information.)
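The ``sha256`` field removed above amounts to verifying the fetched file's digest before including it. A minimal sketch of that check (the URL is the placeholder path from the diff, and the digest is the one shown above; this is an illustration, not Spack's implementation):

```python
import hashlib
import urllib.request

# Placeholder raw URL and the sha256 value shown in the diff above.
url = "https://github.com/path/to/raw/config/compilers.yaml"
expected = "26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b"

with urllib.request.urlopen(url) as response:
    data = response.read()

# Reject the included file if its digest does not match the configured sha256.
if hashlib.sha256(data).hexdigest() != expected:
    raise ValueError(f"sha256 mismatch for {url}")
```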
@@ -1000,28 +999,6 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

--------------------------------
-Modifying Environment Variables
--------------------------------
-
-Spack Environments can modify the active shell's environment variables when activated. The environment can be
-configured to set, unset, prepend, or append variables using the ``env_vars`` configuration in ``spack.yaml`` or through config scopes:
-
-.. code-block:: yaml
-
-   spack:
-     env_vars:
-       set:
-         ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
-       unset:
-         ENVAR_TO_UNSET_IN_ENV_LOAD:
-       prepend_path:
-         PATH_LIST: "path/to/prepend"
-       append_path:
-         PATH_LIST: "path/to/append"
-       remove_path:
-         PATH_LIST: "path/to/remove"
-
 -----------------
 Environment Views

@@ -0,0 +1,161 @@
+spack:
+  definitions:
+  - compiler-pkgs:
+    - 'llvm+clang@6.0.1 os=centos7'
+    - 'gcc@6.5.0 os=centos7'
+    - 'llvm+clang@6.0.1 os=ubuntu18.04'
+    - 'gcc@6.5.0 os=ubuntu18.04'
+  - pkgs:
+    - readline@7.0
+    # - xsdk@0.4.0
+  - compilers:
+    - '%gcc@5.5.0'
+    - '%gcc@6.5.0'
+    - '%gcc@7.3.0'
+    - '%clang@6.0.0'
+    - '%clang@6.0.1'
+  - oses:
+    - os=ubuntu18.04
+    - os=centos7
+  specs:
+  - matrix:
+    - [$pkgs]
+    - [$compilers]
+    - [$oses]
+    exclude:
+    - '%gcc@7.3.0 os=centos7'
+    - '%gcc@5.5.0 os=ubuntu18.04'
+  mirrors:
+    cloud_gitlab: https://mirror.spack.io
+  compilers:
+  # The .gitlab-ci.yml for this project picks a Docker container which does
+  # not have any compilers pre-built and ready to use, so we need to fake the
+  # existence of those here.
+  - compiler:
+      operating_system: centos7
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: gcc@5.5.0
+      target: x86_64
+  - compiler:
+      operating_system: centos7
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: gcc@6.5.0
+      target: x86_64
+  - compiler:
+      operating_system: centos7
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: clang@6.0.0
+      target: x86_64
+  - compiler:
+      operating_system: centos7
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: clang@6.0.1
+      target: x86_64
+  - compiler:
+      operating_system: ubuntu18.04
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: clang@6.0.0
+      target: x86_64
+  - compiler:
+      operating_system: ubuntu18.04
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: clang@6.0.1
+      target: x86_64
+  - compiler:
+      operating_system: ubuntu18.04
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: gcc@6.5.0
+      target: x86_64
+  - compiler:
+      operating_system: ubuntu18.04
+      modules: []
+      paths:
+        cc: /not/used
+        cxx: /not/used
+        f77: /not/used
+        fc: /not/used
+      spec: gcc@7.3.0
+      target: x86_64
+  gitlab-ci:
+    bootstrap:
+    - name: compiler-pkgs
+      compiler-agnostic: true
+    mappings:
+    - # spack-cloud-ubuntu
+      match:
+      # these are specs, if *any* match the spec under consideration, this
+      # 'mapping' will be used to generate the CI job
+      - os=ubuntu18.04
+      runner-attributes:
+        # 'tags' and 'image' go directly onto the job, 'variables' will
+        # be added to what we already necessarily create for the job as
+        # a part of the CI workflow
+        tags:
+        - spack-k8s
+        image:
+          name: scottwittenburg/spack_builder_ubuntu_18.04
+          entrypoint: [""]
+    - # spack-cloud-centos
+      match:
+      # these are specs, if *any* match the spec under consideration, this
+      # 'mapping' will be used to generate the CI job
+      - 'os=centos7'
+      runner-attributes:
+        tags:
+        - spack-k8s
+        image:
+          name: scottwittenburg/spack_builder_centos_7
+          entrypoint: [""]
+  cdash:
+    build-group: Release Testing
+    url: http://cdash
+    project: Spack Testing
+    site: Spack Docker-Compose Workflow
+  repos: []
+  upstreams: {}
+  modules:
+    enable: []
+  packages: {}
+  config: {}

@@ -131,7 +131,7 @@ creates a simple python file:
 It doesn't take much python coding to get from there to a working
 package:

-.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
    :lines: 5-

 Spack also provides wrapper functions around common commands like

View File

@@ -254,11 +254,12 @@ directory.
Compiler configuration Compiler configuration
---------------------- ----------------------
Spack has the ability to build packages with multiple compilers and compiler versions. Spack has the ability to build packages with multiple compilers and
Compilers can be made available to Spack by specifying them manually in ``packages.yaml``, compiler versions. Compilers can be made available to Spack by
or automatically by running ``spack compiler find``. specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
For convenience, Spack will automatically detect compilers the first time it needs them, or automatically by running ``spack compiler find``, but for
if none is available. convenience Spack will automatically detect compilers the first time
it needs them.
.. _cmd-spack-compilers: .. _cmd-spack-compilers:
@@ -273,11 +274,16 @@ compilers`` or ``spack compiler list``:
$ spack compilers $ spack compilers
==> Available compilers ==> Available compilers
-- gcc ubuntu20.04-x86_64 --------------------------------------- -- gcc ---------------------------------------------------------
gcc@9.4.0 gcc@8.4.0 gcc@10.5.0 gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
-- llvm ubuntu20.04-x86_64 -------------------------------------- -- intel -------------------------------------------------------
llvm@12.0.0 llvm@11.0.0 llvm@10.0.0 intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
-- clang -------------------------------------------------------
clang@3.4 clang@3.3 clang@3.2 clang@3.1
Any of these compilers can be used to build Spack packages. More on Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`. how this is done is in :ref:`sec-specs`.
@@ -296,22 +302,16 @@ An alias for ``spack compiler find``.
``spack compiler find`` ``spack compiler find``
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
If you do not see a compiler in the list shown by: Lists the compilers currently available to Spack. If you do not see
a compiler in this list, but you want to use it with Spack, you can
simply run ``spack compiler find`` with the path to where the
compiler is installed. For example:
.. code-block:: console .. code-block:: console
$ spack compiler list $ spack compiler find /usr/local/tools/ic-13.0.079
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
but you want to use it with Spack, you can simply run ``spack compiler find`` with the intel@13.0.079
path to where the compiler is installed. For example:
.. code-block:: console
$ spack compiler find /opt/intel/oneapi/compiler/2025.1/bin/
==> Added 1 new compiler to /home/user/.spack/packages.yaml
intel-oneapi-compilers@2025.1.0
==> Compilers are defined in the following files:
/home/user/.spack/packages.yaml
Or you can run ``spack compiler find`` with no arguments to force Or you can run ``spack compiler find`` with no arguments to force
auto-detection. This is useful if you do not know where compilers are auto-detection. This is useful if you do not know where compilers are
@@ -322,7 +322,7 @@ installed, but you know that new compilers have been added to your
$ module load gcc/4.9.0 $ module load gcc/4.9.0
$ spack compiler find $ spack compiler find
==> Added 1 new compiler to /home/user/.spack/packages.yaml ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
gcc@4.9.0 gcc@4.9.0
This loads the environment module for gcc-4.9.0 to add it to This loads the environment module for gcc-4.9.0 to add it to
@@ -331,7 +331,7 @@ This loads the environment module for gcc-4.9.0 to add it to
.. note:: .. note::
By default, spack does not fill in the ``modules:`` field in the By default, spack does not fill in the ``modules:`` field in the
``packages.yaml`` file. If you are using a compiler from a ``compilers.yaml`` file. If you are using a compiler from a
module, then you should add this field manually. module, then you should add this field manually.
See the section on :ref:`compilers-requiring-modules`. See the section on :ref:`compilers-requiring-modules`.
@@ -341,82 +341,91 @@ This loads the environment module for gcc-4.9.0 to add it to
``spack compiler info`` ``spack compiler info``
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
If you want to see additional information on some specific compilers, you can run ``spack compiler info`` on it: If you want to see specifics on a particular compiler, you can run
``spack compiler info`` on it:
.. code-block:: console .. code-block:: console
$ spack compiler info gcc $ spack compiler info intel@15
gcc@=8.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64: intel@15.0.0:
prefix: /usr paths:
compilers: cc = /usr/local/bin/icc-15.0.090
c: /usr/bin/gcc-8 cxx = /usr/local/bin/icpc-15.0.090
cxx: /usr/bin/g++-8 f77 = /usr/local/bin/ifort-15.0.090
fortran: /usr/bin/gfortran-8 fc = /usr/local/bin/ifort-15.0.090
modules = []
operating_system = centos6
...
gcc@=9.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64: This shows which C, C++, and Fortran compilers were detected by Spack.
prefix: /usr Notice also that we didn't have to be too specific about the
compilers: version. We just said ``intel@15``, and information about the only
c: /usr/bin/gcc matching Intel compiler was displayed.
cxx: /usr/bin/g++
fortran: /usr/bin/gfortran
gcc@=10.5.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
prefix: /usr
compilers:
c: /usr/bin/gcc-10
cxx: /usr/bin/g++-10
fortran: /usr/bin/gfortran-10
This shows the details of the compilers that were detected by Spack.
Notice also that we didn't have to be too specific about the version. We just said ``gcc``, and we got information
about all the matching compilers.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Manual compiler configuration Manual compiler configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If auto-detection fails, you can manually configure a compiler by editing your ``~/.spack/packages.yaml`` file. If auto-detection fails, you can manually configure a compiler by
You can do this by running ``spack config edit packages``, which will open the file in editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
``spack config edit compilers``, which will open the file in
:ref:`your favorite editor <controlling-the-editor>`. :ref:`your favorite editor <controlling-the-editor>`.
Each compiler has an "external" entry in the file with some ``extra_attributes``: Each compiler configuration in the file looks like this:
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
gcc: - compiler:
externals: modules: []
- spec: gcc@10.5.0 languages='c,c++,fortran' operating_system: centos6
prefix: /usr paths:
extra_attributes: cc: /usr/local/bin/icc-15.0.024-beta
compilers: cxx: /usr/local/bin/icpc-15.0.024-beta
c: /usr/bin/gcc-10 f77: /usr/local/bin/ifort-15.0.024-beta
cxx: /usr/bin/g++-10 fc: /usr/local/bin/ifort-15.0.024-beta
fortran: /usr/bin/gfortran-10 spec: intel@15.0.0
The compiler executables are listed under ``extra_attributes:compilers``, and are keyed by language. For compilers that do not support Fortran (like ``clang``), put
Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``. ``None`` for ``f77`` and ``fc``:
You can also add compiler flags to manually configured compilers. These flags should be specified in the .. code-block:: yaml
``flags`` section of the compiler specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
compilers:
- compiler:
modules: []
operating_system: centos6
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++
f77: None
fc: None
spec: clang@3.3svn
Once you save the file, the configured compilers will show up in the
list displayed by ``spack compilers``.
You can also add compiler flags to manually configured compilers. These
flags should be specified in the ``flags`` section of the compiler
specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
``cppflags``, ``ldflags``, and ``ldlibs``. For example: ``cppflags``, ``ldflags``, and ``ldlibs``. For example:
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
gcc: - compiler:
externals: modules: []
- spec: gcc@10.5.0 languages='c,c++,fortran' operating_system: centos6
prefix: /usr paths:
extra_attributes: cc: /usr/bin/gcc
compilers: cxx: /usr/bin/g++
c: /usr/bin/gcc-10 f77: /usr/bin/gfortran
cxx: /usr/bin/g++-10 fc: /usr/bin/gfortran
fortran: /usr/bin/gfortran-10 flags:
flags: cflags: -O3 -fPIC
cflags: -O3 -fPIC cxxflags: -O3 -fPIC
cxxflags: -O3 -fPIC cppflags: -O3 -fPIC
cppflags: -O3 -fPIC spec: gcc@4.7.2
These flags will be treated by spack as if they were entered from These flags will be treated by spack as if they were entered from
the command line each time this compiler is used. The compiler wrappers the command line each time this compiler is used. The compiler wrappers
@@ -431,44 +440,95 @@ These variables should be specified in the ``environment`` section of the compil
specification. The operations available to modify the environment are ``set``, ``unset``, specification. The operations available to modify the environment are ``set``, ``unset``,
``prepend_path``, ``append_path``, and ``remove_path``. For example: ``prepend_path``, ``append_path``, and ``remove_path``. For example:
.. code-block:: yaml
compilers:
- compiler:
modules: []
operating_system: centos6
paths:
cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
spec: oneapi@latest
environment:
set:
MKL_ROOT: "/path/to/mkl/root"
unset: # A list of environment variables to unset
- CC
prepend_path: # Similar for append|remove_path
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
.. note::
Spack is in the process of moving compilers from a separate
attribute to be handled like all other packages. As part of this
process, the ``compilers.yaml`` section will eventually be replaced
by configuration in the ``packages.yaml`` section. This new
configuration is now available, although it is not yet the default
behavior.
Compilers can also be configured as external packages in the
``packages.yaml`` config file. Any external package for a compiler
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
assuming the paths to the compiler executables are determinable from
the prefix.
If the paths to the compiler executable are not determinable from the
prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.
Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.
.. code-block:: yaml .. code-block:: yaml
packages: packages:
intel-oneapi-compilers: gcc:
externals: external:
- spec: intel-oneapi-compilers@2025.1.0 - spec: gcc@12.2.0 arch=linux-rhel8-skylake
prefix: /opt/intel/oneapi prefix: /usr
extra_attributes: extra_attributes:
compilers:
c: /opt/intel/oneapi/compiler/2025.1/bin/icx
cxx: /opt/intel/oneapi/compiler/2025.1/bin/icpx
fortran: /opt/intel/oneapi/compiler/2025.1/bin/ifx
environment: environment:
set: set:
MKL_ROOT: "/path/to/mkl/root" GCC_ROOT: /usr
unset: # A list of environment variables to unset external:
- CC - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
prepend_path: # Similar for append|remove_path prefix: /usr
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh extra_attributes:
compilers:
c: /usr/bin/clang-with-suffix
cxx: /usr/bin/clang++-with-extra-info
fortran: /usr/bin/gfortran
extra_rpaths:
- /usr/lib/llvm/
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler Build Your Own Compiler
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
If you are particular about which compiler/version you use, you might wish to have Spack build it for you. If you are particular about which compiler/version you use, you might
For example: wish to have Spack build it for you. For example:
.. code-block:: console .. code-block:: console
$ spack install gcc@14+binutils $ spack install gcc@4.9.3
Once the compiler is installed, you can start using it without additional configuration: Once that has finished, you will need to add it to your
``compilers.yaml`` file. You can then set Spack to use it by default
by adding the following to your ``packages.yaml`` file:
.. code-block:: console .. code-block:: yaml
$ spack install hdf5~mpi %gcc@14 packages:
all:
The same holds true for compilers that are made available from buildcaches, when reusing them is allowed. compiler: [gcc@4.9.3]
.. _compilers-requiring-modules: .. _compilers-requiring-modules:
@@ -476,26 +536,30 @@ The same holds true for compilers that are made available from buildcaches, when
Compilers Requiring Modules Compilers Requiring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Many installed compilers will work regardless of the environment they are called with. Many installed compilers will work regardless of the environment they
However, some installed compilers require environment variables to be set in order to run; are called with. However, some installed compilers require
this is typical for Intel and other proprietary compilers. ``$LD_LIBRARY_PATH`` or other environment variables to be set in order
to run; this is typical for Intel and other proprietary compilers.
On typical HPC clusters, these environment modifications are usually delegated to some "module" system. In such a case, you should tell Spack which module(s) to load in order
In such a case, you should tell Spack which module(s) to load in order to run the chosen compiler: to run the chosen compiler (If the compiler does not come with a
module file, you might consider making one by hand). Spack will load
this module into the environment ONLY when the compiler is run, and
NOT in general for a package's ``install()`` method. See, for
example, this ``compilers.yaml`` file:
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
gcc: - compiler:
externals: modules: [other/comp/gcc-5.3-sp3]
- spec: gcc@10.5.0 languages='c,c++,fortran' operating_system: SuSE11
prefix: /opt/compilers paths:
extra_attributes: cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
compilers: cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
c: /opt/compilers/bin/gcc-10 f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
cxx: /opt/compilers/bin/g++-10 fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
fortran: /opt/compilers/bin/gfortran-10 spec: gcc@5.3.0
modules: [gcc/10.5.0]
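With this entry in place, Spack loads the ``gcc/10.5.0`` module only while the compiler itself runs, not for the whole of a package's ``install()``. A sketch of normal usage:

.. code-block:: console

   $ spack install zlib-ng %gcc@10.5.0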
Some compilers require special environment settings to be loaded not just Some compilers require special environment settings to be loaded not just
to run, but also to execute the code they build, breaking packages that to run, but also to execute the code they build, breaking packages that
@@ -516,7 +580,7 @@ Licensed Compilers
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Some proprietary compilers require licensing to use. If you need to Some proprietary compilers require licensing to use. If you need to
use a licensed compiler, the process is similar to a mix of use a licensed compiler (eg, PGI), the process is similar to a mix of
build your own, plus modules: build your own, plus modules:
#. Create a Spack package (if it doesn't exist already) to install #. Create a Spack package (if it doesn't exist already) to install
@@ -526,21 +590,24 @@ build your own, plus modules:
using Spack to load the module it just created, and running simple using Spack to load the module it just created, and running simple
builds (eg: ``cc helloWorld.c && ./a.out``) builds (eg: ``cc helloWorld.c && ./a.out``)
#. Add the newly-installed compiler to ``packages.yaml`` as shown above. #. Add the newly-installed compiler to ``compilers.yaml`` as shown
above.
.. _mixed-toolchains: .. _mixed-toolchains:
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^
Fortran compilers on macOS Mixed Toolchains
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^
Modern compilers typically come with related compilers for C, C++ and Modern compilers typically come with related compilers for C, C++ and
Fortran bundled together. When possible, results are best if the same Fortran bundled together. When possible, results are best if the same
compiler is used for all languages. compiler is used for all languages.
In some cases, this is not possible. For example, XCode on macOS provides no Fortran compilers. In some cases, this is not possible. For example, starting with macOS El
The user is therefore forced to use a mixed toolchain: XCode-provided Clang for C/C++ and e.g. Capitan (10.11), many packages no longer build with GCC, but XCode
GNU ``gfortran`` for Fortran. provides no Fortran compilers. The user is therefore forced to use a
mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
Fortran.
#. You need to make sure that Xcode is installed. Run the following command: #. You need to make sure that Xcode is installed. Run the following command:
@@ -593,25 +660,45 @@ GNU ``gfortran`` for Fortran.
Note: the flag is ``-license``, not ``--license``. Note: the flag is ``-license``, not ``--license``.
#. Run ``spack compiler find`` to locate Clang.
#. There are different ways to get ``gfortran`` on macOS. For example, you can #. There are different ways to get ``gfortran`` on macOS. For example, you can
install GCC with Spack (``spack install gcc``), with Homebrew (``brew install install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
gcc``), or from a `DMG installer gcc``), or from a `DMG installer
<https://github.com/fxcoudert/gfortran-for-macOS/releases>`_. <https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.
#. Run ``spack compiler find`` to locate both Apple-Clang and GCC. #. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
the path to ``gfortran``:
Since languages in Spack are modeled as virtual packages, ``apple-clang`` will be used to provide .. code-block:: yaml
C and C++, while GCC will be used for Fortran.
compilers:
- compiler:
# ...
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++
f77: /path/to/bin/gfortran
fc: /path/to/bin/gfortran
spec: apple-clang@11.0.0
If you used Spack to install GCC, you can get the installation prefix by
``spack location -i gcc`` (this will only work if you have a single version
of GCC installed). Whereas for Homebrew, GCC is installed in
``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
will be ``/usr/local/gfortran``.
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
Compiler Verification Compiler Verification
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
You can verify that your compilers are configured properly by installing a simple package. For example: You can verify that your compilers are configured properly by installing a
simple package. For example:
.. code-block:: console .. code-block:: console
$ spack install zlib-ng%gcc@5.3.0 $ spack install zlib%gcc@5.3.0
.. _vendor-specific-compiler-configuration: .. _vendor-specific-compiler-configuration:
@@ -620,7 +707,9 @@ You can verify that your compilers are configured properly by installing a simpl
Vendor-Specific Compiler Configuration Vendor-Specific Compiler Configuration
-------------------------------------- --------------------------------------
This section provides details on how to get vendor-specific compilers working. With Spack, things usually "just work" with GCC. Not so for other
compilers. This section provides details on how to get specific
compilers working.
^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^
Intel Compilers Intel Compilers
@@ -642,8 +731,8 @@ compilers:
you have installed from the ``PATH`` environment variable. you have installed from the ``PATH`` environment variable.
If you want to use a version of ``gcc`` or ``g++`` other than the default If you want to use a version of ``gcc`` or ``g++`` other than the default
version on your system, you need to use either the ``--gcc-install-dir`` version on your system, you need to use either the ``-gcc-name``
or ``--gcc-toolchain`` compiler option to specify the path to the version of or ``-gxx-name`` compiler option to specify the path to the version of
``gcc`` or ``g++`` that you want to use." ``gcc`` or ``g++`` that you want to use."
-- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_ -- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
@@ -651,12 +740,76 @@ compilers:
Intel compilers may therefore be configured in one of two ways with Intel compilers may therefore be configured in one of two ways with
Spack: using modules, or using compiler flags. Spack: using modules, or using compiler flags.
""""""""""""""""""""""""""
Configuration with Modules
""""""""""""""""""""""""""
One can control which GCC is seen by the Intel compiler with modules.
A module must be loaded both for the Intel Compiler (so it will run)
and GCC (so the compiler can find the intended GCC). The following
configuration in ``compilers.yaml`` illustrates this technique:
.. code-block:: yaml
compilers:
- compiler:
modules: [gcc-4.9.3, intel-15.0.24]
operating_system: centos7
paths:
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
spec: intel@15.0.24.4.9.3
.. note::
The version number on the Intel compiler is a combination of
the "native" Intel version number and the GNU compiler it is
targeting.
""""""""""""""""""""""""""
Command Line Configuration
""""""""""""""""""""""""""
One can also control which GCC is seen by the Intel compiler by adding
flags to the ``icc`` command:
#. Identify the location of the compiler you just installed:
.. code-block:: console
$ spack location --install-dir gcc
~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
#. Set up ``compilers.yaml``, for example:
.. code-block:: yaml
compilers:
- compiler:
modules: [intel-15.0.24]
operating_system: centos7
paths:
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
flags:
cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
spec: intel@15.0.24.4.9.3
^^^ ^^^
NAG NAG
^^^ ^^^
The Numerical Algorithms Group provides a licensed Fortran compiler. The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
It is recommended to use GCC for your C/C++ compilers. this requires you to set up a :ref:`mixed-toolchains`. It is recommended to use
GCC for your C/C++ compilers.
The NAG Fortran compilers are a bit more strict than other compilers, and many The NAG Fortran compilers are a bit more strict than other compilers, and many
packages will fail to install with error messages like: packages will fail to install with error messages like:
@@ -673,40 +826,44 @@ the command line:
$ spack install openmpi fflags="-mismatch" $ spack install openmpi fflags="-mismatch"
Or it can be set permanently in your ``packages.yaml``: Or it can be set permanently in your ``compilers.yaml``:
.. code-block:: yaml .. code-block:: yaml
packages: - compiler:
nag: modules: []
externals: operating_system: centos6
- spec: nag@6.1 paths:
prefix: /opt/nag/bin cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
extra_attributes: cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
compilers: f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
fortran: /opt/nag/bin/nagfor fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
flags: flags:
fflags: -mismatch fflags: -mismatch
spec: nag@6.1
--------------- ---------------
System Packages System Packages
--------------- ---------------
Once compilers are configured, one needs to determine which pre-installed system packages, Once compilers are configured, one needs to determine which
if any, to use in builds. These are also configured in the ``~/.spack/packages.yaml`` file. pre-installed system packages, if any, to use in builds. This is
For example, to use an OpenMPI installed in /opt/local, one would use: configured in the file ``~/.spack/packages.yaml``. For example, to use
an OpenMPI installed in /opt/local, one would use:
.. code-block:: yaml .. code-block:: yaml
packages: packages:
openmpi: openmpi:
buildable: False externals:
externals: - spec: openmpi@1.10.1
- spec: openmpi@1.10.1 prefix: /opt/local
prefix: /opt/local buildable: False
In general, *Spack is easier to use and more reliable if it builds all of its own dependencies*. In general, Spack is easier to use and more reliable if it builds all of
However, there are several packages for which one commonly needs to use system versions: its own dependencies. However, there are several packages for which one
commonly needs to use system versions:
^^^ ^^^
MPI MPI
@@ -719,7 +876,8 @@ you are unlikely to get a working MPI from Spack. Instead, use an
appropriate pre-installed MPI. appropriate pre-installed MPI.
If you choose a pre-installed MPI, you should consider using the If you choose a pre-installed MPI, you should consider using the
pre-installed compiler used to build that MPI. pre-installed compiler used to build that MPI; see above on
``compilers.yaml``.
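A minimal sketch of such a pairing (version, compiler, and prefix are hypothetical) that pins an external OpenMPI together with the compiler it was built with:

.. code-block:: yaml

   packages:
     openmpi:
       buildable: false
       externals:
       - spec: openmpi@4.1.5 %gcc@11.4.0
         prefix: /opt/local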
^^^^^^^ ^^^^^^^
OpenSSL OpenSSL
@@ -1283,9 +1441,9 @@ To configure Spack, first run the following command inside the Spack console:
spack compiler find spack compiler find
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
containing a ``packages.yaml`` file. On a fresh Windows install with the above packages containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
compiler will be integrated within the first version of MSVC present in the ``packages.yaml`` compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
output. output.
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden. Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
@@ -23,6 +23,7 @@ components for use by dependent packages:
packages: packages:
all: all:
compiler: [rocmcc@=5.3.0]
variants: amdgpu_target=gfx90a variants: amdgpu_target=gfx90a
hip: hip:
buildable: false buildable: false
@@ -69,15 +70,16 @@ This is in combination with the following compiler definition:
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
llvm-amdgpu: - compiler:
externals: spec: rocmcc@=5.3.0
- spec: llvm-amdgpu@=5.3.0 paths:
prefix: /opt/rocm-5.3.0 cc: /opt/rocm-5.3.0/bin/amdclang
compilers: cxx: /opt/rocm-5.3.0/bin/amdclang++
c: /opt/rocm-5.3.0/bin/amdclang f77: null
cxx: /opt/rocm-5.3.0/bin/amdclang++ fc: /opt/rocm-5.3.0/bin/amdflang
fortran: null operating_system: rhel8
target: x86_64
This includes the following considerations: This includes the following considerations:
@@ -43,20 +43,6 @@ or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protoco
schemes) are supported. Spack-specific, environment and user path variables schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.) can be used. (See :ref:`config-file-variables` for more information.)
A ``sha256`` is required for remote file URLs and must be specified as follows:
.. code-block:: yaml
include:
- path: https://github.com/path/to/raw/config/compilers.yaml
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
Additionally, remote file URLs must link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
.. warning:: .. warning::
Recursive includes are not currently processed in a breadth-first manner Recursive includes are not currently processed in a breadth-first manner
@@ -75,7 +75,6 @@ or refer to the full manual below.
packages_yaml packages_yaml
build_settings build_settings
environments environments
env_vars_yaml
containers containers
mirrors mirrors

@@ -128,7 +128,7 @@ depend on the spec:
.. code-block:: python .. code-block:: python
def setup_run_environment(self, env: EnvironmentModifications) -> None: def setup_run_environment(self, env):
if self.spec.satisfies("+foo"): if self.spec.satisfies("+foo"):
env.set("FOO", "bar") env.set("FOO", "bar")
@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this
.. code-block:: python .. code-block:: python
def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None: def setup_dependent_run_environment(self, env, dependent_spec):
if dependent_spec.package.extends(self.spec): if dependent_spec.package.extends(self.spec):
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python) env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
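Taken together, a self-contained sketch of a package using both hooks with the new typed signatures (assuming the usual ``spack.package`` star import that package files rely on):

.. code-block:: python

   from spack.package import *


   class Example(Package):
       """Hypothetical package illustrating the typed environment hooks."""

       def setup_run_environment(self, env: EnvironmentModifications) -> None:
           # Set a variable only when the relevant variant is enabled.
           if self.spec.satisfies("+foo"):
               env.set("FOO", "bar")

       def setup_dependent_run_environment(
           self, env: EnvironmentModifications, dependent_spec: Spec
       ) -> None:
           # Adjust the environment of packages that depend on this one.
           env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)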
@@ -557,13 +557,14 @@ preferences.
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>` FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
The ``target`` and ``providers`` preferences Most package preferences (``compilers``, ``target`` and ``providers``)
can only be set globally under the ``all`` section of ``packages.yaml``: can only be set globally under the ``all`` section of ``packages.yaml``:
.. code-block:: yaml .. code-block:: yaml
packages: packages:
all: all:
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
target: [x86_64_v3] target: [x86_64_v3]
providers: providers:
mpi: [mvapich2, mpich, openmpi] mpi: [mvapich2, mpich, openmpi]
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`. no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of Examples where bundle packages can be useful include defining suites of
applications (e.g, `EcpProxyApps applications (e.g, `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_), (e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_). and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
These versioned packages primarily consist of dependencies on the associated These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:
.. code-block:: console .. code-block:: console
$ spack location -p gmp $ spack location -p gmp
${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
but ``spack edit`` provides a much simpler shortcut and saves you the but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path. trouble of typing the full path.
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
handles creating package files for you, so you can skip most of the handles creating package files for you, so you can skip most of the
details here. details here.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/spack_repo/builtin/packages`` ``var/spack/repos/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Spack installation directory is structured like a standard UNIX A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``, install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``. etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``. Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
If you ``cd`` to that directory, you will see directories for each If you ``cd`` to that directory, you will see directories for each
package: package:
.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls .. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
:shell: :shell:
:ellipsis: 10 :ellipsis: 10
@@ -479,7 +479,7 @@ package lives in:
.. code-block:: none .. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
Alongside the ``package.py`` file, a package may contain extra Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build. directories or files (like patches) that it needs to build.
@@ -492,7 +492,7 @@ Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``. ``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``: ``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
.. code-block:: python .. code-block:: python
:linenos: :linenos:
@@ -520,7 +520,7 @@ these:
$ spack install libelf@0.8.13 $ spack install libelf@0.8.13
Spack sees the package name in the spec and looks for Spack sees the package name in the spec and looks for
``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``. ``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
Likewise, if you run ``spack install py-numpy``, Spack looks for Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``. ``py-numpy/package.py``.
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
In order to handle this, you can define a ``url_for_version()`` function In order to handle this, you can define a ``url_for_version()`` function
like so: like so:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.url_for_version :pyobject: Openmpi.url_for_version
With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1`` With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute: ``gnu_mirror_path`` attribute:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
:lines: 9-18 :lines: 9-18
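As a sketch of the pattern (package name, version, and checksum are placeholders):

.. code-block:: python

   from spack.package import *


   class Hello(AutotoolsPackage, GNUMirrorPackage):
       """Hypothetical GNU package; only the mirror path needs defining."""

       # Relative path of the tarball under any GNU mirror.
       gnu_mirror_path = "hello/hello-2.12.tar.gz"

       version("2.12", sha256="<checksum>")  # placeholder checksum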
^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1995,7 +1995,7 @@ structure like this:
.. code-block:: none .. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/ $SPACK_ROOT/var/spack/repos/builtin/packages/
mvapich2/ mvapich2/
package.py package.py
ad_lustre_rwcontig_open_source.patch ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:
.. _pyside-patch: .. _pyside-patch:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
:pyobject: PyPyside.patch :pyobject: PyPyside.patch
:linenos: :linenos:
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::
$ spack resource show 3877ab54 $ spack resource show 3877ab54
3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00 3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
applies to: builtin.m4 applies to: builtin.m4
``spack resource show`` looks up downloadable resources from package ``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64 ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
$ spack resource show b37164268 $ spack resource show b37164268
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
applies to: builtin.boost applies to: builtin.boost
patched by: builtin.dealii patched by: builtin.dealii
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:
The Qt package, for instance, uses this call: The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment :pyobject: Qt.setup_dependent_build_environment
:linenos: :linenos:
@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An :meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package: example of this can be found in the ``Python`` package:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package :pyobject: Python.setup_dependent_package
:linenos: :linenos:
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance, build system specific helper methods or attributes to provide, for instance,
configure arguments: configure arguments:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
:pyobject: M4.configure_args :pyobject: M4.configure_args
:linenos: :linenos:
@@ -4110,7 +4110,7 @@ Shell command functions
Recall the install method from ``libelf``: Recall the install method from ``libelf``:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:pyobject: Libelf.install :pyobject: Libelf.install
:linenos: :linenos:
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
additional properties on it to help you out. E.g., in openmpi, you'll additional properties on it to help you out. E.g., in openmpi, you'll
find this: find this:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.setup_dependent_package :pyobject: Openmpi.setup_dependent_package
That code allows the ``openmpi`` package to associate an ``mpicc`` property That code allows the ``openmpi`` package to associate an ``mpicc`` property
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.
* - Parent/Provider Package * - Parent/Provider Package
- Stand-alone Tests - Stand-alone Tests
* - `C * - `C
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
- Compiles ``hello.c`` and runs it - Compiles ``hello.c`` and runs it
* - `Cxx * - `Cxx
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
- Compiles and runs several ``hello`` programs - Compiles and runs several ``hello`` programs
* - `Fortran * - `Fortran
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
- Compiles and runs ``hello`` programs (``F`` and ``f90``) - Compiles and runs ``hello`` programs (``F`` and ``f90``)
* - `Mpi * - `Mpi
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``) - Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
* - :ref:`PythonPackage <pythonpackage>` * - :ref:`PythonPackage <pythonpackage>`
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball - Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
One example of a package that adds its own stand-alone tests to those One example of a package that adds its own stand-alone tests to those
"inherited" by the virtual package it provides an implementation for is "inherited" by the virtual package it provides an implementation for is
the `Openmpi package the `Openmpi package
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_. <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
Below are snippets from running and viewing the stand-alone test results Below are snippets from running and viewing the stand-alone test results
for ``openmpi``: for ``openmpi``:
@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
================================= =================================
Spack comes with thousands of built-in package recipes in Spack comes with thousands of built-in package recipes in
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a ``var/spack/repos/builtin/``. This is a **package repository** -- a
directory that Spack searches when it needs to find a package by name. directory that Spack searches when it needs to find a package by name.
You may need to maintain packages for restricted, proprietary or You may need to maintain packages for restricted, proprietary or
experimental software separately from the built-in repository. Spack experimental software separately from the built-in repository. Spack
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
The file starts with ``repos:`` and contains a single ordered list of The file starts with ``repos:`` and contains a single ordered list of
paths to repositories. Each path is on a separate line starting with paths to repositories. Each path is on a separate line starting with
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- /opt/repos/spack_repo/local_repo - /opt/local-repo
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``, When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
it searches these repositories in order (first to last) to resolve each it searches these repositories in order (first to last) to resolve each
package name. In this example, Spack will look for the following package name. In this example, Spack will look for the following
packages and use the first valid file: packages and use the first valid file:
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py`` 1. ``/opt/local-repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py`` 2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
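If both repositories provide ``mpich``, a namespace-qualified spec picks a copy explicitly (``local_repo`` is the hypothetical repository above):

.. code-block:: console

   $ spack install builtin.mpich      # force the builtin copy
   $ spack install local_repo.mpich   # force the local copy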
.. note:: .. note::
@@ -101,15 +101,14 @@ Namespaces
Every repository in Spack has an associated **namespace** defined in its Every repository in Spack has an associated **namespace** defined in its
top-level ``repo.yaml`` file. If you look at top-level ``repo.yaml`` file. If you look at
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll ``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
see that its namespace is ``builtin``: see that its namespace is ``builtin``:
.. code-block:: console .. code-block:: console
$ cat var/spack/repos/spack_repo/builtin/repo.yaml $ cat var/spack/repos/builtin/repo.yaml
repo: repo:
namespace: builtin namespace: builtin
api: v2.0
Spack records the repository namespace of each installed package. For Spack records the repository namespace of each installed package. For
example, if you install the ``mpich`` package from the ``builtin`` repo, example, if you install the ``mpich`` package from the ``builtin`` repo,
@@ -218,15 +217,15 @@ Suppose you have three repositories: the builtin Spack repo
repo containing your own prototype packages (``proto``). Suppose they repo containing your own prototype packages (``proto``). Suppose they
contain packages as follows: contain packages as follows:
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| Namespace | Path to repo | Packages | | Namespace | Path to repo | Packages |
+==============+===============================================+=============================+ +==============+====================================+=============================+
| ``proto`` | ``~/my_spack_repos/spack_repo/proto`` | ``mpich`` | | ``proto`` | ``~/proto`` | ``mpich`` |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| ``llnl`` | ``/usr/local/repos/spack_repo/llnl`` | ``hdf5`` | | ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| ``builtin`` | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others | | ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
Suppose that ``hdf5`` depends on ``mpich``. You can override the Suppose that ``hdf5`` depends on ``mpich``. You can override the
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``: built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
@@ -234,8 +233,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- /usr/local/repos/spack_repo/llnl - /usr/local/llnl
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``. ``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
@@ -244,9 +243,9 @@ If, instead, ``repos.yaml`` looks like this:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- ~/my_spack_repos/spack_repo/proto - ~/proto
- /usr/local/repos/spack_repo/llnl - /usr/local/llnl
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``. ``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
@@ -327,8 +326,8 @@ files, use ``spack repo list``.
$ spack repo list $ spack repo list
==> 2 package repositories. ==> 2 package repositories.
myrepo v2.0 ~/my_spack_repos/spack_repo/myrepo myrepo ~/myrepo
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
Each repository is listed with its associated namespace. To get the raw, Each repository is listed with its associated namespace. To get the raw,
merged YAML from all configuration files, use ``spack config get repos``: merged YAML from all configuration files, use ``spack config get repos``:
@@ -336,9 +335,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
.. code-block:: console .. code-block:: console
$ spack config get repos $ spack config get repos
repos: repos:
- ~/my_spack_repos/spack_repo/myrepo - ~/myrepo
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
Note that, unlike ``spack repo list``, this does not include the Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``. namespace, which is read from each repo's ``repo.yaml``.
@@ -352,54 +351,66 @@ yourself; you can use the ``spack repo create`` command.
.. code-block:: console .. code-block:: console
$ spack repo create ~/my_spack_repos myrepo $ spack repo create myrepo
==> Created repo with namespace 'myrepo'. ==> Created repo with namespace 'myrepo'.
==> To register it with spack, run this command: ==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/myrepo spack repo add ~/myrepo
$ ls ~/my_spack_repos/spack_repo/myrepo $ ls myrepo
packages/ repo.yaml packages/ repo.yaml
$ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml $ cat myrepo/repo.yaml
repo: repo:
namespace: 'myrepo' namespace: 'myrepo'
api: v2.0
Namespaces can also be nested, which can be useful if you have By default, the namespace of a new repo matches its directory's name.
multiple package repositories for an organization. Spack will You can supply a custom namespace with a second argument, e.g.:
create the corresponding directory structure for you:
.. code-block:: console .. code-block:: console
$ spack repo create ~/my_spack_repos llnl.comp $ spack repo create myrepo llnl.comp
==> Created repo with namespace 'llnl.comp'. ==> Created repo with namespace 'llnl.comp'.
==> To register it with spack, run this command: ==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/llnl/comp spack repo add ~/myrepo
$ cat myrepo/repo.yaml
$ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
repo: repo:
namespace: 'llnl.comp' namespace: 'llnl.comp'
api: v2.0
You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:
.. code-block:: console
$ spack repo create -d applications myrepo apps
==> Created repo with namespace 'apps'.
==> To register it with Spack, run this command:
spack repo add ~/myrepo
$ ls myrepo
applications/ repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: apps
subdirectory: applications
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
``spack repo add`` ``spack repo add``
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Once your repository is created, you can register it with Spack with Once your repository is created, you can register it with Spack with
``spack repo add``. You need to specify the path to the directory that ``spack repo add``:
contains the ``repo.yaml`` file.
.. code-block:: console .. code-block:: console
$ spack repo add ~/my_spack_repos/spack_repo/llnl/comp $ spack repo add ./myrepo
==> Added repo with namespace 'llnl.comp'. ==> Added repo with namespace 'llnl.comp'.
$ spack repo list $ spack repo list
==> 2 package repositories. ==> 2 package repositories.
llnl.comp v2.0 ~/my_spack_repos/spack_repo/llnl/comp llnl.comp ~/myrepo
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
This simply adds the repo to your ``repos.yaml`` file. This simply adds the repo to your ``repos.yaml`` file.
@@ -421,43 +432,46 @@ By namespace:
.. code-block:: console .. code-block:: console
$ spack repo rm llnl.comp $ spack repo rm llnl.comp
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'. ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
$ spack repo list $ spack repo list
==> 1 package repository. ==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
By path: By path:
.. code-block:: console .. code-block:: console
$ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp $ spack repo rm ~/myrepo
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp ==> Removed repository ~/myrepo
$ spack repo list $ spack repo list
==> 1 package repository. ==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
-------------------------------- --------------------------------
Repo namespaces and Python Repo namespaces and Python
-------------------------------- --------------------------------
Package repositories are implemented as Python packages. To be precise, You may have noticed that namespace notation for repositories is similar
they are `namespace packages to the notation for namespaces in Python. As it turns out, you *can*
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_ treat Spack repositories like Python packages; this is how they are
with ``spack_repo`` the top-level namespace, followed by the repository implemented.
namespace as submodules. For example, the builtin repository corresponds
to the Python module ``spack_repo.builtin.packages``.
This structure allows you to extend a ``builtin`` package in your own You could, for example, extend a ``builtin`` package in your own
repository: repository:
.. code-block:: python .. code-block:: python
from spack_repo.builtin.packages.mpich.package import Mpich from spack.pkg.builtin.mpich import Mpich
class MyPackage(Mpich): class MyPackage(Mpich):
... ...
Spack populates ``sys.path`` at runtime with the path to the root of your Spack repo namespaces are actually Python namespaces tacked on under
package repository's ``spack_repo`` directory. ``spack.pkg``. The search semantics of ``repos.yaml`` are actually
implemented using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.
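Concretely (a sketch with a hypothetical repo root): if your repository lives at ``~/my_spack_repos/spack_repo/myrepo``, Spack adds ``~/my_spack_repos`` to ``sys.path``, and its packages resolve as:

.. code-block:: python

   # importable because ~/my_spack_repos is on sys.path
   from spack_repo.myrepo.packages.mpich.package import Mpich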
@@ -5,9 +5,9 @@ sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1 python-levenshtein==0.27.1
docutils==0.21.2 docutils==0.21.2
pygments==2.19.1 pygments==2.19.1
urllib3==2.4.0 urllib3==2.3.0
pytest==8.3.5 pytest==8.3.5
isort==6.0.1 isort==6.0.1
black==25.1.0 black==25.1.0
flake8==7.2.0 flake8==7.1.2
mypy==1.11.1 mypy==1.11.1
@@ -11,7 +11,6 @@
 * Homepage: https://altgraph.readthedocs.io/en/latest/index.html
 * Usage: dependency of macholib
 * Version: 0.17.3
-* License: MIT

 archspec
 --------
@@ -19,7 +18,6 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
 * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
-* License: Apache-2.0 or MIT

 astunparse
 ----------------
@@ -27,7 +25,6 @@
 * Homepage: https://github.com/simonpercivall/astunparse
 * Usage: Unparsing Python ASTs for package hashes in Spack
 * Version: 1.6.3 (plus modifications)
-* License: PSF-2.0
 * Note: This is in ``spack.util.unparse`` because it's very heavily
   modified, and we want to track coverage for it.
   Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -44,7 +41,6 @@
 * Homepage: https://github.com/python-attrs/attrs
 * Usage: Needed by jsonschema.
 * Version: 22.1.0
-* License: MIT

 ctest_log_parser
 ----------------
@@ -52,7 +48,6 @@
 * Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
 * Usage: Functions to parse build logs and extract error messages.
 * Version: Unversioned
-* License: BSD-3-Clause
 * Note: This is a homemade port of Kitware's CTest build handler.

 distro
@@ -61,7 +56,6 @@
 * Homepage: https://pypi.python.org/pypi/distro
 * Usage: Provides a more stable linux distribution detection.
 * Version: 1.8.0
-* License: Apache-2.0

 jinja2
 ------
@@ -69,7 +63,6 @@
 * Homepage: https://pypi.python.org/pypi/Jinja2
 * Usage: A modern and designer-friendly templating language for Python.
 * Version: 3.0.3 (last version supporting Python 3.6)
-* License: BSD-3-Clause

 jsonschema
 ----------
@@ -77,7 +70,6 @@
 * Homepage: https://pypi.python.org/pypi/jsonschema
 * Usage: An implementation of JSON Schema for Python.
 * Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
-* License: MIT
 * Note: We don't include tests or benchmarks; just what Spack needs.

 macholib
@@ -86,7 +78,6 @@
 * Homepage: https://macholib.readthedocs.io/en/latest/index.html#
 * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
 * Version: 1.16.2
-* License: MIT

 markupsafe
 ----------
@@ -94,7 +85,6 @@
 * Homepage: https://pypi.python.org/pypi/MarkupSafe
 * Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
 * Version: 2.0.1 (last version supporting Python 3.6)
-* License: BSD-3-Clause

 pyrsistent
 ----------
@@ -102,7 +92,6 @@
 * Homepage: http://github.com/tobgu/pyrsistent/
 * Usage: Needed by `jsonschema`
 * Version: 0.18.0
-* License: MIT

 ruamel.yaml
 ------
@@ -112,7 +101,6 @@
   actively maintained and has more features, including round-tripping
   comments read from config files.
 * Version: 0.17.21
-* License: MIT

 six
 ---
@@ -120,6 +108,5 @@
 * Homepage: https://pypi.python.org/pypi/six
 * Usage: Python 2 and 3 compatibility utilities.
 * Version: 1.16.0
-* License: MIT
 """

View File

@@ -764,7 +764,7 @@ def copy_tree(
     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)
+        raise OSError("No such file or directory: '{0}'".format(src))

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all

View File

@@ -15,20 +15,7 @@
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Generic,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"

@@ -437,39 +424,46 @@ def add_func_to_class(name, func):
     return cls


-K = TypeVar("K")
-V = TypeVar("V")
-
-
 @lazy_lexicographic_ordering
-class HashableMap(typing.MutableMapping[K, V]):
+class HashableMap(collections.abc.MutableMapping):
     """This is a hashable, comparable dictionary. Hash is performed on
     a tuple of the values in the dictionary."""

     __slots__ = ("dict",)

     def __init__(self):
-        self.dict: Dict[K, V] = {}
+        self.dict = {}

-    def __getitem__(self, key: K) -> V:
+    def __getitem__(self, key):
         return self.dict[key]

-    def __setitem__(self, key: K, value: V) -> None:
+    def __setitem__(self, key, value):
         self.dict[key] = value

-    def __iter__(self) -> Iterator[K]:
+    def __iter__(self):
         return iter(self.dict)

-    def __len__(self) -> int:
+    def __len__(self):
         return len(self.dict)

-    def __delitem__(self, key: K) -> None:
+    def __delitem__(self, key):
         del self.dict[key]

     def _cmp_iter(self):
         for _, v in sorted(self.items()):
             yield v

+    def copy(self):
+        """Type-agnostic clone method. Preserves subclass type."""
+        # Construct a new dict of my type
+        self_type = type(self)
+        clone = self_type()
+
+        # Copy everything from this dict into it.
+        for key in self:
+            clone[key] = self[key].copy()
+        return clone
+

 def match_predicate(*args):
     """Utility function for making string matching predicates.

@@ -1053,28 +1047,19 @@ def __exit__(self, exc_type, exc_value, tb):
         return True


-ClassPropertyType = TypeVar("ClassPropertyType")
-
-
-class classproperty(Generic[ClassPropertyType]):
+class classproperty:
     """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and takes an owner (i.e., the class that
-    originated the instance).
+    the evaluation is injected at creation time and take an instance (could be None) and
+    an owner (i.e. the class that originated the instance)
     """

-    def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
+    def __init__(self, callback):
         self.callback = callback

-    def __get__(self, instance, owner) -> ClassPropertyType:
+    def __get__(self, instance, owner):
         return self.callback(owner)


-#: A type alias that represents either a classproperty descriptor or a constant value of the same
-#: type. This allows derived classes to override a computed class-level property with a constant
-#: value while retaining type compatibility.
-ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
-
-
 class DeprecatedProperty:
     """Data descriptor to error or warn when a deprecated property is accessed.

View File

@@ -18,7 +18,7 @@
 #: version is incremented when the package API is extended in a backwards-compatible way. The major
 #: version is incremented upon breaking changes. This version is changed independently from the
 #: Spack version.
-package_api_version = (2, 0)
+package_api_version = (1, 0)

 #: The minimum Package API version that this version of Spack is compatible with. This should
 #: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
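
Note: both sides keep the version as a tuple so compatibility can be checked with ordinary tuple comparison. A sketch of one plausible reading of the rule described in the comments (the helper and the minimum value are assumptions, not code from this diff):

    package_api_version = (2, 0)      # value on the left-hand side
    min_package_api_version = (1, 0)  # assumed minimum, per the comment above

    def supports(required: tuple) -> bool:
        # a repo is supported if its required API falls inside the range
        return min_package_api_version <= required <= package_api_version

    assert supports((1, 0)) and not supports((3, 0))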

View File

@@ -7,7 +7,7 @@
     "llvm": "clang",
     "intel-oneapi-compilers": "oneapi",
     "llvm-amdgpu": "rocmcc",
-    "intel-oneapi-compilers-classic": "intel",
+    "intel-oneapi-compiler-classic": "intel",
     "acfl": "arm",
 }

@@ -15,6 +15,6 @@
     "clang": "llvm",
     "oneapi": "intel-oneapi-compilers",
     "rocmcc": "llvm-amdgpu",
-    "intel": "intel-oneapi-compilers-classic",
+    "intel": "intel-oneapi-compiler-classic",
     "arm": "acfl",
 }
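
Note: the two dictionaries are inverses of each other, mapping compiler package names to legacy compiler names and back. A tiny sketch of the round-trip property the corrected spelling preserves (dict contents abbreviated):

    package_to_compiler = {"llvm": "clang", "intel-oneapi-compilers": "oneapi"}
    compiler_to_package = {v: k for k, v in package_to_compiler.items()}

    assert compiler_to_package[package_to_compiler["llvm"]] == "llvm"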

View File

@@ -133,7 +133,7 @@ def mypy_root_spec() -> str:

 def black_root_spec() -> str:
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:25.1.0")
+    return _root_spec("py-black@:24.1.0")


 def flake8_root_spec() -> str:

View File

@@ -36,11 +36,9 @@
 import multiprocessing
 import os
 import re
-import signal
 import sys
 import traceback
 import types
-import warnings
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain

@@ -574,10 +572,12 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
     module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
     module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
+    # TODO: johnwparent: add package or builder support to define these build tools
+    # for now there is no entrypoint for builders to define these on their
+    # own
     if sys.platform == "win32":
-        module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
-        module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
+        module.nmake = Executable("nmake")
+        module.msbuild = Executable("msbuild")
         # analog to configure for win32
         module.cscript = Executable("cscript")

@@ -1189,9 +1189,11 @@ def _setup_pkg_and_run(
         if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
-                "The '{}' package cannot find an attribute while trying to {}. You can fix this "
-                "by updating the {} recipe, and you can also report the issue as a build-error or "
-                "a bug at https://github.com/spack/spack/issues"
+                "The '{}' package cannot find an attribute while trying to {}. "
+                "This might be due to a change in Spack's package format "
+                "to support multiple build-systems for a single package. You can fix this "
+                "by updating the {} recipe, and you can also report the issue as a bug. "
+                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
             error_msg = "{}\n\n{}".format(str(e), error_msg)

@@ -1216,45 +1218,15 @@
     input_pipe.close()


-class BuildProcess:
-    def __init__(self, *, target, args) -> None:
-        self.p = multiprocessing.Process(target=target, args=args)
-
-    def start(self) -> None:
-        self.p.start()
-
-    def is_alive(self) -> bool:
-        return self.p.is_alive()
-
-    def join(self, *, timeout: Optional[int] = None):
-        self.p.join(timeout=timeout)
-
-    def terminate(self):
-        # Opportunity for graceful termination
-        self.p.terminate()
-        self.p.join(timeout=1)
-
-        # If the process didn't gracefully terminate, forcefully kill
-        if self.p.is_alive():
-            # TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
-            assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
-            os.kill(self.p.pid, signal.SIGKILL)
-            self.p.join()
-
-    @property
-    def exitcode(self):
-        return self.p.exitcode
-
-
-def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
+def start_build_process(pkg, function, kwargs):
     """Create a child process to do part of a spack build.

     Args:

         pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
-        function (typing.Callable): argless function to run in the child process.
-        timeout: maximum time allowed to finish the execution of function
+        function (typing.Callable): argless function to run in the child
+            process.

     Usage::

@@ -1282,14 +1254,14 @@ def child_fun():
     # Forward sys.stdin when appropriate, to allow toggling verbosity
     if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
         input_fd = Connection(os.dup(sys.stdin.fileno()))
-    mflags = os.environ.get("MAKEFLAGS")
-    if mflags is not None:
+    mflags = os.environ.get("MAKEFLAGS", False)
+    if mflags:
         m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
         if m:
             jobserver_fd1 = Connection(int(m.group(1)))
             jobserver_fd2 = Connection(int(m.group(2)))

-    p = BuildProcess(
+    p = multiprocessing.Process(
         target=_setup_pkg_and_run,
         args=(
             serialized_pkg,

@@ -1323,17 +1295,14 @@ def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
         return f"{typ} {abs(p.exitcode)}"

-    p.join(timeout=timeout)
-    if p.is_alive():
-        warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
-        p.terminate()
-        p.join()
-
     try:
         child_result = read_pipe.recv()
     except EOFError:
+        p.join()
         raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")

+    p.join()
+
     # If returns a StopPhase, raise it
     if isinstance(child_result, spack.error.StopPhase):
         # do not print
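
Note: the left column's BuildProcess wraps multiprocessing.Process so a hung build can be stopped after a timeout, first gracefully and then with SIGKILL. A standalone sketch of that join/terminate/kill pattern (POSIX-only because of os.kill with SIGKILL):

    import multiprocessing
    import os
    import signal
    import time

    def work():
        time.sleep(60)  # stand-in for a build that hangs

    if __name__ == "__main__":
        p = multiprocessing.Process(target=work)
        p.start()
        p.join(timeout=1)            # wait up to the timeout
        if p.is_alive():
            p.terminate()            # graceful SIGTERM first
            p.join(timeout=1)
            if p.is_alive():         # forceful fallback, as in the diff
                os.kill(p.pid, signal.SIGKILL)
                p.join()
        print("exitcode:", p.exitcode)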

View File

@@ -16,7 +16,6 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
-import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

@@ -847,9 +846,7 @@ def _remove_libtool_archives(self) -> None:
         with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
             f.write("\n".join(libtool_files))

-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
+    def setup_build_environment(self, env):
         if self.spec.platform == "darwin" and macos_version() >= Version("11"):
             # Many configure files rely on matching '10.*' for macOS version
             # detection and fail to add flags if it shows as version 11.

View File

@@ -2,10 +2,9 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections.abc
-import enum
 import os
 import re
-from typing import Optional, Tuple
+from typing import Tuple

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

@@ -14,7 +13,6 @@
 import spack.spec
 import spack.util.prefix
 from spack.directives import depends_on
-from spack.util.executable import which_string

 from .cmake import CMakeBuilder, CMakePackage

@@ -180,64 +178,6 @@ def initconfig_compiler_entries(self):
         return entries

-    class Scheduler(enum.Enum):
-        LSF = enum.auto()
-        SLURM = enum.auto()
-        FLUX = enum.auto()
-
-    def get_scheduler(self) -> Optional[Scheduler]:
-        spec = self.pkg.spec
-
-        # Check for Spectrum-mpi, which always uses LSF or LSF MPI variant
-        if spec.satisfies("^spectrum-mpi") or spec["mpi"].satisfies("schedulers=lsf"):
-            return self.Scheduler.LSF
-
-        # Check for Slurm MPI variants
-        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
-        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
-            return self.Scheduler.SLURM
-
-        # TODO improve this when MPI implementations support flux
-        # Do this check last to avoid using a flux wrapper present next to Slurm/ LSF schedulers
-        if which_string("flux") is not None:
-            return self.Scheduler.FLUX
-
-        return None
-
-    def get_mpi_exec(self) -> Optional[str]:
-        spec = self.pkg.spec
-        scheduler = self.get_scheduler()
-
-        if scheduler == self.Scheduler.LSF:
-            return which_string("lrun")
-
-        elif scheduler == self.Scheduler.SLURM:
-            if spec["mpi"].external:
-                return which_string("srun")
-            else:
-                return os.path.join(spec["slurm"].prefix.bin, "srun")
-
-        elif scheduler == self.Scheduler.FLUX:
-            flux = which_string("flux")
-            return f"{flux};run" if flux else None
-
-        elif hasattr(spec["mpi"].package, "mpiexec"):
-            return spec["mpi"].package.mpiexec
-
-        else:
-            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
-            if not os.path.exists(mpiexec):
-                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
-            return mpiexec
-
-    def get_mpi_exec_num_proc(self) -> str:
-        scheduler = self.get_scheduler()
-
-        if scheduler in [self.Scheduler.FLUX, self.Scheduler.LSF, self.Scheduler.SLURM]:
-            return "-n"
-        else:
-            return "-np"
-
     def initconfig_mpi_entries(self):
         spec = self.pkg.spec

@@ -257,10 +197,27 @@ def initconfig_mpi_entries(self):
         if hasattr(spec["mpi"], "mpifc"):
             entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

-        # Determine MPIEXEC
-        mpiexec = self.get_mpi_exec()
+        # Check for slurm
+        using_slurm = False
+        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
+        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
+            using_slurm = True

-        if mpiexec is None or not os.path.exists(mpiexec.split(";")[0]):
+        # Determine MPIEXEC
+        if using_slurm:
+            if spec["mpi"].external:
+                # Heuristic until we have dependents on externals
+                mpiexec = "/usr/bin/srun"
+            else:
+                mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
+        elif hasattr(spec["mpi"].package, "mpiexec"):
+            mpiexec = spec["mpi"].package.mpiexec
+        else:
+            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
+            if not os.path.exists(mpiexec):
+                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
+
+        if not os.path.exists(mpiexec):
             msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
             entries.append("# {0}\n".format(msg))
             tty.warn(msg)

@@ -273,7 +230,10 @@ def initconfig_mpi_entries(self):
             entries.append(cmake_cache_path("MPIEXEC", mpiexec))

         # Determine MPIEXEC_NUMPROC_FLAG
-        entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", self.get_mpi_exec_num_proc()))
+        if using_slurm:
+            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-n"))
+        else:
+            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-np"))

         return entries

@@ -316,18 +276,30 @@ def initconfig_hardware_entries(self):
             entries.append("# ROCm")
             entries.append("#------------------{0}\n".format("-" * 30))

-            rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
-            entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
+            if spec.satisfies("^blt@0.7:"):
+                rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
+                entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
+            else:
+                # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
+                entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+                llvm_bin = spec["llvm-amdgpu"].prefix.bin
+                llvm_prefix = spec["llvm-amdgpu"].prefix
+                # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
+                # others point to /<path>/rocm-<ver>/llvm
+                if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
+                    llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
+                entries.append(
+                    cmake_cache_filepath(
+                        "CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
+                    )
+                )

             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
                 entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

-            llvm_bin = spec["llvm-amdgpu"].prefix.bin
-            entries.append(
-                cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++"))
-            )

             if spec.satisfies("%gcc"):
                 entries.append(

@@ -336,15 +308,6 @@ def initconfig_hardware_entries(self):
                 )
             )

-        # Extra definitions that might be required in other cases
-        if not spec.satisfies("^blt"):
-            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
-                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
-
         return entries

     def std_initconfig_entries(self):
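
Note: the cmake_cache_* helpers used throughout this builder emit entries for a CMake initial-cache ("host-config") file that is later passed to cmake -C. A sketch of the entry format they are expected to produce (the exact helper signatures here are assumptions based on CMake's documented set(... CACHE ...) syntax, not code from this diff):

    def cmake_cache_string(name, value, comment=""):
        return f'set({name} "{value}" CACHE STRING "{comment}")\n'

    def cmake_cache_path(name, value, comment=""):
        return f'set({name} "{value}" CACHE PATH "{comment}")\n'

    print(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-np"), end="")
    # -> set(MPIEXEC_NUMPROC_FLAG "-np" CACHE STRING "")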

View File

@@ -8,7 +8,6 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
-import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

@@ -87,9 +86,7 @@ def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []

-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
+    def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)

     def build(
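
Note: with either signature, env is an EnvironmentModifications object that records operations for later replay rather than mutating os.environ directly. A sketch of that pattern (the CARGO_HOME path is illustrative only):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set("CARGO_HOME", "/tmp/spack-stage")  # recorded, not applied yet
    env.unset("RUSTFLAGS")
    env.apply_modifications()                  # replays the recorded edits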

View File

@@ -47,11 +47,6 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Relative path to compiler wrappers
     compiler_wrapper_link_paths: Dict[str, str] = {}

-    #: Optimization flags
-    opt_flags: Sequence[str] = []
-
-    #: Flags for generating debug information
-    debug_flags: Sequence[str] = []

     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"

View File

@@ -8,7 +8,6 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
-import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

@@ -69,9 +68,7 @@ class GoBuilder(BuilderWithDefaults):
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]

-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
+    def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
         env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

View File

@@ -23,7 +23,6 @@
 import spack.error
 import spack.phase_callbacks
-import spack.spec
 from spack.build_environment import dso_suffix
 from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications

@@ -1017,7 +1016,7 @@ def libs(self):
         debug_print(result)
         return result

-    def setup_run_environment(self, env: EnvironmentModifications) -> None:
+    def setup_run_environment(self, env):
         """Adds environment variables to the generated module file.

         These environment variables come from running:

@@ -1050,13 +1049,11 @@ def setup_run_environment(self, env: EnvironmentModifications) -> None:
             env.set("F77", self.prefix.bin.ifort)
             env.set("F90", self.prefix.bin.ifort)

-    def setup_dependent_build_environment(
-        self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
-    ) -> None:
+    def setup_dependent_build_environment(self, env, dependent_spec):
         # NB: This function is overwritten by 'mpi' provider packages:
         #
-        #  var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
-        #  var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
+        #  var/spack/repos/builtin/packages/intel-mpi/package.py
+        #  var/spack/repos/builtin/packages/intel-parallel-studio/package.py
         #
         # They call _setup_dependent_env_callback() as well, but with the
         # dictionary kwarg compilers_of_client{} present and populated.

@@ -1064,12 +1061,7 @@ def setup_dependent_build_environment(
         # Handle everything in a callback version.
         self._setup_dependent_env_callback(env, dependent_spec)

-    def _setup_dependent_env_callback(
-        self,
-        env: EnvironmentModifications,
-        dependent_spec: spack.spec.Spec,
-        compilers_of_client={},
-    ) -> None:
+    def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
         # Expected to be called from a client's
         # setup_dependent_build_environment(),
         # with args extended to convey the client's compilers as needed.

View File

@@ -8,7 +8,6 @@
 import spack.builder
 import spack.package_base
 import spack.spec
-import spack.util.environment
 import spack.util.executable
 import spack.util.prefix
 from spack.directives import build_system, depends_on, extends

@@ -115,7 +114,5 @@ def install(
     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")

-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
+    def setup_build_environment(self, env):
         env.set("LUAROCKS_CONFIG", self._luarocks_config_path())

View File

@@ -4,7 +4,6 @@
 import spack.builder
 import spack.package_base
 import spack.spec
-import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when

@@ -58,9 +57,7 @@ def install(
             "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
         )

-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
+    def setup_build_environment(self, env):
         # octave does not like those environment variables to be set:
         env.unset("CC")
         env.unset("CXX")

View File

@@ -106,8 +106,8 @@ def install_component(self, installer_path):
             bash = Executable("bash")

-            # Installer writes files in ~/intel set HOME so it goes to staging directory
-            bash.add_default_env("HOME", join_path(self.stage.path, "home"))
+            # Installer writes files in ~/intel set HOME so it goes to prefix
+            bash.add_default_env("HOME", self.prefix)

             # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
             bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))

@@ -132,7 +132,7 @@ def install_component(self, installer_path):
         if not isdir(install_dir):
             raise RuntimeError("install failed to directory: {0}".format(install_dir))

-    def setup_run_environment(self, env: EnvironmentModifications) -> None:
+    def setup_run_environment(self, env):
         """Adds environment variables to the generated module file.

         These environment variables come from running:

@@ -311,4 +311,4 @@ def ld_flags(self):

 #: Tuple of Intel math libraries, exported to packages
-INTEL_MATH_LIBRARIES = ("intel-oneapi-mkl",)
+INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")

View File

@@ -13,9 +13,9 @@
 import archspec

 import llnl.util.filesystem as fs
+import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import HeaderList, LibraryList, join_path
-from llnl.util.lang import ClassProperty, classproperty, match_predicate

 import spack.builder
 import spack.config

@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
             ext_map = view.extensions_layout.extension_map(self.extendee_spec)
             namespaces = set(x.package.py_namespace for x in ext_map.values())
             namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
-            find_namespace = match_predicate(namespace_re)
+            find_namespace = lang.match_predicate(namespace_re)
             if self.py_namespace in namespaces:
                 conflicts = list(x for x in conflicts if not find_namespace(x))

@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
             spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
         )
         if self.py_namespace in remaining_namespaces:
-            namespace_init = match_predicate(
+            namespace_init = lang.match_predicate(
                 r"site-packages/{0}/__init__.py".format(self.py_namespace)
             )
             ignore_namespace = True

@@ -324,27 +324,6 @@ def get_external_python_for_prefix(self):
     raise StopIteration("No external python could be detected for %s to depend on" % self.spec)


-def _homepage(cls: "PythonPackage") -> Optional[str]:
-    """Get the homepage from PyPI if available."""
-    if cls.pypi:
-        name = cls.pypi.split("/")[0]
-        return f"https://pypi.org/project/{name}/"
-    return None
-
-
-def _url(cls: "PythonPackage") -> Optional[str]:
-    if cls.pypi:
-        return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
-    return None
-
-
-def _list_url(cls: "PythonPackage") -> Optional[str]:
-    if cls.pypi:
-        name = cls.pypi.split("/")[0]
-        return f"https://pypi.org/simple/{name}/"
-    return None
-
-
 class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""

@@ -372,9 +351,25 @@ class PythonPackage(PythonExtension):
     py_namespace: Optional[str] = None

-    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
-    url: ClassProperty[Optional[str]] = classproperty(_url)
-    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
+    @lang.classproperty
+    def homepage(cls) -> Optional[str]:  # type: ignore[override]
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return f"https://pypi.org/project/{name}/"
+        return None
+
+    @lang.classproperty
+    def url(cls) -> Optional[str]:
+        if cls.pypi:
+            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
+        return None
+
+    @lang.classproperty
+    def list_url(cls) -> Optional[str]:  # type: ignore[override]
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return f"https://pypi.org/simple/{name}/"
+        return None

     @property
     def python_spec(self) -> Spec:
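
Note: both formulations derive the same default URLs from the class-level pypi attribute. An illustration of the three derivations with a hypothetical value (requests is used only as an example; it is not part of this diff):

    pypi = "requests/requests-2.31.0.tar.gz"
    name = pypi.split("/")[0]
    print(f"https://pypi.org/project/{name}/")
    # -> https://pypi.org/project/requests/
    print(f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}")
    # -> https://files.pythonhosted.org/packages/source/r/requests/requests-2.31.0.tar.gz
    print(f"https://pypi.org/simple/{name}/")
    # -> https://pypi.org/simple/requests/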

View File

@@ -3,8 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from typing import Optional, Tuple

+import llnl.util.lang as lang
 from llnl.util.filesystem import mkdirp
-from llnl.util.lang import ClassProperty, classproperty

 from spack.directives import extends

@@ -54,32 +54,6 @@ def install(self, pkg, spec, prefix):
         pkg.module.R(*args)


-def _homepage(cls: "RPackage") -> Optional[str]:
-    if cls.cran:
-        return f"https://cloud.r-project.org/package={cls.cran}"
-    elif cls.bioc:
-        return f"https://bioconductor.org/packages/{cls.bioc}"
-    return None
-
-
-def _url(cls: "RPackage") -> Optional[str]:
-    if cls.cran:
-        return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
-    return None
-
-
-def _list_url(cls: "RPackage") -> Optional[str]:
-    if cls.cran:
-        return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
-    return None
-
-
-def _git(cls: "RPackage") -> Optional[str]:
-    if cls.bioc:
-        return f"https://git.bioconductor.org/packages/{cls.bioc}"
-    return None
-
-
 class RPackage(Package):
     """Specialized class for packages that are built using R.

@@ -103,7 +77,24 @@ class RPackage(Package):

     extends("r")

-    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
-    url: ClassProperty[Optional[str]] = classproperty(_url)
-    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
-    git: ClassProperty[Optional[str]] = classproperty(_git)
+    @lang.classproperty
+    def homepage(cls):
+        if cls.cran:
+            return f"https://cloud.r-project.org/package={cls.cran}"
+        elif cls.bioc:
+            return f"https://bioconductor.org/packages/{cls.bioc}"
+
+    @lang.classproperty
+    def url(cls):
+        if cls.cran:
+            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+
+    @lang.classproperty
+    def list_url(cls):
+        if cls.cran:
+            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+
+    @lang.classproperty
+    def git(cls):
+        if cls.bioc:
+            return f"https://git.bioconductor.org/packages/{cls.bioc}"

View File

@@ -5,8 +5,8 @@
 from typing import Optional, Tuple

 import llnl.util.filesystem as fs
+import llnl.util.lang as lang
 import llnl.util.tty as tty
-from llnl.util.lang import ClassProperty, classproperty

 import spack.builder
 import spack.spec

@@ -19,12 +19,6 @@
 from spack.util.executable import Executable, ProcessError


-def _homepage(cls: "RacketPackage") -> Optional[str]:
-    if cls.racket_name:
-        return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
-    return None
-
-
 class RacketPackage(PackageBase):
     """Specialized class for packages that are built using Racket's
     `raco pkg install` and `raco setup` commands.

@@ -43,7 +37,13 @@ class RacketPackage(PackageBase):
     extends("racket", when="build_system=racket")

     racket_name: Optional[str] = None
-    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    parallel = True
+
+    @lang.classproperty
+    def homepage(cls):
+        if cls.racket_name:
+            return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
+        return None


 @spack.builder.builder("racket")

View File

@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
 def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
     """Return the builder class if a package module defines it."""
     cls = getattr(pkg.module, name, None)
-    if cls and spack.repo.is_package_module(cls.__module__):
+    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
         return cls
     return None

@@ -121,7 +121,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
             new_cls_name,
             bases,
             {
-                "__module__": package_cls.__module__,
                 "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                 "test_requires_compiler": property(
                     lambda x: x.wrapped_package_object.test_requires_compiler

@@ -130,6 +129,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
                 "tester": property(lambda x: x.wrapped_package_object.tester),
             },
         )
+        new_cls.__module__ = package_cls.__module__
         self.__class__ = new_cls
         self.__dict__.update(wrapped_pkg_object.__dict__)

@@ -185,16 +185,10 @@ def __init__(self, pkg):
         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a
         # catch-all (*args, **kwargs)
-        def setup_build_environment(
-            self, env: spack.util.environment.EnvironmentModifications
-        ) -> None:
+        def setup_build_environment(self, env):
            return self.pkg_with_dispatcher.setup_build_environment(env)

-        def setup_dependent_build_environment(
-            self,
-            env: spack.util.environment.EnvironmentModifications,
-            dependent_spec: spack.spec.Spec,
-        ) -> None:
+        def setup_dependent_build_environment(self, env, dependent_spec):
            return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)

     return Adapter(pkg)

@@ -408,7 +402,7 @@ def fixup_install(self):
             # do something after the package is installed
             pass

-        def setup_build_environment(self, env: EnvironmentModifications) -> None:
+        def setup_build_environment(self, env):
             env.set("MY_ENV_VAR", "my_value")

     class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
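
Note: the adapter builds its class at runtime with type(), and the two sides differ only in whether __module__ is set through the class dict or assigned afterwards; the observable result is the same. A minimal standalone sketch of that mechanism (all names hypothetical):

    class Wrapped:
        run_tests = True

    new_cls = type(
        "Adapter", (object,), {"run_tests": property(lambda self: self._w.run_tests)}
    )
    new_cls.__module__ = Wrapped.__module__  # the placement the diff moves around

    obj = new_cls()
    obj._w = Wrapped()
    assert obj.run_tests is True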

View File

@@ -14,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Optional, Set, Union
+from typing import Callable, Dict, List, Set, Union
 from urllib.request import Request

 import llnl.path

@@ -24,7 +24,6 @@
 import spack
 import spack.binary_distribution as bindist
-import spack.builder
 import spack.config as cfg
 import spack.environment as ev
 import spack.error

@@ -150,10 +149,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     return False


-def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
+def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
     """Determine which packages were added, removed or changed
     between rev1 and rev2, and return the names as a set"""
-    return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)
+    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)


 def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):

@@ -614,40 +613,32 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
     job_spec, and attempts to copy the files into the directory given
     by job_log_dir.

-    Parameters:
+    Args:
         job_spec: spec associated with spack install log
         job_log_dir: path into which build log should be copied
     """
     tty.debug(f"job spec: {job_spec}")

-    if not job_spec.concrete:
-        tty.warn("Cannot copy artifacts for non-concrete specs")
+    try:
+        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    except spack.error.SpackError as e:
+        tty.error(f"Cannot copy logs: {str(e)}")
         return

-    package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-    if not os.path.isdir(package_metadata_root):
-        # Fallback to using the stage directory
-        job_pkg = job_spec.package
-        package_metadata_root = pathlib.Path(job_pkg.stage.path)
-        archive_files = spack.builder.create(job_pkg).archive_files
-        tty.warn("Package not installed, falling back to use stage dir")
-        tty.debug(f"stage dir: {package_metadata_root}")
-    else:
-        # Get the package's archived files
-        archive_files = []
-        archive_root = package_metadata_root / "archived-files"
-        if os.path.isdir(archive_root):
-            archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
-        else:
-            tty.debug(f"No archived files detected at {archive_root}")
-
-    # Try zipped and unzipped versions of the build log
+    # Get the package's archived files
+    archive_files = []
+    archive_root = package_metadata_root / "archived-files"
+    if archive_root.is_dir():
+        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
+    else:
+        msg = "Cannot copy package archived files: archived-files must be a directory"
+        tty.warn(msg)
+
     build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
-    build_log = package_metadata_root / "spack-build-out.txt"
     build_env_mods = package_metadata_root / "spack-build-env.txt"

-    for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
-        copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)
+    for f in [build_log_zipped, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir)


@@ -660,12 +651,11 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     """
     tty.debug(f"test stage: {test_stage}")
     if not os.path.exists(test_stage):
-        tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
+        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
+        tty.error(msg)
         return

-    copy_files_to_artifacts(
-        os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
-    )
+    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


 def download_and_extract_artifacts(url, work_dir) -> str:

@@ -1304,34 +1294,35 @@ def display_broken_spec_messages(base_url, hashes):
         tty.msg(msg)


-def run_standalone_tests(
-    *,
-    cdash: Optional[CDashHandler] = None,
-    fail_fast: bool = False,
-    log_file: Optional[str] = None,
-    job_spec: Optional[spack.spec.Spec] = None,
-    repro_dir: Optional[str] = None,
-    timeout: Optional[int] = None,
-):
+def run_standalone_tests(**kwargs):
     """Run stand-alone tests on the current spec.

-    Args:
-        cdash: cdash handler instance
-        fail_fast: terminate tests after the first failure
-        log_file: test log file name if NOT CDash reporting
-        job_spec: spec that was built
-        repro_dir: reproduction directory
-        timeout: maximum time (in seconds) that tests are allowed to run
+    Arguments:
+        kwargs (dict): dictionary of arguments used to run the tests
+
+    List of recognized keys:
+
+    * "cdash" (CDashHandler): (optional) cdash handler instance
+    * "fail_fast" (bool): (optional) terminate tests after the first failure
+    * "log_file" (str): (optional) test log file name if NOT CDash reporting
+    * "job_spec" (Spec): spec that was built
+    * "repro_dir" (str): reproduction directory
     """
+    cdash = kwargs.get("cdash")
+    fail_fast = kwargs.get("fail_fast")
+    log_file = kwargs.get("log_file")
     if cdash and log_file:
         tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None

     # Error out but do NOT terminate if there are missing required arguments.
+    job_spec = kwargs.get("job_spec")
     if not job_spec:
         tty.error("Job spec is required to run stand-alone tests")
         return

+    repro_dir = kwargs.get("repro_dir")
     if not repro_dir:
         tty.error("Reproduction directory is required for stand-alone tests")
         return

@@ -1340,9 +1331,6 @@ def run_standalone_tests(
     if fail_fast:
         test_args.append("--fail-fast")

-    if timeout is not None:
-        test_args.extend(["--timeout", str(timeout)])
-
     if cdash:
         test_args.extend(cdash.args())
     else:
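
Note: the left column replaces the free-form **kwargs API with keyword-only parameters, so mistyped argument names fail at the call site instead of being silently ignored. A stub sketch contrasting the two styles (stub function and values illustrative, not the real API):

    from typing import Optional

    def run_standalone_tests_sketch(
        *, job_spec=None, repro_dir: Optional[str] = None,
        fail_fast: bool = False, timeout: Optional[int] = None
    ):
        if not job_spec or not repro_dir:
            return None  # mirrors the "error out but do NOT terminate" checks
        args = ["spack", "test", "run"] + (["--fail-fast"] if fail_fast else [])
        if timeout is not None:
            args += ["--timeout", str(timeout)]
        return args

    assert run_standalone_tests_sketch(
        job_spec="pkg", repro_dir="repro", timeout=300
    )[-1] == "300"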

View File

@@ -2,13 +2,9 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import copy
-import errno
-import glob
-import gzip
 import json
 import os
 import re
-import shutil
 import sys
 import time
 from collections import deque

@@ -29,7 +25,6 @@
 import spack.mirrors.mirror
 import spack.schema
 import spack.spec
-import spack.util.compression as compression
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -45,67 +40,22 @@
 _urlopen = web_util.urlopen


-def copy_gzipped(glob_or_path: str, dest: str) -> None:
-    """Copy all of the files in the source glob/path to the destination.
-
-    Args:
-        glob_or_path: path to file to test
-        dest: destination path to copy to
-    """
-
-    files = glob.glob(glob_or_path)
-    if not files:
-        raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
-    if len(files) > 1 and not os.path.isdir(dest):
-        raise ValueError(
-            "'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
-        )
-
-    def is_gzipped(path):
-        with open(path, "rb") as fd:
-            return compression.GZipFileType().matches_magic(fd)
-
-    for src in files:
-        if is_gzipped(src):
-            fs.copy(src, dest)
-        else:
-            # Compress and copy in one step
-            src_name = os.path.basename(src)
-            if os.path.isdir(dest):
-                zipped = os.path.join(dest, f"{src_name}.gz")
-            elif not dest.endswith(".gz"):
-                zipped = f"{dest}.gz"
-            else:
-                zipped = dest
-
-            with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
-                shutil.copyfileobj(fin, fout)
-
-
-def copy_files_to_artifacts(
-    src: str, artifacts_dir: str, *, compress_artifacts: bool = False
-) -> None:
+def copy_files_to_artifacts(src, artifacts_dir):
     """
     Copy file(s) to the given artifacts directory

-    Args:
+    Parameters:
         src (str): the glob-friendly path expression for the file(s) to copy
         artifacts_dir (str): the destination directory
-        compress_artifacts (bool): option to compress copied artifacts using Gzip
     """
     try:
-        if compress_artifacts:
-            copy_gzipped(src, artifacts_dir)
-        else:
-            fs.copy(src, artifacts_dir)
+        fs.copy(src, artifacts_dir)
     except Exception as err:
-        tty.warn(
-            (
-                f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
-                f"exception: {str(err)}"
-            )
-        )
+        msg = (
+            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
+            f"exception: {str(err)}"
+        )
+        tty.warn(msg)


 def win_quote(quote_str: str) -> str:
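
Note: the left column's copy_gzipped compresses not-yet-gzipped artifacts while copying. A standalone sketch of the compress-on-copy step it is built around:

    import gzip
    import os
    import shutil
    import tempfile

    src = tempfile.NamedTemporaryFile(delete=False, suffix=".txt")
    src.write(b"build log contents\n")
    src.close()

    dest = src.name + ".gz"
    with open(src.name, "rb") as fin, gzip.open(dest, "wb") as fout:
        shutil.copyfileobj(fin, fout)  # gzip.open compresses as it writes

    with gzip.open(dest, "rb") as fd:
        assert fd.read() == b"build log contents\n"
    os.unlink(src.name)
    os.unlink(dest)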

View File

@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
         all_headers (bool): show headers even when arch/compiler aren't defined
         status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``
-        specfile_format (bool): specfile format of the current spec
     """

     def get_arg(name, default=None):

@@ -458,7 +458,6 @@ def get_arg(name, default=None):
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
     status_fn = get_arg("status_fn", None)
-    specfile_format = get_arg("specfile_format", False)

     decorator = get_arg("decorator", None)
     if decorator is None:

@@ -480,9 +479,6 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + vfmt + ffmt

-    if specfile_format:
-        format_string = "[{specfile_version}] " + format_string
-
    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""

View File

@@ -76,6 +76,9 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=False, default=False,
help="regenerate buildcache index after building package(s)", help="regenerate buildcache index after building package(s)",
) )
push.add_argument(
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
)
push.add_argument( push.add_argument(
"--only", "--only",
default="package,dependencies", default="package,dependencies",
@@ -189,14 +192,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
default=lambda: spack.config.default_modify_scope(), default=lambda: spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check", help="configuration scope containing mirrors to check",
) )
# Unfortunately there are 3 ways to do the same thing here:
check_specs = check.add_mutually_exclusive_group()
check_specs.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)
check_specs.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
)
arguments.add_common_arguments(check, ["specs"]) arguments.add_common_arguments(check, ["specs"])
check.set_defaults(func=check_fn) check.set_defaults(func=check_fn)
# Download tarball and specfile # Download tarball and specfile
download = subparsers.add_parser("download", help=download_fn.__doc__) download = subparsers.add_parser("download", help=download_fn.__doc__)
download.add_argument("-s", "--spec", help="download built tarball for spec from mirror") download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
download_spec_or_specfile.add_argument(
"-s", "--spec", help="download built tarball for spec from mirror"
)
download_spec_or_specfile.add_argument(
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
)
download.add_argument( download.add_argument(
"-p", "-p",
"--path", "--path",
@@ -206,10 +223,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
) )
download.set_defaults(func=download_fn) download.set_defaults(func=download_fn)
# Get buildcache name
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
)
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
# Given the root spec, save the yaml of the dependent spec to a file # Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__) savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True) savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec") savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
)
savespecfile.add_argument( savespecfile.add_argument(
"-s", "-s",
"--specs", "--specs",
@@ -345,8 +380,14 @@ def _specs_to_be_packaged(
def push_fn(args): def push_fn(args):
"""create a binary package and push it to a mirror""" """create a binary package and push it to a mirror"""
if args.specs: if args.spec_file:
roots = _matching_specs(spack.cmd.parse_specs(args.specs)) tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)
if args.specs or args.spec_file:
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
else: else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots() roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
@@ -488,7 +529,22 @@ def check_fn(args: argparse.Namespace):
this command uses the process exit code to indicate its result, specifically, if the this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
""" """
specs_arg = args.specs if args.spec_file:
specs_arg = (
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
)
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
f"Use `spack buildcache check {specs_arg}` instead."
)
elif args.spec:
specs_arg = args.spec
tty.warn(
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
f"Use `spack buildcache check {specs_arg}` instead."
)
else:
specs_arg = args.specs
if specs_arg: if specs_arg:
specs = _matching_specs(spack.cmd.parse_specs(specs_arg)) specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
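A bare filename gains a leading "./" so the downstream spec parser treats it as a file path rather than a package name; a sketch of the normalization above (filename hypothetical):

import os

spec_file = "myspec.json"  # hypothetical filename
specs_arg = spec_file if os.path.sep in spec_file else os.path.join(".", spec_file)
print(specs_arg)  # ./myspec.json ("dir/myspec.json" would pass through unchanged)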
@@ -522,7 +578,13 @@ def download_fn(args):
code indicates that the command failed to download at least one of the required buildcache code indicates that the command failed to download at least one of the required buildcache
components components
""" """
specs = _matching_specs(spack.cmd.parse_specs(args.spec)) if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1: if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache") tty.die("a single spec argument is required to download from a buildcache")
@@ -531,6 +593,15 @@ def download_fn(args):
sys.exit(1) sys.exit(1)
def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))
def save_specfile_fn(args): def save_specfile_fn(args):
"""get full spec for dependencies and write them to files in the specified output directory """get full spec for dependencies and write them to files in the specified output directory
@@ -538,7 +609,13 @@ def save_specfile_fn(args):
successful. if any errors or exceptions are encountered, or if expected command-line arguments successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero are not provided, then the exit code will be non-zero
""" """
specs = spack.cmd.parse_specs(args.root_spec) if args.root_specfile:
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)
specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
if len(specs) != 1: if len(specs) != 1:
tty.die("a single spec argument is required to save specfile") tty.die("a single spec argument is required to save specfile")


@@ -4,6 +4,7 @@
import json import json
import os import os
import re
import shutil import shutil
import sys import sys
from typing import Dict from typing import Dict
@@ -25,10 +26,12 @@
import spack.hash_types as ht import spack.hash_types as ht
import spack.mirrors.mirror import spack.mirrors.mirror
import spack.package_base import spack.package_base
import spack.paths
import spack.repo import spack.repo
import spack.spec import spack.spec
import spack.stage import spack.stage
import spack.util.executable import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util import spack.util.gpg as gpg_util
import spack.util.timer as timer import spack.util.timer as timer
import spack.util.url as url_util import spack.util.url as url_util
@@ -42,6 +45,7 @@
SPACK_COMMAND = "spack" SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1 INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100 FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
def deindent(desc): def deindent(desc):
@@ -160,12 +164,6 @@ def setup_parser(subparser):
default=False, default=False,
help="stop stand-alone tests after the first failure", help="stop stand-alone tests after the first failure",
) )
rebuild.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)
rebuild.set_defaults(func=ci_rebuild) rebuild.set_defaults(func=ci_rebuild)
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"]) spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
@@ -453,7 +451,7 @@ def ci_rebuild(args):
# Arguments when installing the root from sources # Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"] deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--keep-stage", "--only=package"] root_install_args = install_args + ["--only=package"]
if cdash_handler: if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting. # Add additional arguments to `spack install` for CDash reporting.
@@ -493,9 +491,6 @@ def ci_rebuild(args):
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir) spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# Clear the stage directory
spack.stage.purge()
# If the installation succeeded and we're running stand-alone tests for # If the installation succeeded and we're running stand-alone tests for
# the package, run them and copy the output. Failures of any kind should # the package, run them and copy the output. Failures of any kind should
# *not* terminate the build process or preclude creating the build cache. # *not* terminate the build process or preclude creating the build cache.
@@ -530,7 +525,6 @@ def ci_rebuild(args):
fail_fast=args.fail_fast, fail_fast=args.fail_fast,
log_file=log_file, log_file=log_file,
repro_dir=repro_dir, repro_dir=repro_dir,
timeout=args.timeout,
) )
except Exception as err: except Exception as err:
@@ -789,17 +783,18 @@ def ci_verify_versions(args):
then parses the git diff between the two to determine which packages then parses the git diff between the two to determine which packages
have been modified and verifies the new checksums inside of them. have been modified and verifies the new checksums inside of them.
""" """
# Get a list of all packages that have been changed or added with fs.working_dir(spack.paths.prefix):
# between from_ref and to_ref # We use HEAD^1 explicitly on the merge commit created by
pkgs = spack.repo.get_all_package_diffs( # GitHub Actions. However HEAD~1 is a safer default for the helper function.
"AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
)
# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
failed_version = False failed_version = False
for pkg_name in pkgs: for pkg_name, path in pkgs:
spec = spack.spec.Spec(pkg_name) spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec) pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
path = spack.repo.PATH.package_path(pkg_name)
# Skip checking manual download packages and trust the maintainers # Skip checking manual download packages and trust the maintainers
if pkg.manual_download: if pkg.manual_download:
@@ -823,7 +818,7 @@ def ci_verify_versions(args):
# TODO: enforce every version have a commit or a sha256 defined if not # TODO: enforce every version have a commit or a sha256 defined if not
# an infinite version (there are a lot of packages where this doesn't work yet.) # an infinite version (there are a lot of packages where this doesn't work yet.)
with fs.working_dir(os.path.dirname(path)): with fs.working_dir(spack.paths.prefix):
added_checksums = spack_ci.get_added_versions( added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
) )
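The BUILTIN pattern pulls the package name out of each modified builtin package path, which is exactly what the list comprehension in this hunk does; for example (file paths hypothetical):

import re

BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")

files = [
    "var/spack/repos/builtin/packages/zlib/package.py",  # hypothetical diff entries
    "lib/spack/spack/ci.py",
]
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
print(pkgs)  # [('zlib', 'var/spack/repos/builtin/packages/zlib/package.py')]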


@@ -63,7 +63,7 @@ def setup_parser(subparser):
) )
# List # List
list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers") list_parser = sp.add_parser("list", help="list available compilers")
list_parser.add_argument( list_parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read from" "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
) )
@@ -216,6 +216,5 @@ def compiler(parser, args):
"rm": compiler_remove, "rm": compiler_remove,
"info": compiler_info, "info": compiler_info,
"list": compiler_list, "list": compiler_list,
"ls": compiler_list,
} }
action[args.compiler_command](args) action[args.compiler_command](args)


@@ -23,7 +23,7 @@
from spack.util.editor import editor from spack.util.editor import editor
from spack.util.executable import which from spack.util.executable import which
from spack.util.format import get_version_lines from spack.util.format import get_version_lines
from spack.util.naming import pkg_name_to_class_name, simplify_name from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
description = "create a new package file" description = "create a new package file"
section = "packaging" section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:
def __init__(self, name: str, versions, languages: List[str]): def __init__(self, name: str, versions, languages: List[str]):
self.name = name self.name = name
self.class_name = pkg_name_to_class_name(name) self.class_name = mod_to_class(name)
self.versions = versions self.versions = versions
self.languages = languages self.languages = languages
@@ -572,7 +572,7 @@ def edit(self, spec, prefix):
class IntelPackageTemplate(PackageTemplate): class IntelPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for licensed Intel software""" """Provides appropriate overrides for licensed Intel software"""
base_class_name = "IntelOneApiPackage" base_class_name = "IntelPackage"
body_def = """\ body_def = """\
# FIXME: Override `setup_environment` if necessary.""" # FIXME: Override `setup_environment` if necessary."""
@@ -874,7 +874,7 @@ def get_name(name, url):
result = simplify_name(result) result = simplify_name(result)
if not re.match(r"^[a-z0-9-]+$", result): if not valid_fully_qualified_module_name(result):
tty.die("Package name can only contain a-z, 0-9, and '-'") tty.die("Package name can only contain a-z, 0-9, and '-'")
return result return result


@@ -102,7 +102,7 @@ def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Sp
) )
else: else:
# look up the maximum version so infinity versions are preferred for develop # look up the maximum version so infinity versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys()) version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}") tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version]) spec.versions = spack.version.VersionList([version])


@@ -62,7 +62,7 @@ def setup_parser(subparser):
"package Spack knows how to find." "package Spack knows how to find."
) )
sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name") sp.add_parser("list", help="list detectable packages, by repository and name")
read_cray_manifest = sp.add_parser( read_cray_manifest = sp.add_parser(
"read-cray-manifest", "read-cray-manifest",
@@ -259,7 +259,6 @@ def external(parser, args):
action = { action = {
"find": external_find, "find": external_find,
"list": external_list, "list": external_list,
"ls": external_list,
"read-cray-manifest": external_read_cray_manifest, "read-cray-manifest": external_read_cray_manifest,
} }
action[args.external_command](args) action[args.external_command](args)


@@ -51,12 +51,6 @@ def setup_parser(subparser):
"-I", "--install-status", action="store_true", help="show install status of packages" "-I", "--install-status", action="store_true", help="show install status of packages"
) )
subparser.add_argument(
"--specfile-format",
action="store_true",
help="show the specfile format for installed deps ",
)
subparser.add_argument( subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs" "-d", "--deps", action="store_true", help="output dependencies along with found specs"
) )
@@ -286,7 +280,6 @@ def root_decorator(spec, string):
show_flags=True, show_flags=True,
decorator=root_decorator, decorator=root_decorator,
variants=True, variants=True,
specfile_format=args.specfile_format,
) )
print() print()
@@ -308,7 +301,6 @@ def root_decorator(spec, string):
namespace=True, namespace=True,
show_flags=True, show_flags=True,
variants=True, variants=True,
specfile_format=args.specfile_format,
) )
print() print()
@@ -398,12 +390,7 @@ def find(parser, args):
if args.show_concretized: if args.show_concretized:
display_results += concretized_but_not_installed display_results += concretized_but_not_installed
cmd.display_specs( cmd.display_specs(
display_results, display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
args,
decorator=decorator,
all_headers=True,
status_fn=status_fn,
specfile_format=args.specfile_format,
) )
# print number of installed packages last (as the list may be long) # print number of installed packages last (as the list may be long)


@@ -10,13 +10,11 @@
import re import re
import sys import sys
from html import escape from html import escape
from typing import Type
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
import spack.deptypes as dt import spack.deptypes as dt
import spack.package_base
import spack.repo import spack.repo
from spack.cmd.common import arguments from spack.cmd.common import arguments
from spack.version import VersionList from spack.version import VersionList
@@ -141,10 +139,10 @@ def name_only(pkgs, out):
tty.msg("%d packages" % len(pkgs)) tty.msg("%d packages" % len(pkgs))
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str: def github_url(pkg):
"""Link to a package file on github.""" """Link to a package file on github."""
mod_path = pkg.__module__.replace(".", "/") url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py" return url.format(pkg.name)
def rows_for_ncols(elts, ncols): def rows_for_ncols(elts, ncols):


@@ -89,17 +89,17 @@ def setup_parser(subparser):
def pkg_add(args): def pkg_add(args):
"""add a package to the git stage with `git add`""" """add a package to the git stage with `git add`"""
spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo()) spack.repo.add_package_to_git_stage(args.packages)
def pkg_list(args): def pkg_list(args):
"""list packages associated with a particular spack git revision""" """list packages associated with a particular spack git revision"""
colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo())) colify(spack.repo.list_packages(args.rev))
def pkg_diff(args): def pkg_diff(args):
"""compare packages available in two different git revisions""" """compare packages available in two different git revisions"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo()) u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u1: if u1:
print("%s:" % args.rev1) print("%s:" % args.rev1)
@@ -114,23 +114,21 @@ def pkg_diff(args):
def pkg_removed(args): def pkg_removed(args):
"""show packages removed since a commit""" """show packages removed since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo()) u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u1: if u1:
colify(sorted(u1)) colify(sorted(u1))
def pkg_added(args): def pkg_added(args):
"""show packages added since a commit""" """show packages added since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo()) u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
if u2: if u2:
colify(sorted(u2)) colify(sorted(u2))
def pkg_changed(args): def pkg_changed(args):
"""show packages changed since a commit""" """show packages changed since a commit"""
packages = spack.repo.get_all_package_diffs( packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
)
if packages: if packages:
colify(sorted(packages)) colify(sorted(packages))


@@ -4,7 +4,6 @@
import os import os
import sys import sys
from typing import List
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -25,7 +24,9 @@ def setup_parser(subparser):
create_parser = sp.add_parser("create", help=repo_create.__doc__) create_parser = sp.add_parser("create", help=repo_create.__doc__)
create_parser.add_argument("directory", help="directory to create the repo in") create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument( create_parser.add_argument(
"namespace", help="name or namespace to identify packages in the repository" "namespace",
help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?",
) )
create_parser.add_argument( create_parser.add_argument(
"-d", "-d",
@@ -137,7 +138,7 @@ def repo_remove(args):
def repo_list(args): def repo_list(args):
"""show registered repositories and their namespaces""" """show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope) roots = spack.config.get("repos", scope=args.scope)
repos: List[spack.repo.Repo] = [] repos = []
for r in roots: for r in roots:
try: try:
repos.append(spack.repo.from_path(r)) repos.append(spack.repo.from_path(r))
@@ -145,14 +146,17 @@ def repo_list(args):
continue continue
if sys.stdout.isatty(): if sys.stdout.isatty():
tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies.")) msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
if not repos: if not repos:
return return
max_ns_len = max(len(r.namespace) for r in repos) max_ns_len = max(len(r.namespace) for r in repos)
for repo in repos: for repo in repos:
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}") fmt = "%%-%ds%%s" % (max_ns_len + 4)
print(fmt % (repo.namespace, repo.root))
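The f-string replaces the old %-style dynamic width; the computed field width is interpolated inside the format spec. A small illustration (values illustrative):

namespace, max_ns_len = "builtin", 7  # illustrative values
print(f"{namespace:<{max_ns_len + 4}}end")                  # "builtin    end"
print("%%-%ds%%s" % (max_ns_len + 4) % (namespace, "end"))  # same output, old style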
def repo(parser, args): def repo(parser, args):


@@ -136,7 +136,20 @@ def solve(parser, args):
setup_only = set(show) == {"asp"} setup_only = set(show) == {"asp"}
unify = spack.config.get("concretizer:unify") unify = spack.config.get("concretizer:unify")
allow_deprecated = spack.config.get("config:deprecated", False) allow_deprecated = spack.config.get("config:deprecated", False)
if unify == "when_possible": if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate( for idx, result in enumerate(
solver.solve_in_rounds( solver.solve_in_rounds(
specs, specs,
@@ -153,29 +166,3 @@ def solve(parser, args):
print("% END ROUND {0}\n".format(idx)) print("% END ROUND {0}\n".format(idx))
if not setup_only: if not setup_only:
_process_result(result, show, required_format, kwargs) _process_result(result, show, required_format, kwargs)
elif unify:
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for spec in specs:
tty.msg("SOLVING SPEC:", spec)
result = solver.solve(
[spec],
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
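Taken together, the two sides of this hunk cover three concretizer:unify modes; a minimal dispatch sketch with the solver calls stubbed as callables (not Spack's actual API):

def run_solver(unify, specs, solve, solve_in_rounds):
    # Sketch: "when_possible" solves in incremental rounds, True solves all
    # specs together, False solves each spec independently
    if unify == "when_possible":
        yield from solve_in_rounds(specs)
    elif unify:
        yield solve(specs)
    else:
        for spec in specs:
            yield solve([spec])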


@@ -59,7 +59,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals packages, since we allow `from spack import *` and poking globals
into packages. into packages.
""" """
return f.startswith("var/spack/") and f.endswith("package.py") return f.startswith("var/spack/repos/") and f.endswith("package.py")
#: decorator for adding tools to the list #: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
def _module_part(root: str, expr: str): def _module_part(root: str, expr: str):
parts = expr.split(".") parts = expr.split(".")
# spack.pkg is for repositories, don't try to resolve it here. # spack.pkg is for repositories, don't try to resolve it here.
if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg": if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
return None return None
while parts: while parts:
f1 = os.path.join(root, "lib", "spack", *parts) + ".py" f1 = os.path.join(root, "lib", "spack", *parts) + ".py"


@@ -65,12 +65,6 @@ def setup_parser(subparser):
run_parser.add_argument( run_parser.add_argument(
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting" "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
) )
run_parser.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)
cd_group = run_parser.add_mutually_exclusive_group() cd_group = run_parser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ["clean", "dirty"]) arguments.add_common_arguments(cd_group, ["clean", "dirty"])
@@ -182,7 +176,7 @@ def test_run(args):
for spec in specs: for spec in specs:
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit) matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching: if spec and not matching:
tty.warn(f"No {explicit_str}installed packages match spec {spec}") tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
# TODO: Need to write out a log message and/or CDASH Testing # TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process # output that package not installed IF continue to process
@@ -198,7 +192,7 @@ def test_run(args):
# test_stage_dir # test_stage_dir
test_suite = spack.install_test.TestSuite(specs_to_test, args.alias) test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
test_suite.ensure_stage() test_suite.ensure_stage()
tty.msg(f"Spack test {test_suite.name}") tty.msg("Spack test %s" % test_suite.name)
# Set up reporter # Set up reporter
setattr(args, "package", [s.format() for s in test_suite.specs]) setattr(args, "package", [s.format() for s in test_suite.specs])
@@ -210,7 +204,6 @@ def test_run(args):
dirty=args.dirty, dirty=args.dirty,
fail_first=args.fail_first, fail_first=args.fail_first,
externals=args.externals, externals=args.externals,
timeout=args.timeout,
) )


@@ -18,10 +18,6 @@ class Languages(enum.Enum):
class CompilerAdaptor: class CompilerAdaptor:
"""Provides access to compiler attributes via `Package.compiler`. Useful for
packages which do not yet access compiler properties via `self.spec[language]`.
"""
def __init__( def __init__(
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec] self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
) -> None: ) -> None:
@@ -83,14 +79,6 @@ def implicit_rpaths(self) -> List[str]:
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths()) result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
return result return result
@property
def opt_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.opt_flags
@property
def debug_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.debug_flags
@property @property
def openmp_flag(self) -> str: def openmp_flag(self) -> str:
return next(iter(self.compilers.values())).package.openmp_flag return next(iter(self.compilers.values())).package.openmp_flag
@@ -152,7 +140,7 @@ def c17_flag(self) -> str:
@property @property
def c23_flag(self) -> str: def c23_flag(self) -> str:
return self.compilers[Languages.C].package.standard_flag( return self.compilers[Languages.C].package.standard_flag(
language=Languages.C.value, standard="23" language=Languages.C.value, standard="17"
) )
@property @property
@@ -202,10 +190,6 @@ def f77(self):
self._lang_exists_or_raise("f77", lang=Languages.FORTRAN) self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
return self.compilers[Languages.FORTRAN].package.fortran return self.compilers[Languages.FORTRAN].package.fortran
@property
def stdcxx_libs(self):
return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)
class DeprecatedCompiler(lang.DeprecatedProperty): class DeprecatedCompiler(lang.DeprecatedProperty):
def __init__(self) -> None: def __init__(self) -> None:


@@ -7,7 +7,6 @@
import os import os
import re import re
import sys import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple from typing import Any, Dict, List, Optional, Tuple
import archspec.cpu import archspec.cpu
@@ -338,15 +337,7 @@ def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List[spack.spec.Spec]:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls))) pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))] filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
try: detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
except Exception:
warnings.warn(
f"[{__name__}] cannot detect {pkg_name} from the "
f"following paths: {', '.join(filtered_paths)}"
)
continue
for s in detected: for s in detected:
for key in ("flags", "environment", "extra_rpaths"): for key in ("flags", "environment", "extra_rpaths"):
if key in compiler_dict: if key in compiler_dict:


@@ -149,12 +149,12 @@ def _getfqdn():
return socket.getfqdn() return socket.getfqdn()
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]: def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = { reader_cls = {
vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1, vn.Version("5"): spack.spec.SpecfileV1,
vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3, vn.Version("6"): spack.spec.SpecfileV3,
vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4, vn.Version("7"): spack.spec.SpecfileV4,
vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5, vn.Version("8"): spack.spec.SpecfileV5,
} }
return reader_cls[version] return reader_cls[version]
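The specfile format version selects a reader class; a standalone sketch of the dispatch with the reader classes stubbed (only the version-to-class mapping is taken from the hunk):

class SpecfileV1: ...
class SpecfileV3: ...
class SpecfileV4: ...
class SpecfileV5: ...

_READERS = {"5": SpecfileV1, "6": SpecfileV3, "7": SpecfileV4, "8": SpecfileV5}

def reader(version: str):
    # An unknown version raises KeyError; the surrounding code separately
    # guards against databases newer than the supported _DB_VERSION
    return _READERS[version]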
@@ -824,7 +824,7 @@ def check(cond, msg):
db = fdata["database"] db = fdata["database"]
check("version" in db, "no 'version' in JSON DB.") check("version" in db, "no 'version' in JSON DB.")
self.db_version = vn.StandardVersion.from_string(db["version"]) self.db_version = vn.Version(db["version"])
if self.db_version > _DB_VERSION: if self.db_version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version) raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
elif self.db_version < _DB_VERSION: elif self.db_version < _DB_VERSION:


@@ -20,7 +20,7 @@
import sys import sys
from typing import Dict, List, Optional, Set, Tuple, Union from typing import Dict, List, Optional, Set, Tuple, Union
from llnl.util import tty import llnl.util.tty
import spack.config import spack.config
import spack.error import spack.error
@@ -93,13 +93,14 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
except spack.error.SpackError: except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we # It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details # can look up the implementation of determine_spec_details
tty.warn(f"Constructed spec for {spec.name} does not have a string representation") msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
return False return False
try: try:
spack.spec.Spec(str(spec)) spack.spec.Spec(str(spec))
except spack.error.SpackError: except spack.error.SpackError:
tty.warn( llnl.util.tty.warn(
"Constructed spec has a string representation but the string" "Constructed spec has a string representation but the string"
" representation does not evaluate to a valid spec: {0}".format(str(spec)) " representation does not evaluate to a valid spec: {0}".format(str(spec))
) )
@@ -108,24 +109,20 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True return True
def path_to_dict(search_paths: List[str]) -> Dict[str, str]: def path_to_dict(search_paths: List[str]):
"""Return dictionary[fullpath]: basename from list of paths""" """Return dictionary[fullpath]: basename from list of paths"""
path_to_lib: Dict[str, str] = {} path_to_lib = {}
# Reverse order of search directories so that a lib in the first # Reverse order of search directories so that a lib in the first
# entry overrides later entries # entry overrides later entries
for search_path in reversed(search_paths): for search_path in reversed(search_paths):
try: try:
dir_iter = os.scandir(search_path) with os.scandir(search_path) as entries:
path_to_lib.update(
{entry.path: entry.name for entry in entries if entry.is_file()}
)
except OSError as e: except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}") msg = f"cannot scan '{search_path}' for external software: {str(e)}"
continue llnl.util.tty.debug(msg)
with dir_iter as entries:
for entry in entries:
try:
if entry.is_file():
path_to_lib[entry.path] = entry.name
except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
return path_to_lib return path_to_lib
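A hypothetical call (directories illustrative); the result maps each regular file's full path to its basename, and unreadable directories are skipped with a debug message rather than raising:

libs = path_to_dict(["/usr/lib", "/usr/local/lib"])
# e.g. {"/usr/lib/libz.so": "libz.so", ...}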


@@ -34,13 +34,11 @@ class OpenMpi(Package):
import collections.abc import collections.abc
import os import os
import re import re
import warnings
from typing import Any, Callable, List, Optional, Tuple, Type, Union from typing import Any, Callable, List, Optional, Tuple, Type, Union
import llnl.util.tty.color import llnl.util.tty.color
import spack.deptypes as dt import spack.deptypes as dt
import spack.error
import spack.fetch_strategy import spack.fetch_strategy
import spack.package_base import spack.package_base
import spack.patch import spack.patch
@@ -610,7 +608,7 @@ def _execute_patch(
return _execute_patch return _execute_patch
def conditional(*values: Union[str, bool], when: Optional[WhenType] = None): def conditional(*values: List[Any], when: Optional[WhenType] = None):
"""Conditional values that can be used in variant declarations.""" """Conditional values that can be used in variant declarations."""
# _make_when_spec returns None when the condition is statically false. # _make_when_spec returns None when the condition is statically false.
when = _make_when_spec(when) when = _make_when_spec(when)
@@ -622,7 +620,7 @@ def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
@directive("variants") @directive("variants")
def variant( def variant(
name: str, name: str,
default: Optional[Union[bool, str, Tuple[str, ...]]] = None, default: Optional[Any] = None,
description: str = "", description: str = "",
values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None, values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
multi: Optional[bool] = None, multi: Optional[bool] = None,
@@ -652,29 +650,11 @@ def variant(
DirectiveError: If arguments passed to the directive are invalid DirectiveError: If arguments passed to the directive are invalid
""" """
# This validation can be removed at runtime and enforced with an audit in Spack v1.0.
# For now it's a warning to let people migrate faster.
if not (
default is None
or type(default) in (bool, str)
or (type(default) is tuple and all(type(x) is str for x in default))
):
if isinstance(default, (list, tuple)):
did_you_mean = f"default={','.join(str(x) for x in default)!r}"
else:
did_you_mean = f"default={str(default)!r}"
warnings.warn(
f"default value for variant '{name}' is not a boolean or string: default={default!r}. "
f"Did you mean {did_you_mean}?",
stacklevel=3,
category=spack.error.SpackAPIWarning,
)
def format_error(msg, pkg): def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}" msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name)) return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
if name in spack.variant.RESERVED_NAMES: if name in spack.variant.reserved_names:
def _raise_reserved_name(pkg): def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name msg = "The name '%s' is reserved by Spack" % name
@@ -685,11 +665,7 @@ def _raise_reserved_name(pkg):
# Ensure we have a sequence of allowed variant values, or a # Ensure we have a sequence of allowed variant values, or a
# predicate for it. # predicate for it.
if values is None: if values is None:
if ( if str(default).upper() in ("TRUE", "FALSE"):
default in (True, False)
or type(default) is str
and default.upper() in ("TRUE", "FALSE")
):
values = (True, False) values = (True, False)
else: else:
values = lambda x: True values = lambda x: True
@@ -722,15 +698,12 @@ def _raise_argument_error(pkg):
# or the empty string, as the former indicates that a default # or the empty string, as the former indicates that a default
# was not set while the latter will make the variant unparsable # was not set while the latter will make the variant unparsable
# from the command line # from the command line
if isinstance(default, tuple):
default = ",".join(default)
if default is None or default == "": if default is None or default == "":
def _raise_default_not_set(pkg): def _raise_default_not_set(pkg):
if default is None: if default is None:
msg = "either a default was not explicitly set, or 'None' was used" msg = "either a default was not explicitly set, " "or 'None' was used"
else: elif default == "":
msg = "the default cannot be an empty string" msg = "the default cannot be an empty string"
raise DirectiveError(format_error(msg, pkg)) raise DirectiveError(format_error(msg, pkg))
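The validation in this hunk accepts only None, bool, str, or a tuple of str as a variant default; a standalone sketch of the predicate:

def valid_variant_default(default):
    # Mirrors the check in the hunk above; lists like ["a", "b"] fail, and the
    # warning suggests the string form default="a,b" instead
    return (
        default is None
        or type(default) in (bool, str)
        or (type(default) is tuple and all(type(x) is str for x in default))
    )

assert valid_variant_default(("shared", "static"))
assert not valid_variant_default(["shared", "static"])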


@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
# The instance is being initialized: if it is a package we must ensure # The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up. # that the directives are called to set it up.
if spack.repo.is_package_module(cls.__module__): if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
# Ensure the presence of the dictionaries associated with the directives. # Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys. # All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names: for d in DirectiveMeta._directive_dict_names:
@@ -144,6 +144,7 @@ class Foo(Package):
Package class, and it's how Spack gets information from the Package class, and it's how Spack gets information from the
packages to the core. packages to the core.
""" """
global directive_names
if isinstance(dicts, str): if isinstance(dicts, str):
dicts = (dicts,) dicts = (dicts,)


@@ -566,7 +566,7 @@
display_specs, display_specs,
environment_dir_from_name, environment_dir_from_name,
environment_from_name_or_dir, environment_from_name_or_dir,
environment_path_scope, environment_path_scopes,
exists, exists,
initialize_environment_dir, initialize_environment_dir,
installed_specs, installed_specs,
@@ -603,7 +603,7 @@
"display_specs", "display_specs",
"environment_dir_from_name", "environment_dir_from_name",
"environment_from_name_or_dir", "environment_from_name_or_dir",
"environment_path_scope", "environment_path_scopes",
"exists", "exists",
"initialize_environment_dir", "initialize_environment_dir",
"installed_specs", "installed_specs",


@@ -31,6 +31,7 @@
import spack.repo import spack.repo
import spack.schema.env import spack.schema.env
import spack.spec import spack.spec
import spack.spec_list
import spack.store import spack.store
import spack.user_environment as uenv import spack.user_environment as uenv
import spack.util.environment import spack.util.environment
@@ -43,10 +44,10 @@
from spack.installer import PackageInstaller from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec from spack.spec import Spec
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables from spack.util.path import substitute_path_variables
from ..enums import ConfigScopePriority from ..enums import ConfigScopePriority
from .list import SpecList, SpecListError, SpecListParser
SpecPair = spack.concretize.SpecPair SpecPair = spack.concretize.SpecPair
@@ -96,15 +97,16 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
return path_str return path_str
def ensure_no_disallowed_env_config_mods(scope: spack.config.ConfigScope) -> None: def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
config = scope.get_section("config") for scope in scopes:
if config and "environments_root" in config["config"]: config = scope.get_section("config")
raise SpackEnvironmentError( if config and "environments_root" in config["config"]:
"Spack environments are prohibited from modifying 'config:environments_root' " raise SpackEnvironmentError(
"because it can make the definition of the environment ill-posed. Please " "Spack environments are prohibited from modifying 'config:environments_root' "
"remove from your environment and place it in a permanent scope such as " "because it can make the definition of the environment ill-posed. Please "
"defaults, system, site, etc." "remove from your environment and place it in a permanent scope such as "
) "defaults, system, site, etc."
)
def default_manifest_yaml(): def default_manifest_yaml():
@@ -931,10 +933,8 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self.new_specs: List[Spec] = [] self.new_specs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {} self.views: Dict[str, ViewDescriptor] = {}
#: Parser for spec lists
self._spec_lists_parser = SpecListParser()
#: Specs from "spack.yaml" #: Specs from "spack.yaml"
self.spec_lists: Dict[str, SpecList] = {} self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
#: User specs from the last concretization #: User specs from the last concretization
self.concretized_user_specs: List[Spec] = [] self.concretized_user_specs: List[Spec] = []
#: Roots associated with the last concretization, in order #: Roots associated with the last concretization, in order
@@ -1002,6 +1002,26 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block.""" """Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read) return lk.WriteTransaction(self.txlock, acquire=self._re_read)
def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True
assert len(entry) == 1
if when:
for name, spec_list in entry.items():
if name == "when":
continue
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs
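Each definitions entry holds exactly one named list plus an optional "when" condition, which is what the assertions above enforce; a hypothetical entry as it would arrive from spack.yaml:

entry = {"when": 'platform == "linux"', "compilers": ["%gcc", "%clang"]}
# If "when" evaluates false the entry is skipped entirely; otherwise
# "compilers" becomes (or extends) the SpecList of that name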
def _process_view(self, env_view: Optional[Union[bool, str, Dict]]): def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None. """Process view option(s), which can be boolean, string, or None.
@@ -1063,24 +1083,21 @@ def _process_concrete_includes(self):
def _construct_state_from_manifest(self): def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file.""" """Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
self.views = {} self.views = {}
self._sync_speclists()
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()
def _sync_speclists(self): for item in spack.config.get("definitions", []):
self.spec_lists = {} self._process_definition(item)
self.spec_lists.update(
self._spec_lists_parser.parse_definitions(
data=spack.config.CONFIG.get("definitions", [])
)
)
env_configuration = self.manifest[TOP_LEVEL_KEY] env_configuration = self.manifest[TOP_LEVEL_KEY]
spec_list = env_configuration.get(user_speclist_name, []) spec_list = env_configuration.get(user_speclist_name, [])
self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs( user_specs = SpecList(
name=user_speclist_name, yaml_list=spec_list user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
) )
self.spec_lists[user_speclist_name] = user_specs
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()
def all_concretized_user_specs(self) -> List[Spec]: def all_concretized_user_specs(self) -> List[Spec]:
"""Returns all of the concretized user specs of the environment and """Returns all of the concretized user specs of the environment and
@@ -1151,7 +1168,9 @@ def clear(self, re_read=False):
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml, re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
and needs to be maintained when re-reading an existing environment. and needs to be maintained when re-reading an existing environment.
""" """
self.spec_lists = {} self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()
self._dev_specs = {} self._dev_specs = {}
self.concretized_order = [] # roots of last concretize, in order self.concretized_order = [] # roots of last concretize, in order
self.concretized_user_specs = [] # user specs from last concretize self.concretized_user_specs = [] # user specs from last concretize
@@ -1258,6 +1277,22 @@ def destroy(self):
"""Remove this environment from Spack entirely.""" """Remove this environment from Spack entirely."""
shutil.rmtree(self.path) shutil.rmtree(self.path)
def update_stale_references(self, from_list=None):
"""Iterate over spec lists updating references."""
if not from_list:
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)
# spec_lists is an OrderedDict to ensure lists read from the manifest
# are maintained in order; hence, all list entries after the modified
# list may refer to the modified list, requiring stale references to be
# updated.
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1 :], index + 1
):
new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
speclist.update_reference(new_reference)
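Ordering matters because a later list may reference an earlier one by name; a sketch, assuming the usual $-reference syntax (the same prefix SpecList.remove checks for below):

definitions = [
    {"packages": ["zlib", "libpng"]},
    {"everything": ["$packages", "cmake"]},  # refers to the list above
]
# Editing "packages" therefore requires rebuilding the reference dict for
# every list defined after it, which is what the index slicing above does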
def add(self, user_spec, list_name=user_speclist_name): def add(self, user_spec, list_name=user_speclist_name):
"""Add a single user_spec (non-concretized) to the Environment """Add a single user_spec (non-concretized) to the Environment
@@ -1277,17 +1312,18 @@ def add(self, user_spec, list_name=user_speclist_name):
elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash: elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
virtuals = spack.repo.PATH.provider_index.providers.keys() virtuals = spack.repo.PATH.provider_index.providers.keys()
if spec.name not in virtuals: if spec.name not in virtuals:
raise SpackEnvironmentError(f"no such package: {spec.name}") msg = "no such package: %s" % spec.name
raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name] list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list existing = str(spec) in list_to_change.yaml_list
if not existing: if not existing:
list_to_change.add(str(spec)) list_to_change.add(str(spec))
self.update_stale_references(list_name)
if list_name == user_speclist_name: if list_name == user_speclist_name:
self.manifest.add_user_spec(str(user_spec)) self.manifest.add_user_spec(str(user_spec))
else: else:
self.manifest.add_definition(str(user_spec), list_name=list_name) self.manifest.add_definition(str(user_spec), list_name=list_name)
self._sync_speclists()
return bool(not existing) return bool(not existing)
@@ -1331,17 +1367,18 @@ def change_existing_spec(
"There are no specs named {0} in {1}".format(match_spec.name, list_name) "There are no specs named {0} in {1}".format(match_spec.name, list_name)
) )
elif len(matches) > 1 and not allow_changing_multiple_specs: elif len(matches) > 1 and not allow_changing_multiple_specs:
raise ValueError(f"{str(match_spec)} matches multiple specs") raise ValueError("{0} matches multiple specs".format(str(match_spec)))
for idx, spec in matches: for idx, spec in matches:
override_spec = Spec.override(spec, change_spec) override_spec = Spec.override(spec, change_spec)
self.spec_lists[list_name].replace(idx, str(override_spec))
if list_name == user_speclist_name: if list_name == user_speclist_name:
self.manifest.override_user_spec(str(override_spec), idx=idx) self.manifest.override_user_spec(str(override_spec), idx=idx)
else: else:
self.manifest.override_definition( self.manifest.override_definition(
str(spec), override=str(override_spec), list_name=list_name str(spec), override=str(override_spec), list_name=list_name
) )
self._sync_speclists() self.update_stale_references(from_list=list_name)
def remove(self, query_spec, list_name=user_speclist_name, force=False): def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec""" """Remove specs from an environment that match a query_spec"""
@@ -1369,17 +1406,22 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
raise SpackEnvironmentError(f"{err_msg_header}, no spec matches") raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")
old_specs = set(self.user_specs) old_specs = set(self.user_specs)
new_specs = set()
# Remove specs from the appropriate spec list
for spec in matches: for spec in matches:
if spec not in list_to_change: if spec not in list_to_change:
continue continue
try: try:
list_to_change.remove(spec) list_to_change.remove(spec)
except SpecListError as e: self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError as e:
# define new specs list
new_specs = set(self.user_specs)
msg = str(e) msg = str(e)
if force: if force:
msg += " It will be removed from the concrete specs." msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
new_specs.remove(spec)
tty.warn(msg) tty.warn(msg)
else: else:
if list_name == user_speclist_name: if list_name == user_speclist_name:
@@ -1387,11 +1429,7 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
else: else:
self.manifest.remove_definition(str(spec), list_name=list_name) self.manifest.remove_definition(str(spec), list_name=list_name)
# Recompute "definitions" and user specs # If force, update stale concretized specs
self._sync_speclists()
new_specs = set(self.user_specs)
# If 'force', update stale concretized specs
for spec in old_specs - new_specs: for spec in old_specs - new_specs:
if force and spec in self.concretized_user_specs: if force and spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec) i = self.concretized_user_specs.index(spec)
@@ -1605,6 +1643,23 @@ def _concretize_separately(self, tests=False):
# Unify the specs objects, so we get correct references to all parents # Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict()) self._read_lockfile_dict(self._to_lockfile_dict())
# Re-attach information on test dependencies
if tests:
# This is slow, but the information on test dependency is lost
# after unification or when reading from a lockfile.
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
)
return concretized_specs return concretized_specs
@property @property
@@ -2312,12 +2367,8 @@ def update_environment_repository(self) -> None:
def _add_to_environment_repository(self, spec_node: Spec) -> None: def _add_to_environment_repository(self, spec_node: Spec) -> None:
"""Add the root node of the spec to the environment repository""" """Add the root node of the spec to the environment repository"""
namespace: str = spec_node.namespace repository_dir = os.path.join(self.repos_path, spec_node.namespace)
repository = spack.repo.create_or_construct( repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
root=os.path.join(self.repos_path, namespace),
namespace=namespace,
package_api=spack.repo.PATH.get_repo(namespace).package_api,
)
pkg_dir = repository.dirname_for_package_name(spec_node.name) pkg_dir = repository.dirname_for_package_name(spec_node.name)
fs.mkdirp(pkg_dir) fs.mkdirp(pkg_dir)
spack.repo.PATH.dump_provenance(spec_node, pkg_dir) spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
@@ -2666,9 +2717,9 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
self.scope_name = f"env:{self.name}" self.scope_name = f"env:{self.name}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config") self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
#: Configuration scope associated with this environment. Note that this is not #: Configuration scopes associated with this environment. Note that these are not
#: invalidated by a re-read of the manifest file. #: invalidated by a re-read of the manifest file.
self._env_config_scope: Optional[spack.config.ConfigScope] = None self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
if not self.manifest_file.exists(): if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}" msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
@@ -2777,8 +2828,6 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
item[list_name].append(user_spec) item[list_name].append(user_spec)
break break
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True self.changed = True
def remove_definition(self, user_spec: str, list_name: str) -> None: def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2805,8 +2854,6 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError: except ValueError:
pass pass
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True self.changed = True
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None: def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2832,8 +2879,6 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError: except ValueError:
pass pass
# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True self.changed = True
def _iterate_on_definitions(self, definitions, *, list_name, err_msg): def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2912,27 +2957,33 @@ def __str__(self):
return str(self.manifest_file) return str(self.manifest_file)
@property @property
def env_config_scope(self) -> spack.config.ConfigScope: def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""The configuration scope for the environment manifest""" """A list of all configuration scopes for the environment manifest. On the first call this
if self._env_config_scope is None: instantiates all the scopes, on subsequent calls it returns the cached list."""
self._env_config_scope = spack.config.SingleFileScope( if self._config_scopes is not None:
return self._config_scopes
scopes: List[spack.config.ConfigScope] = [
spack.config.SingleFileScope(
self.scope_name, self.scope_name,
str(self.manifest_file), str(self.manifest_file),
spack.schema.env.schema, spack.schema.env.schema,
yaml_path=[TOP_LEVEL_KEY], yaml_path=[TOP_LEVEL_KEY],
) )
ensure_no_disallowed_env_config_mods(self._env_config_scope) ]
return self._env_config_scope ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
return scopes
def prepare_config_scope(self) -> None: def prepare_config_scope(self) -> None:
"""Add the manifest's scope to the global configuration search path.""" """Add the manifest's scopes to the global configuration search path."""
spack.config.CONFIG.push_scope( for scope in self.env_config_scopes:
self.env_config_scope, priority=ConfigScopePriority.ENVIRONMENT spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
)
def deactivate_config_scope(self) -> None: def deactivate_config_scope(self) -> None:
"""Remove the manifest's scope from the global config path.""" """Remove any of the manifest's scopes from the global config path."""
spack.config.CONFIG.remove_scope(self.env_config_scope.name) for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
@contextlib.contextmanager @contextlib.contextmanager
def use_config(self): def use_config(self):
@@ -2943,8 +2994,8 @@ def use_config(self):
self.deactivate_config_scope() self.deactivate_config_scope()
def environment_path_scope(name: str, path: str) -> Optional[spack.config.ConfigScope]: def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
"""Retrieve the suitably named environment path scope """Retrieve the suitably named environment path scopes
Arguments: Arguments:
name: configuration scope name name: configuration scope name
@@ -2959,9 +3010,11 @@ def environment_path_scope(name: str, path: str) -> Optional[spack.config.Config
else: else:
return None return None
manifest.env_config_scope.name = f"{name}:{manifest.env_config_scope.name}" for scope in manifest.env_config_scopes:
manifest.env_config_scope.writable = False scope.name = f"{name}:{scope.name}"
return manifest.env_config_scope scope.writable = False
return manifest.env_config_scopes
class SpackEnvironmentError(spack.error.SpackError): class SpackEnvironmentError(spack.error.SpackError):


@@ -1,286 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import Any, Dict, List, NamedTuple, Optional, Union
import spack.spec
import spack.util.spack_yaml
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
class SpecList:
def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
self.name = name
self.yaml_list = yaml_list[:] if yaml_list is not None else []
# Expansions can be expensive to compute and difficult to keep updated
# We cache results and invalidate when self.yaml_list changes
self.specs_as_yaml_list = expanded_list or []
self._constraints = None
self._specs: Optional[List[Spec]] = None
@property
def is_matrix(self):
for item in self.specs_as_yaml_list:
if isinstance(item, dict):
return True
return False
@property
def specs_as_constraints(self):
if self._constraints is None:
constraints = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict): # matrix of specs
constraints.extend(_expand_matrix_constraints(item))
else: # individual spec
constraints.append([Spec(item)])
self._constraints = constraints
return self._constraints
@property
def specs(self) -> List[Spec]:
if self._specs is None:
specs: List[Spec] = []
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:
spec = constraint_list[0].copy()
for const in constraint_list[1:]:
spec.constrain(const)
specs.append(spec)
self._specs = specs
return self._specs
def add(self, spec: Spec):
spec_str = str(spec)
self.yaml_list.append(spec_str)
# expanded list can be updated without invalidation
if self.specs_as_yaml_list is not None:
self.specs_as_yaml_list.append(spec_str)
# Invalidate cache variables when we change the list
self._constraints = None
self._specs = None
def remove(self, spec):
# Get spec to remove from list
remove = [
s
for s in self.yaml_list
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
msg += f"Either {spec} is not in {self.name} or {spec} is "
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
self.specs_as_yaml_list.remove(item)
# invalidate cache variables when we change the list
self._constraints = None
self._specs = None
def extend(self, other: "SpecList", copy_reference=True) -> None:
self.yaml_list.extend(other.yaml_list)
self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
self._constraints = None
self._specs = None
def __len__(self):
return len(self.specs)
def __getitem__(self, key):
return self.specs[key]
def __iter__(self):
return iter(self.specs)
def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
for row in matrix_config["matrix"]:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
[
[" ".join([str(c) for c in expanded_constraint_list])]
for expanded_constraint_list in _expand_matrix_constraints(r)
]
)
else:
new_row.append([r])
expanded_rows.append(new_row)
excludes = matrix_config.get("exclude", []) # only compute once
sigil = matrix_config.get("sigil", "")
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.spec.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
# Resolve abstract hashes for exclusion criteria
if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
continue
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
# Add to list of constraints
results.append(flat_combo)
return results
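At its core the expansion is a cross product over the matrix rows, with excludes filtered out afterwards. A self-contained illustration of just the product step, using hypothetical spec strings:

    import itertools

    rows = [["mpileaks", "zlib"], ["%gcc", "%clang"]]   # hypothetical matrix rows
    for combo in itertools.product(*rows):
        print(" ".join(combo))
    # mpileaks %gcc
    # mpileaks %clang
    # zlib %gcc
    # zlib %clang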
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:
item["sigil"] = sigil
return item
else:
return sigil + item
class Definition(NamedTuple):
name: str
yaml_list: List[Union[str, Dict]]
when: Optional[str]
class SpecListParser:
"""Parse definitions and user specs from data in environments"""
def __init__(self):
self.definitions: Dict[str, SpecList] = {}
def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
definitions_from_yaml: Dict[str, List[Definition]] = {}
for item in data:
value = self._parse_yaml_definition(item)
definitions_from_yaml.setdefault(value.name, []).append(value)
self.definitions = {}
self._build_definitions(definitions_from_yaml)
return self.definitions
def parse_user_specs(self, *, name, yaml_list) -> SpecList:
definition = Definition(name=name, yaml_list=yaml_list, when=None)
return self._speclist_from_definitions(name, [definition])
def _parse_yaml_definition(self, yaml_entry) -> Definition:
when_string = yaml_entry.get("when")
if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
attributes = ", ".join(x for x in yaml_entry if x != "when")
error_msg = f"definition must have a single attribute, got many: {attributes}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
for name, yaml_list in yaml_entry.items():
if name == "when":
continue
return Definition(name=name, yaml_list=yaml_list, when=when_string)
# If we are here, it means only "when" is in the entry
mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
error_msg = "definition must have a single attribute, got none"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
for name, definitions in definitions_from_yaml.items():
self.definitions[name] = self._speclist_from_definitions(name, definitions)
def _speclist_from_definitions(self, name, definitions) -> SpecList:
combined_yaml_list = []
for def_part in definitions:
if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
continue
combined_yaml_list.extend(def_part.yaml_list)
expanded_list = self._expand_yaml_list(combined_yaml_list)
return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)
def _expand_yaml_list(self, raw_yaml_list):
result = []
for item in raw_yaml_list:
if isinstance(item, str) and item.startswith("$"):
result.extend(self._expand_reference(item))
continue
value = item
if isinstance(item, dict):
value = self._expand_yaml_matrix(item)
result.append(value)
return result
def _expand_reference(self, item: str):
sigil, name = "", item[1:]
if name.startswith("^") or name.startswith("%"):
sigil, name = name[0], name[1:]
if name not in self.definitions:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
error_msg = f"trying to expand the name '{name}', which is not defined yet"
raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")
value = self.definitions[name].specs_as_yaml_list
if not sigil:
return value
return [_sigilify(x, sigil) for x in value]
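A hedged usage sketch of reference expansion, calling the private helper directly with a made-up definition named `mpis`: the `^` sigil in `$^mpis` is applied to every expanded entry.

    parser = SpecListParser()
    parser.parse_definitions(data=[{"mpis": ["mpich", "openmpi"]}])
    print(parser._expand_reference("$^mpis"))   # -> ['^mpich', '^openmpi']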
def _expand_yaml_matrix(self, matrix_yaml):
extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
if extra_attributes:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
if "matrix" not in matrix_yaml:
mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
error_msg = "matrix is missing the 'matrix' attribute"
raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
# Assume data has been validated against the YAML schema
result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
if "exclude" in matrix_yaml:
result["exclude"] = matrix_yaml["exclude"]
return result
class SpecListError(SpackError):
"""Error class for all errors related to SpecList objects."""
class UndefinedReferenceError(SpecListError):
"""Error class for undefined references in Spack stacks."""
class InvalidSpecConstraintError(SpecListError):
"""Error class for invalid spec constraints at concretize time."""
View File
@@ -49,23 +49,10 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
cmds += 'set "SPACK_ENV=%s"\n' % env.path cmds += 'set "SPACK_ENV=%s"\n' % env.path
if view: if view:
cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
if prompt:
old_prompt = os.environ.get("SPACK_OLD_PROMPT")
if not old_prompt:
old_prompt = os.environ.get("PROMPT")
cmds += f'set "SPACK_OLD_PROMPT={old_prompt}"\n'
cmds += f'set "PROMPT={prompt} $P$G"\n'
elif shell == "pwsh": elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path cmds += "$Env:SPACK_ENV='%s'\n" % env.path
if view: if view:
cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
if prompt:
cmds += (
"function global:prompt { $pth = $(Convert-Path $(Get-Location))"
' | Split-Path -leaf; if(!"$Env:SPACK_OLD_PROMPT") '
'{$Env:SPACK_OLD_PROMPT="[spack] PS $pth>"}; '
'"%s PS $pth>"}\n' % prompt
)
else: else:
bash_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=True) bash_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=True)
zsh_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=False, zsh=True) zsh_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=False, zsh=True)
@@ -120,19 +107,10 @@ def deactivate_header(shell):
cmds += 'set "SPACK_ENV="\n' cmds += 'set "SPACK_ENV="\n'
cmds += 'set "SPACK_ENV_VIEW="\n' cmds += 'set "SPACK_ENV_VIEW="\n'
# TODO: despacktivate # TODO: despacktivate
old_prompt = os.environ.get("SPACK_OLD_PROMPT") # TODO: prompt
if old_prompt:
cmds += f'set "PROMPT={old_prompt}"\n'
cmds += 'set "SPACK_OLD_PROMPT="\n'
elif shell == "pwsh": elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n" cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n" cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
cmds += (
"function global:prompt { $pth = $(Convert-Path $(Get-Location))"
' | Split-Path -leaf; $spack_prompt = "[spack] $pth >"; '
'if("$Env:SPACK_OLD_PROMPT") {$spack_prompt=$Env:SPACK_OLD_PROMPT};'
" $spack_prompt}\n"
)
else: else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n" cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n" cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
View File
@@ -27,14 +27,11 @@
import os import os
import re import re
import shutil import shutil
import sys
import time
import urllib.error import urllib.error
import urllib.parse import urllib.parse
import urllib.request import urllib.request
import urllib.response
from pathlib import PurePath from pathlib import PurePath
from typing import Callable, List, Mapping, Optional from typing import List, Optional
import llnl.url import llnl.url
import llnl.util import llnl.util
@@ -222,114 +219,6 @@ def mirror_id(self):
"""BundlePackages don't have a mirror id.""" """BundlePackages don't have a mirror id."""
def _format_speed(total_bytes: int, elapsed: float) -> str:
"""Return a human-readable average download speed string."""
elapsed = 1 if elapsed <= 0 else elapsed # avoid divide by zero
speed = total_bytes / elapsed
if speed >= 1e9:
return f"{speed / 1e9:6.1f} GB/s"
elif speed >= 1e6:
return f"{speed / 1e6:6.1f} MB/s"
elif speed >= 1e3:
return f"{speed / 1e3:6.1f} KB/s"
return f"{speed:6.1f} B/s"
def _format_bytes(total_bytes: int) -> str:
"""Return a human-readable total bytes string."""
if total_bytes >= 1e9:
return f"{total_bytes / 1e9:7.2f} GB"
elif total_bytes >= 1e6:
return f"{total_bytes / 1e6:7.2f} MB"
elif total_bytes >= 1e3:
return f"{total_bytes / 1e3:7.2f} KB"
return f"{total_bytes:7.2f} B"
class FetchProgress:
#: Characters to rotate in the spinner.
spinner = ["|", "/", "-", "\\"]
def __init__(
self,
total_bytes: Optional[int] = None,
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> None:
"""Initialize a FetchProgress instance.
Args:
total_bytes: Total number of bytes to download, if known.
enabled: Whether to print progress information.
get_time: Function to get the current time."""
#: Number of bytes downloaded so far.
self.current_bytes = 0
#: Delta time between progress prints
self.delta = 0.1
#: Whether to print progress information.
self.enabled = enabled
#: Function to get the current time.
self.get_time = get_time
#: Time of last progress print to limit output
self.last_printed = 0.0
#: Time of start of download
self.start_time = get_time() if enabled else 0.0
#: Total number of bytes to download, if known.
self.total_bytes = total_bytes if total_bytes and total_bytes > 0 else 0
#: Index of spinner character to print (used if total bytes is unknown)
self.index = 0
@classmethod
def from_headers(
cls,
headers: Mapping[str, str],
enabled: bool = True,
get_time: Callable[[], float] = time.time,
) -> "FetchProgress":
"""Create a FetchProgress instance from HTTP headers."""
# headers.get is case-insensitive if it's from a HTTPResponse object.
content_length = headers.get("Content-Length")
try:
total_bytes = int(content_length) if content_length else None
except ValueError:
total_bytes = None
return cls(total_bytes=total_bytes, enabled=enabled, get_time=get_time)
def advance(self, num_bytes: int, out=sys.stdout) -> None:
if not self.enabled:
return
self.current_bytes += num_bytes
self.print(out=out)
def print(self, final: bool = False, out=sys.stdout) -> None:
if not self.enabled:
return
current_time = self.get_time()
if self.last_printed + self.delta < current_time or final:
self.last_printed = current_time
# print a newline if this is the final update
maybe_newline = "\n" if final else ""
# if we know the total bytes, show a percentage, otherwise a spinner
if self.total_bytes > 0:
percentage = min(100 * self.current_bytes / self.total_bytes, 100.0)
percent_or_spinner = f"[{percentage:3.0f}%] "
else:
# only show the spinner if we are not at 100%
if final:
percent_or_spinner = "[100%] "
else:
percent_or_spinner = f"[ {self.spinner[self.index]} ] "
self.index = (self.index + 1) % len(self.spinner)
print(
f"\r {percent_or_spinner}{_format_bytes(self.current_bytes)} "
f"@ {_format_speed(self.current_bytes, current_time - self.start_time)}"
f"{maybe_newline}",
end="",
flush=True,
file=out,
)
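For reference, a sketch of how the removed progress class was driven, using a fake clock so the output cadence is deterministic (all values are made up):

    ticks = iter([0.0, 0.2, 0.4])                       # fake clock readings
    progress = FetchProgress(
        total_bytes=1_000_000, enabled=True, get_time=lambda: next(ticks)
    )
    progress.advance(1_000_000)    # prints "\r [100%] ..." without a newline
    progress.print(final=True)     # reprints the line with a trailing newline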
@fetcher @fetcher
class URLFetchStrategy(FetchStrategy): class URLFetchStrategy(FetchStrategy):
"""URLFetchStrategy pulls source code from a URL for an archive, check the """URLFetchStrategy pulls source code from a URL for an archive, check the
@@ -406,9 +295,8 @@ def fetch(self):
) )
def _fetch_from_url(self, url): def _fetch_from_url(self, url):
fetch_method = spack.config.get("config:url_fetch_method", "urllib") if spack.config.get("config:url_fetch_method") == "curl":
if fetch_method.startswith("curl"): return self._fetch_curl(url)
return self._fetch_curl(url, config_args=fetch_method.split()[1:])
else: else:
return self._fetch_urllib(url) return self._fetch_urllib(url)
@@ -427,7 +315,7 @@ def _check_headers(self, headers):
tty.warn(msg) tty.warn(msg)
@_needs_stage @_needs_stage
def _fetch_urllib(self, url, chunk_size=65536): def _fetch_urllib(self, url):
save_file = self.stage.save_filename save_file = self.stage.save_filename
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT}) request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
@@ -438,15 +326,8 @@ def _fetch_urllib(self, url, chunk_size=65536):
try: try:
response = web_util.urlopen(request) response = web_util.urlopen(request)
tty.msg(f"Fetching {url}") tty.msg(f"Fetching {url}")
progress = FetchProgress.from_headers(response.headers, enabled=sys.stdout.isatty())
with open(save_file, "wb") as f: with open(save_file, "wb") as f:
while True: shutil.copyfileobj(response, f)
chunk = response.read(chunk_size)
if not chunk:
break
f.write(chunk)
progress.advance(len(chunk))
progress.print(final=True)
except OSError as e: except OSError as e:
# clean up archive on failure. # clean up archive on failure.
if self.archive_file: if self.archive_file:
@@ -464,7 +345,7 @@ def _fetch_urllib(self, url, chunk_size=65536):
self._check_headers(str(response.headers)) self._check_headers(str(response.headers))
@_needs_stage @_needs_stage
def _fetch_curl(self, url, config_args=[]): def _fetch_curl(self, url):
save_file = None save_file = None
partial_file = None partial_file = None
if self.stage.save_filename: if self.stage.save_filename:
@@ -493,7 +374,7 @@ def _fetch_curl(self, url, config_args=[]):
timeout = self.extra_options.get("timeout") timeout = self.extra_options.get("timeout")
base_args = web_util.base_curl_fetch_args(url, timeout) base_args = web_util.base_curl_fetch_args(url, timeout)
curl_args = config_args + save_args + base_args + cookie_args curl_args = save_args + base_args + cookie_args
# Run curl but grab the mime type from the http headers # Run curl but grab the mime type from the http headers
curl = self.curl curl = self.curl
View File
@@ -12,7 +12,7 @@
import shutil import shutil
import sys import sys
from collections import Counter, OrderedDict from collections import Counter, OrderedDict
from typing import Callable, Iterable, List, Optional, Tuple, Type, TypeVar, Union from typing import Callable, List, Optional, Tuple, Type, TypeVar, Union
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -391,7 +391,7 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
if self.test_failures: if self.test_failures:
raise TestFailure(self.test_failures) raise TestFailure(self.test_failures)
def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None: def stand_alone_tests(self, kwargs):
"""Run the package's stand-alone tests. """Run the package's stand-alone tests.
Args: Args:
@@ -399,9 +399,7 @@ def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
""" """
import spack.build_environment # avoid circular dependency import spack.build_environment # avoid circular dependency
spack.build_environment.start_build_process( spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
self.pkg, test_process, kwargs, timeout=timeout
)
def parts(self) -> int: def parts(self) -> int:
"""The total number of (checked) test parts.""" """The total number of (checked) test parts."""
@@ -849,7 +847,7 @@ def write_test_summary(counts: "Counter"):
class TestSuite: class TestSuite:
"""The class that manages specs for ``spack test run`` execution.""" """The class that manages specs for ``spack test run`` execution."""
def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None: def __init__(self, specs, alias=None):
# copy so that different test suites have different package objects # copy so that different test suites have different package objects
# even if they contain the same spec # even if they contain the same spec
self.specs = [spec.copy() for spec in specs] self.specs = [spec.copy() for spec in specs]
@@ -857,43 +855,42 @@ def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None:
self.current_base_spec = None # spec currently running do_test self.current_base_spec = None # spec currently running do_test
self.alias = alias self.alias = alias
self._hash: Optional[str] = None self._hash = None
self._stage: Optional[Prefix] = None self._stage = None
self.counts: "Counter" = Counter() self.counts: "Counter" = Counter()
@property @property
def name(self) -> str: def name(self):
"""The name (alias or, if none, hash) of the test suite.""" """The name (alias or, if none, hash) of the test suite."""
return self.alias if self.alias else self.content_hash return self.alias if self.alias else self.content_hash
@property @property
def content_hash(self) -> str: def content_hash(self):
"""The hash used to uniquely identify the test suite.""" """The hash used to uniquely identify the test suite."""
if not self._hash: if not self._hash:
json_text = sjson.dump(self.to_dict()) json_text = sjson.dump(self.to_dict())
assert json_text is not None, f"{__name__} unexpected value for 'json_text'"
sha = hashlib.sha1(json_text.encode("utf-8")) sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower() b32_hash = base64.b32encode(sha.digest()).lower()
b32_hash = b32_hash.decode("utf-8") b32_hash = b32_hash.decode("utf-8")
self._hash = b32_hash self._hash = b32_hash
return self._hash return self._hash
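The hashing scheme itself is easy to reproduce; a self-contained sketch with a dummy JSON payload in place of the suite's real serialized form:

    import base64
    import hashlib

    digest = hashlib.sha1(b'{"specs": []}').digest()    # dummy payload
    print(base64.b32encode(digest).lower().decode("utf-8"))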
def __call__( def __call__(self, *args, **kwargs):
self,
*,
remove_directory: bool = True,
dirty: bool = False,
fail_first: bool = False,
externals: bool = False,
timeout: Optional[int] = None,
):
self.write_reproducibility_data() self.write_reproducibility_data()
remove_directory = kwargs.get("remove_directory", True)
dirty = kwargs.get("dirty", False)
fail_first = kwargs.get("fail_first", False)
externals = kwargs.get("externals", False)
for spec in self.specs: for spec in self.specs:
try: try:
if spec.package.test_suite: if spec.package.test_suite:
raise TestSuiteSpecError( raise TestSuiteSpecError(
f"Package {spec.package.name} cannot be run in two test suites at once" "Package {} cannot be run in two test suites at once".format(
spec.package.name
)
) )
# Set up the test suite to know which test is running # Set up the test suite to know which test is running
@@ -908,7 +905,7 @@ def __call__(
fs.mkdirp(test_dir) fs.mkdirp(test_dir)
# run the package tests # run the package tests
spec.package.do_test(dirty=dirty, externals=externals, timeout=timeout) spec.package.do_test(dirty=dirty, externals=externals)
# Clean up on success # Clean up on success
if remove_directory: if remove_directory:
@@ -959,12 +956,15 @@ def __call__(
if failures: if failures:
raise TestSuiteFailure(failures) raise TestSuiteFailure(failures)
def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus: def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
"""Returns the overall test results status for the spec. """Determine the overall test results status for the spec.
Args: Args:
spec: instance of the spec under test spec: instance of the spec under test
externals: ``True`` if externals are to be tested, else ``False`` externals: ``True`` if externals are to be tested, else ``False``
Returns:
the spec's test status if available or ``None``
""" """
tests_status_file = self.tested_file_for_spec(spec) tests_status_file = self.tested_file_for_spec(spec)
if not os.path.exists(tests_status_file): if not os.path.exists(tests_status_file):
@@ -981,84 +981,109 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus:
value = (f.read()).strip("\n") value = (f.read()).strip("\n")
return TestStatus(int(value)) if value else TestStatus.NO_TESTS return TestStatus(int(value)) if value else TestStatus.NO_TESTS
def ensure_stage(self) -> None: def ensure_stage(self):
"""Ensure the test suite stage directory exists.""" """Ensure the test suite stage directory exists."""
if not os.path.exists(self.stage): if not os.path.exists(self.stage):
fs.mkdirp(self.stage) fs.mkdirp(self.stage)
@property @property
def stage(self) -> Prefix: def stage(self):
"""The root test suite stage directory""" """The root test suite stage directory.
Returns:
str: the spec's test stage directory path
"""
if not self._stage: if not self._stage:
self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash)) self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
return self._stage return self._stage
@stage.setter @stage.setter
def stage(self, value: Union[Prefix, str]) -> None: def stage(self, value):
"""Set the value of a non-default stage directory.""" """Set the value of a non-default stage directory."""
self._stage = value if isinstance(value, Prefix) else Prefix(value) self._stage = value if isinstance(value, Prefix) else Prefix(value)
@property @property
def results_file(self) -> Prefix: def results_file(self):
"""The path to the results summary file.""" """The path to the results summary file."""
return self.stage.join(results_filename) return self.stage.join(results_filename)
@classmethod @classmethod
def test_pkg_id(cls, spec: Spec) -> str: def test_pkg_id(cls, spec):
"""The standard install test package identifier. """The standard install test package identifier.
Args: Args:
spec: instance of the spec under test spec: instance of the spec under test
Returns:
str: the install test package identifier
""" """
return spec.format_path("{name}-{version}-{hash:7}") return spec.format_path("{name}-{version}-{hash:7}")
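For illustration, with a concrete spec the identifier combines name, version, and the first seven hash characters (hash value made up):

    pkg_id = TestSuite.test_pkg_id(spec)   # spec: a concrete Spec
    print(pkg_id)                          # e.g. "zlib-1.3.1-abcdefg"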
@classmethod @classmethod
def test_log_name(cls, spec: Spec) -> str: def test_log_name(cls, spec):
"""The standard log filename for a spec. """The standard log filename for a spec.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
"""
return f"{cls.test_pkg_id(spec)}-test-out.txt"
def log_file_for_spec(self, spec: Spec) -> Prefix: Returns:
str: the spec's log filename
"""
return "%s-test-out.txt" % cls.test_pkg_id(spec)
def log_file_for_spec(self, spec):
"""The test log file path for the provided spec. """The test log file path for the provided spec.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
Returns:
str: the path to the spec's log file
""" """
return self.stage.join(self.test_log_name(spec)) return self.stage.join(self.test_log_name(spec))
def test_dir_for_spec(self, spec: Spec) -> Prefix: def test_dir_for_spec(self, spec):
"""The path to the test stage directory for the provided spec. """The path to the test stage directory for the provided spec.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
Returns:
str: the spec's test stage directory path
""" """
return Prefix(self.stage.join(self.test_pkg_id(spec))) return Prefix(self.stage.join(self.test_pkg_id(spec)))
@classmethod @classmethod
def tested_file_name(cls, spec: Spec) -> str: def tested_file_name(cls, spec):
"""The standard test status filename for the spec. """The standard test status filename for the spec.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
Returns:
str: the spec's test status filename
""" """
return "%s-tested.txt" % cls.test_pkg_id(spec) return "%s-tested.txt" % cls.test_pkg_id(spec)
def tested_file_for_spec(self, spec: Spec) -> str: def tested_file_for_spec(self, spec):
"""The test status file path for the spec. """The test status file path for the spec.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
Returns:
str: the spec's test status file path
""" """
return fs.join_path(self.stage, self.tested_file_name(spec)) return fs.join_path(self.stage, self.tested_file_name(spec))
@property @property
def current_test_cache_dir(self) -> str: def current_test_cache_dir(self):
"""Path to the test stage directory where the current spec's cached """Path to the test stage directory where the current spec's cached
build-time files were automatically copied. build-time files were automatically copied.
Returns:
str: path to the current spec's staged, cached build-time files.
Raises: Raises:
TestSuiteSpecError: If there is no spec being tested TestSuiteSpecError: If there is no spec being tested
""" """
@@ -1070,10 +1095,13 @@ def current_test_cache_dir(self) -> str:
return self.test_dir_for_spec(base_spec).cache.join(test_spec.name) return self.test_dir_for_spec(base_spec).cache.join(test_spec.name)
@property @property
def current_test_data_dir(self) -> str: def current_test_data_dir(self):
"""Path to the test stage directory where the current spec's custom """Path to the test stage directory where the current spec's custom
package (data) files were automatically copied. package (data) files were automatically copied.
Returns:
str: path to the current spec's staged, custom package (data) files
Raises: Raises:
TestSuiteSpecError: If there is no spec being tested TestSuiteSpecError: If there is no spec being tested
""" """
@@ -1084,17 +1112,17 @@ def current_test_data_dir(self) -> str:
base_spec = self.current_base_spec base_spec = self.current_base_spec
return self.test_dir_for_spec(base_spec).data.join(test_spec.name) return self.test_dir_for_spec(base_spec).data.join(test_spec.name)
def write_test_result(self, spec: Spec, result: TestStatus) -> None: def write_test_result(self, spec, result):
"""Write the spec's test result to the test suite results file. """Write the spec's test result to the test suite results file.
Args: Args:
spec: instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
result: result from the spec's test execution (e.g, PASSED) result (str): result from the spec's test execution (e.g, PASSED)
""" """
msg = f"{self.test_pkg_id(spec)} {result}" msg = f"{self.test_pkg_id(spec)} {result}"
_add_msg_to_file(self.results_file, msg) _add_msg_to_file(self.results_file, msg)
def write_reproducibility_data(self) -> None: def write_reproducibility_data(self):
for spec in self.specs: for spec in self.specs:
repo_cache_path = self.stage.repo.join(spec.name) repo_cache_path = self.stage.repo.join(spec.name)
spack.repo.PATH.dump_provenance(spec, repo_cache_path) spack.repo.PATH.dump_provenance(spec, repo_cache_path)
@@ -1139,12 +1167,12 @@ def from_dict(d):
return TestSuite(specs, alias) return TestSuite(specs, alias)
@staticmethod @staticmethod
def from_file(filename: str) -> "TestSuite": def from_file(filename):
"""Instantiate a TestSuite using the specs and optional alias """Instantiate a TestSuite using the specs and optional alias
provided in the given file. provided in the given file.
Args: Args:
filename: The path to the JSON file containing the test filename (str): The path to the JSON file containing the test
suite specs and optional alias. suite specs and optional alias.
Raises: Raises:
View File
@@ -65,6 +65,7 @@
import spack.util.executable import spack.util.executable
import spack.util.path import spack.util.path
import spack.util.timer as timer import spack.util.timer as timer
from spack.traverse import CoverNodesVisitor, traverse_breadth_first_with_visitor
from spack.util.environment import EnvironmentModifications, dump_environment from spack.util.environment import EnvironmentModifications, dump_environment
from spack.util.executable import which from spack.util.executable import which
@@ -118,6 +119,11 @@ class ExecuteResult(enum.Enum):
FAILED = enum.auto() FAILED = enum.auto()
# Task is missing build spec and will be requeued # Task is missing build spec and will be requeued
MISSING_BUILD_SPEC = enum.auto() MISSING_BUILD_SPEC = enum.auto()
# Task is queued to install from binary but no binary found
MISSING_BINARY = enum.auto()
requeue_results = [ExecuteResult.MISSING_BUILD_SPEC, ExecuteResult.MISSING_BINARY]
class InstallAction(enum.Enum): class InstallAction(enum.Enum):
@@ -129,22 +135,46 @@ class InstallAction(enum.Enum):
OVERWRITE = enum.auto() OVERWRITE = enum.auto()
class InstallStatus: class InstallerProgress:
def __init__(self, pkg_count: int): """Installation progress tracker"""
# Counters used for showing status information
self.pkg_num: int = 0 def __init__(self, packages: List["spack.package_base.PackageBase"]):
self.pkg_count: int = pkg_count self.counter = SpecsCount(dt.BUILD | dt.LINK | dt.RUN)
self.pkg_count: int = self.counter.total([pkg.spec for pkg in packages])
self.pkg_ids: Set[str] = set() self.pkg_ids: Set[str] = set()
self.pkg_num: int = 0
self.add_progress: bool = spack.config.get("config:install_status", True)
def next_pkg(self, pkg: "spack.package_base.PackageBase"): def set_installed(self, pkg: "spack.package_base.PackageBase", message: str) -> None:
"""
Flag package as installed and output the installation status if
enabled by config:install_status.
Args:
pkg: installed package
message: message to be output
"""
pkg_id = package_id(pkg.spec) pkg_id = package_id(pkg.spec)
if pkg_id not in self.pkg_ids: if pkg_id not in self.pkg_ids:
self.pkg_num += 1
self.pkg_ids.add(pkg_id) self.pkg_ids.add(pkg_id)
visited = max(len(self.pkg_ids), self.counter.total([pkg.spec]), self.pkg_num + 1)
self.pkg_num = visited
if tty.msg_enabled():
post = self.get_progress() if self.add_progress else ""
print(
colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message) + f" {post}"
)
self.set_term_title("Installed")
def set_term_title(self, text: str): def set_term_title(self, text: str):
if not spack.config.get("config:install_status", True): """Update the terminal title bar.
Args:
text: message to output in the terminal title bar
"""
if not self.add_progress:
return return
if not sys.stdout.isatty(): if not sys.stdout.isatty():
@@ -155,7 +185,11 @@ def set_term_title(self, text: str):
sys.stdout.flush() sys.stdout.flush()
def get_progress(self) -> str: def get_progress(self) -> str:
return f"[{self.pkg_num}/{self.pkg_count}]" """Current installation progress
Returns: string showing the current installation progress
"""
return f"[{self.pkg_num}/{self.pkg_count} completed]"
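A small sketch of the resulting progress suffix, setting the counters by hand for illustration (normally `set_installed` maintains them); `progress` is assumed to be an InstallerProgress instance:

    progress.pkg_num, progress.pkg_count = 3, 10   # hypothetical counters
    print(progress.get_progress())                 # -> "[3/10 completed]"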
class TermStatusLine: class TermStatusLine:
@@ -224,7 +258,9 @@ def _check_last_phase(pkg: "spack.package_base.PackageBase") -> None:
pkg.last_phase = None # type: ignore[attr-defined] pkg.last_phase = None # type: ignore[attr-defined]
def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explicit: bool) -> bool: def _handle_external_and_upstream(
pkg: "spack.package_base.PackageBase", explicit: bool, progress: InstallerProgress
) -> bool:
""" """
Determine if the package is external or upstream and register it in the Determine if the package is external or upstream and register it in the
database if it is external package. database if it is external package.
@@ -232,6 +268,8 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
Args: Args:
pkg: the package whose installation is under consideration pkg: the package whose installation is under consideration
explicit: the package was explicitly requested by the user explicit: the package was explicitly requested by the user
progress: installation progress tracker
Return: Return:
``True`` if the package is not to be installed locally, otherwise ``False`` ``True`` if the package is not to be installed locally, otherwise ``False``
""" """
@@ -239,7 +277,7 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB. # consists in module file generation and registration in the DB.
if pkg.spec.external: if pkg.spec.external:
_process_external_package(pkg, explicit) _process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})") progress.set_installed(pkg, f"{pkg.prefix} (external {package_id(pkg.spec)})")
return True return True
if pkg.spec.installed_upstream: if pkg.spec.installed_upstream:
@@ -247,7 +285,7 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
f"{package_id(pkg.spec)} is installed in an upstream Spack instance at " f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}" f"{pkg.spec.prefix}"
) )
_print_installed_pkg(pkg.prefix) progress.set_installed(pkg, pkg.prefix)
# This will result in skipping all post-install hooks. In the case # This will result in skipping all post-install hooks. In the case
# of modules this is considered correct because we want to retrieve # of modules this is considered correct because we want to retrieve
@@ -323,17 +361,6 @@ def _log_prefix(pkg_name) -> str:
return f"{pid}{pkg_name}:" return f"{pid}{pkg_name}:"
def _print_installed_pkg(message: str) -> None:
"""
Output a message with a package icon.
Args:
message (str): message to be output
"""
if tty.msg_enabled():
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None: def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:
"""Output install test log file path but only if have test failures. """Output install test log file path but only if have test failures.
@@ -354,13 +381,17 @@ def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None:
def _install_from_cache( def _install_from_cache(
pkg: "spack.package_base.PackageBase", explicit: bool, unsigned: Optional[bool] = False pkg: "spack.package_base.PackageBase",
progress: InstallerProgress,
explicit: bool,
unsigned: Optional[bool] = False,
) -> bool: ) -> bool:
""" """
Install the package from binary cache Install the package from binary cache
Args: Args:
pkg: package to install from the binary cache pkg: package to install from the binary cache
progress: installation status tracker
explicit: ``True`` if installing the package was explicitly explicit: ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False`` requested by the user, otherwise, ``False``
unsigned: if ``True`` or ``False`` override the mirror signature verification defaults unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
@@ -380,7 +411,7 @@ def _install_from_cache(
_write_timer_json(pkg, t, True) _write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t) _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_installed_pkg(pkg.spec.prefix) progress.set_installed(pkg, pkg.spec.prefix)
spack.hooks.post_install(pkg.spec, explicit) spack.hooks.post_install(pkg.spec, explicit)
return True return True
@@ -566,11 +597,10 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
tty.warn(f"Warning: Couldn't copy in provenance for {node.name}") tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")
# Create a destination repository # Create a destination repository
pkg_api = spack.repo.PATH.get_repo(node.namespace).package_api dest_repo_root = os.path.join(path, node.namespace)
repo_root = os.path.join(path, node.namespace) if pkg_api < (2, 0) else path if not os.path.exists(dest_repo_root):
repo = spack.repo.create_or_construct( spack.repo.create_repo(dest_repo_root)
repo_root, namespace=node.namespace, package_api=pkg_api repo = spack.repo.from_path(dest_repo_root)
)
# Get the location of the package in the dest repo. # Get the location of the package in the dest repo.
dest_pkg_dir = repo.dirname_for_package_name(node.name) dest_pkg_dir = repo.dirname_for_package_name(node.name)
@@ -592,7 +622,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
return [package_id(d) for d in spec.dependents()] return [package_id(d) for d in spec.dependents()]
def install_msg(name: str, pid: int, install_status: InstallStatus) -> str: def install_msg(name: str, pid: int) -> str:
""" """
Colorize the name/id of the package being installed Colorize the name/id of the package being installed
@@ -603,12 +633,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
Return: Colorized installing message Return: Colorized installing message
""" """
pre = f"{pid}: " if tty.show_pid() else "" pre = f"{pid}: " if tty.show_pid() else ""
post = ( return pre + colorize("@*{Installing} @*g{%s}" % (name))
" @*{%s}" % install_status.get_progress()
if install_status and spack.config.get("config:install_status", True)
else ""
)
return pre + colorize("@*{Installing} @*g{%s}%s" % (name, post))
def archive_install_logs(pkg: "spack.package_base.PackageBase", phase_log_dir: str) -> None: def archive_install_logs(pkg: "spack.package_base.PackageBase", phase_log_dir: str) -> None:
@@ -717,6 +742,18 @@ def package_id(spec: "spack.spec.Spec") -> str:
return f"{spec.name}-{spec.version}-{spec.dag_hash()}" return f"{spec.name}-{spec.version}-{spec.dag_hash()}"
class SpecsCount:
def __init__(self, depflag: int):
self.depflag = depflag
def total(self, specs: List["spack.spec.Spec"]):
visitor = CoverNodesVisitor(
spack.spec.DagCountVisitor(self.depflag), key=lambda s: package_id(s)
)
traverse_breadth_first_with_visitor(specs, visitor)
return visitor.visitor.number
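A hedged usage sketch, assuming `spec_a` and `spec_b` are concrete specs already in hand:

    counter = SpecsCount(dt.LINK | dt.RUN)
    total = counter.total([spec_a, spec_b])   # unique nodes, keyed by package_id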
class BuildRequest: class BuildRequest:
"""Class for representing an installation request.""" """Class for representing an installation request."""
@@ -807,16 +844,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
depflag = dt.LINK | dt.RUN depflag = dt.LINK | dt.RUN
include_build_deps = self.install_args.get("include_build_deps") include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg.spec): if include_build_deps:
cache_only = self.install_args.get("package_cache_only")
else:
cache_only = self.install_args.get("dependencies_cache_only")
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
):
depflag |= dt.BUILD depflag |= dt.BUILD
if self.run_tests(pkg): if self.run_tests(pkg):
depflag |= dt.TEST depflag |= dt.TEST
@@ -873,7 +901,6 @@ def __init__(
pkg: "spack.package_base.PackageBase", pkg: "spack.package_base.PackageBase",
request: BuildRequest, request: BuildRequest,
*, *,
compiler: bool = False,
start: float = 0.0, start: float = 0.0,
attempts: int = 0, attempts: int = 0,
status: BuildStatus = BuildStatus.QUEUED, status: BuildStatus = BuildStatus.QUEUED,
@@ -968,11 +995,14 @@ def __init__(
self.attempts = attempts self.attempts = attempts
self._update() self._update()
def execute(self, install_status: InstallStatus) -> ExecuteResult: def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""Execute the work of this task. """Execute the work of this task.
The ``install_status`` is an ``InstallStatus`` object used to format progress reporting for Args:
this task in the context of the full ``BuildRequest``.""" progress: installation progress tracker
Returns: execution result
"""
raise NotImplementedError raise NotImplementedError
def __eq__(self, other): def __eq__(self, other):
@@ -1137,33 +1167,26 @@ def priority(self):
class BuildTask(Task): class BuildTask(Task):
"""Class for representing a build task for a package.""" """Class for representing a build task for a package."""
def execute(self, install_status): def execute(self, progress: InstallerProgress) -> ExecuteResult:
""" """
Perform the installation of the requested spec and/or dependency Perform the installation of the requested spec and/or dependency
represented by the build task. represented by the build task.
Args:
progress: installation progress tracker
Returns: execution result
""" """
install_args = self.request.install_args install_args = self.request.install_args
tests = install_args.get("tests") tests = install_args.get("tests", False)
unsigned = install_args.get("unsigned")
pkg, pkg_id = self.pkg, self.pkg_id pkg, pkg_id = self.pkg, self.pkg_id
tty.msg(install_msg(pkg_id, self.pid, install_status)) tty.msg(install_msg(pkg_id, self.pid))
self.start = self.start or time.time() self.start = self.start or time.time()
self.status = BuildStatus.INSTALLING self.status = BuildStatus.INSTALLING
# Use the binary cache if requested pkg.run_tests = tests is True or (tests and pkg.name in tests)
if self.use_cache:
if _install_from_cache(pkg, self.explicit, unsigned):
return ExecuteResult.SUCCESS
elif self.cache_only:
raise spack.error.InstallError(
"No binary found when cache-only was specified", pkg=pkg
)
else:
tty.msg(f"No binary for {pkg_id} found: installing from source")
pkg.run_tests = tests is True or tests and pkg.name in tests
# hook that allows tests to inspect the Package before installation # hook that allows tests to inspect the Package before installation
# see unit_test_check() docs. # see unit_test_check() docs.
@@ -1186,6 +1209,8 @@ def execute(self, install_status):
# Note: PARENT of the build process adds the new package to # Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file. # the database, so that we don't need to re-read from file.
spack.store.STORE.db.add(pkg.spec, explicit=self.explicit) spack.store.STORE.db.add(pkg.spec, explicit=self.explicit)
progress.set_installed(self.pkg, self.pkg.prefix)
except spack.error.StopPhase as e: except spack.error.StopPhase as e:
# A StopPhase exception means that do_install was asked to # A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point # stop early from clients, and is not an error at this point
@@ -1195,10 +1220,77 @@ def execute(self, install_status):
return ExecuteResult.SUCCESS return ExecuteResult.SUCCESS
class InstallTask(Task):
"""Class for representing a build task for a package."""
def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""
Attempt to install the requested spec and/or dependency represented
by the install task from the binary cache.
Args:
progress: installation progress tracker
Returns: execution result
"""
# no-op and requeue to build if not allowed to use cache
if not self.use_cache:
return ExecuteResult.MISSING_BINARY
install_args = self.request.install_args
unsigned = install_args.get("unsigned")
pkg, pkg_id = self.pkg, self.pkg_id
tty.msg(install_msg(pkg_id, self.pid))
self.start = self.start or time.time()
self.status = BuildStatus.INSTALLING
try:
if _install_from_cache(pkg, progress, self.explicit, unsigned):
return ExecuteResult.SUCCESS
elif self.cache_only:
raise spack.error.InstallError(
"No binary found when cache-only was specified", pkg=pkg
)
else:
tty.msg(f"No binary for {pkg_id} found: installing from source")
return ExecuteResult.MISSING_BINARY
except binary_distribution.NoChecksumException as exc:
if self.cache_only:
raise
tty.error(
f"Failed to install {self.pkg.name} from binary cache due "
f"to {str(exc)}: Requeueing to install from source."
)
return ExecuteResult.MISSING_BINARY
def build_task(self, installed):
build_task = BuildTask(
pkg=self.pkg,
request=self.request,
start=0,
attempts=self.attempts,
status=BuildStatus.QUEUED,
installed=installed,
)
# Fix up dependents in case they were changed by `add_dependent`;
# this happens, e.g., for the `build_spec` of a spliced spec
build_task.dependents = self.dependents
# Same for dependencies
build_task.dependencies = self.dependencies
build_task.uninstalled_deps = self.uninstalled_deps - installed
return build_task
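The handoff between the two task kinds is worth spelling out; a sketch of the control flow as driven from `_install_task` further down (names come from the surrounding diff, `installer` stands for the PackageInstaller instance):

    result = install_task.execute(progress)       # try the binary cache first
    if result == ExecuteResult.MISSING_BINARY:
        installer._requeue_as_build_task(install_task)   # fall back to a source build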
class RewireTask(Task): class RewireTask(Task):
"""Class for representing a rewire task for a package.""" """Class for representing a rewire task for a package."""
def execute(self, install_status): def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""Execute rewire task """Execute rewire task
Rewire tasks are executed by either rewiring self.package.spec.build_spec that is already Rewire tasks are executed by either rewiring self.package.spec.build_spec that is already
@@ -1207,24 +1299,30 @@ def execute(self, install_status):
If not available installed or as binary, return ExecuteResult.MISSING_BUILD_SPEC. If not available installed or as binary, return ExecuteResult.MISSING_BUILD_SPEC.
This will prompt the Installer to requeue the task with a dependency on the BuildTask This will prompt the Installer to requeue the task with a dependency on the BuildTask
to install self.pkg.spec.build_spec to install self.pkg.spec.build_spec
Args:
progress: installation progress tracker
Returns: execution result
""" """
oldstatus = self.status oldstatus = self.status
self.status = BuildStatus.INSTALLING self.status = BuildStatus.INSTALLING
tty.msg(install_msg(self.pkg_id, self.pid, install_status)) tty.msg(install_msg(self.pkg_id, self.pid))
self.start = self.start or time.time() self.start = self.start or time.time()
if not self.pkg.spec.build_spec.installed: if not self.pkg.spec.build_spec.installed:
try: try:
install_args = self.request.install_args install_args = self.request.install_args
unsigned = install_args.get("unsigned") unsigned = install_args.get("unsigned")
_process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned) _process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned)
_print_installed_pkg(self.pkg.prefix) progress.set_installed(self.pkg, self.pkg.prefix)
return ExecuteResult.SUCCESS return ExecuteResult.SUCCESS
except BaseException as e: except BaseException as e:
tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}") tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}")
self.status = oldstatus self.status = oldstatus
return ExecuteResult.MISSING_BUILD_SPEC return ExecuteResult.MISSING_BUILD_SPEC
spack.rewiring.rewire_node(self.pkg.spec, self.explicit) spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
_print_installed_pkg(self.pkg.prefix) progress.set_installed(self.pkg, self.pkg.prefix)
return ExecuteResult.SUCCESS return ExecuteResult.SUCCESS
@@ -1324,6 +1422,9 @@ def __init__(
# Priority queue of tasks # Priority queue of tasks
self.build_pq: List[Tuple[Tuple[int, int], Task]] = [] self.build_pq: List[Tuple[Tuple[int, int], Task]] = []
# Installation status tracker
self.progress: InstallerProgress = InstallerProgress(packages)
# Mapping of unique package ids to task # Mapping of unique package ids to task
self.build_tasks: Dict[str, Task] = {} self.build_tasks: Dict[str, Task] = {}
@@ -1378,8 +1479,9 @@ def _add_init_task(
request: the associated install request request: the associated install request
all_deps: dictionary of all dependencies and associated dependents all_deps: dictionary of all dependencies and associated dependents
""" """
cls = RewireTask if pkg.spec.spliced else BuildTask cls = RewireTask if pkg.spec.spliced else InstallTask
task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed) task: Task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed)
for dep_id in task.dependencies: for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg.spec)) all_deps[dep_id].add(package_id(pkg.spec))
@@ -1672,7 +1774,7 @@ def _requeue_with_build_spec_tasks(self, task):
"""Requeue the task and its missing build spec dependencies""" """Requeue the task and its missing build spec dependencies"""
# Full install of the build_spec is necessary because it didn't already exist somewhere # Full install of the build_spec is necessary because it didn't already exist somewhere
spec = task.pkg.spec spec = task.pkg.spec
for dep in spec.build_spec.traverse(): for dep in spec.build_spec.traverse(deptype=task.request.get_depflags(task.pkg)):
dep_pkg = dep.package dep_pkg = dep.package
dep_id = package_id(dep) dep_id = package_id(dep)
@@ -1695,6 +1797,48 @@ def _requeue_with_build_spec_tasks(self, task):
spec_task.add_dependency(build_pkg_id) spec_task.add_dependency(build_pkg_id)
self._push_task(spec_task) self._push_task(spec_task)
def _requeue_as_build_task(self, task):
# TODO: handle the compile bootstrapping stuff?
spec = task.pkg.spec
build_dep_ids = []
for builddep in spec.dependencies(deptype=dt.BUILD):
# track which package ids are the direct build deps
build_dep_ids.append(package_id(builddep))
for dep in builddep.traverse(deptype=task.request.get_depflags(task.pkg)):
dep_pkg = dep.package
dep_id = package_id(dep)
# Add a new task if we need one
if dep_id not in self.build_tasks and dep_id not in self.installed:
self._add_init_task(dep_pkg, task.request, self.all_dependencies)
# Add edges for an existing task if it exists
elif dep_id in self.build_tasks:
for parent in dep.dependents():
parent_id = package_id(parent)
self.build_tasks[dep_id].add_dependent(parent_id)
# Clear any persistent failure markings _unless_ they
# are associated with another process in this parallel build
spack.store.STORE.failure_tracker.clear(dep, force=False)
# Remove InstallTask
self._remove_task(task.pkg_id)
# New task to build this spec from source
build_task = task.build_task(self.installed)
build_task_id = package_id(spec)
# Attach dependency relationships between spec and build deps
for build_dep_id in build_dep_ids:
if build_dep_id not in self.installed:
build_dep_task = self.build_tasks[build_dep_id]
build_dep_task.add_dependent(build_task_id)
build_task.add_dependency(build_dep_id)
# Add new Task -- this removes the old task as well
self._push_task(build_task)
def _add_tasks(self, request: BuildRequest, all_deps): def _add_tasks(self, request: BuildRequest, all_deps):
"""Add tasks to the priority queue for the given build request. """Add tasks to the priority queue for the given build request.
@@ -1748,19 +1892,55 @@ def _add_tasks(self, request: BuildRequest, all_deps):
fail_fast = bool(request.install_args.get("fail_fast")) fail_fast = bool(request.install_args.get("fail_fast"))
self.fail_fast = self.fail_fast or fail_fast self.fail_fast = self.fail_fast or fail_fast
def _install_task(self, task: Task, install_status: InstallStatus) -> None: def _install_task(self, task: Task) -> ExecuteResult:
""" """
Perform the installation of the requested spec and/or dependency Perform the installation of the requested spec and/or dependency
represented by the task. represented by the task.
Args: Args:
task: the installation task for a package task: the installation task for a package
install_status: the installation status for the package""" """
rc = task.execute(install_status) rc = task.execute(self.progress)
if rc == ExecuteResult.MISSING_BUILD_SPEC: if rc == ExecuteResult.MISSING_BUILD_SPEC:
self._requeue_with_build_spec_tasks(task) self._requeue_with_build_spec_tasks(task)
elif rc == ExecuteResult.MISSING_BINARY:
self._requeue_as_build_task(task)
else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
self._update_installed(task) self._update_installed(task)
return rc
def _overwrite_install_task(self, task: Task):
"""
Try to run the install task overwriting the package prefix.
If this fails, try to recover the original install prefix. If that fails
too, mark the spec as uninstalled.
"""
try:
with fs.replace_directory_transaction(task.pkg.prefix):
rc = self._install_task(task)
if rc in requeue_results:
raise Requeue  # raise so the transaction restores the original prefix
except Requeue:
pass # This task is requeueing, not failing
except fs.CouldNotRestoreDirectoryBackup as e:
spack.store.STORE.db.remove(task.pkg.spec)
if isinstance(e.inner_exception, Requeue):
message_fn = tty.warn
else:
message_fn = tty.error
message_fn(
f"Recovery of install dir of {task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
)
# Unwrap the actual installation exception
if isinstance(e.inner_exception, Requeue):
tty.warn("Task will be requeued to build from source")
else:
raise e.inner_exception
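The overwrite path leans on the transactional directory helper; a sketch of the semantics assumed here, with a hypothetical installation body:

    # The prefix is moved aside on entry; an exception restores it,
    # a normal exit discards the backup.
    with fs.replace_directory_transaction(prefix):
        install_new_contents(prefix)   # hypothetical installation body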
def _next_is_pri0(self) -> bool: def _next_is_pri0(self) -> bool:
""" """
@@ -1864,7 +2044,7 @@ def _remove_task(self, pkg_id: str) -> Optional[Task]:
else: else:
return None return None
def _requeue_task(self, task: Task, install_status: InstallStatus) -> None: def _requeue_task(self, task: Task) -> None:
""" """
Requeues a task that appears to be in progress by another process. Requeues a task that appears to be in progress by another process.
@@ -1872,10 +2052,7 @@ def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
task (Task): the installation task for a package task (Task): the installation task for a package
""" """
if task.status not in [BuildStatus.INSTALLED, BuildStatus.INSTALLING]: if task.status not in [BuildStatus.INSTALLED, BuildStatus.INSTALLING]:
tty.debug( tty.debug(f"{install_msg(task.pkg_id, self.pid)} in progress by another process")
f"{install_msg(task.pkg_id, self.pid, install_status)} "
"in progress by another process"
)
new_task = task.next_attempt(self.installed) new_task = task.next_attempt(self.installed)
new_task.status = BuildStatus.INSTALLING new_task.status = BuildStatus.INSTALLING
@@ -2021,8 +2198,6 @@ def install(self) -> None:
single_requested_spec = len(self.build_requests) == 1 single_requested_spec = len(self.build_requests) == 1
failed_build_requests = [] failed_build_requests = []
install_status = InstallStatus(len(self.build_pq))
# Only enable the terminal status line when we're in a tty without debug info # Only enable the terminal status line when we're in a tty without debug info
# enabled, so that the output does not get cluttered. # enabled, so that the output does not get cluttered.
term_status = TermStatusLine( term_status = TermStatusLine(
@@ -2038,8 +2213,7 @@ def install(self) -> None:
keep_prefix = install_args.get("keep_prefix") keep_prefix = install_args.get("keep_prefix")
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
install_status.next_pkg(pkg) self.progress.set_term_title(f"Processing {pkg.name}")
install_status.set_term_title(f"Processing {pkg.name}")
tty.debug(f"Processing {pkg_id}: task={task}") tty.debug(f"Processing {pkg_id}: task={task}")
# Ensure that the current spec has NO uninstalled dependencies, # Ensure that the current spec has NO uninstalled dependencies,
# which is assumed to be reflected directly in its priority. # which is assumed to be reflected directly in its priority.
@@ -2068,7 +2242,7 @@ def install(self) -> None:
# Skip the installation if the spec is not being installed locally # Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since # (i.e., if external or upstream) BUT flag it as installed since
# some package likely depends on it. # some package likely depends on it.
if _handle_external_and_upstream(pkg, task.explicit): if _handle_external_and_upstream(pkg, task.explicit, self.progress):
term_status.clear() term_status.clear()
self._flag_installed(pkg, task.dependents) self._flag_installed(pkg, task.dependents)
continue continue
@@ -2089,7 +2263,7 @@ def install(self) -> None:
# another process is likely (un)installing the spec or has # another process is likely (un)installing the spec or has
# determined the spec has already been installed (though the # determined the spec has already been installed (though the
# other process may be hung). # other process may be hung).
install_status.set_term_title(f"Acquiring lock for {pkg.name}") self.progress.set_term_title(f"Acquiring lock for {pkg.name}")
term_status.add(pkg_id) term_status.add(pkg_id)
ltype, lock = self._ensure_locked("write", pkg) ltype, lock = self._ensure_locked("write", pkg)
if lock is None: if lock is None:
@@ -2101,7 +2275,7 @@ def install(self) -> None:
# can check the status presumably established by another process # can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass. # -- failed, installed, or uninstalled -- on the next pass.
if lock is None: if lock is None:
self._requeue_task(task, install_status) self._requeue_task(task)
continue continue
term_status.clear() term_status.clear()
@@ -2112,7 +2286,7 @@ def install(self) -> None:
task.request.overwrite_time = time.time() task.request.overwrite_time = time.time()
# Determine state of installation artifacts and adjust accordingly. # Determine state of installation artifacts and adjust accordingly.
install_status.set_term_title(f"Preparing {pkg.name}") self.progress.set_term_title(f"Preparing {pkg.name}")
self._prepare_for_install(task) self._prepare_for_install(task)
# Flag an already installed package # Flag an already installed package
@@ -2124,7 +2298,7 @@ def install(self) -> None:
if lock is not None: if lock is not None:
self._update_installed(task) self._update_installed(task)
path = spack.util.path.debug_padded_filter(pkg.prefix) path = spack.util.path.debug_padded_filter(pkg.prefix)
_print_installed_pkg(path) self.progress.set_installed(pkg, path)
else: else:
# At this point we've failed to get a write or a read # At this point we've failed to get a write or a read
# lock, which means another process has taken a write # lock, which means another process has taken a write
@@ -2135,7 +2309,7 @@ def install(self) -> None:
# established by the other process -- failed, installed, # established by the other process -- failed, installed,
# or uninstalled -- on the next pass. # or uninstalled -- on the next pass.
self.installed.remove(pkg_id) self.installed.remove(pkg_id)
self._requeue_task(task, install_status) self._requeue_task(task)
continue continue
# Having a read lock on an uninstalled pkg may mean another # Having a read lock on an uninstalled pkg may mean another
@@ -2148,21 +2322,19 @@ def install(self) -> None:
# uninstalled -- on the next pass. # uninstalled -- on the next pass.
if ltype == "read": if ltype == "read":
lock.release_read() lock.release_read()
self._requeue_task(task, install_status) self._requeue_task(task)
continue continue
# Proceed with the installation since we have an exclusive write # Proceed with the installation since we have an exclusive write
# lock on the package. # lock on the package.
install_status.set_term_title(f"Installing {pkg.name}") self.progress.set_term_title(f"Installing {pkg.name}")
try: try:
action = self._install_action(task) action = self._install_action(task)
if action == InstallAction.INSTALL: if action == InstallAction.INSTALL:
self._install_task(task, install_status) self._install_task(task)
elif action == InstallAction.OVERWRITE: elif action == InstallAction.OVERWRITE:
# spack.store.STORE.db is not really a Database object, but a small self._overwrite_install_task(task)
# wrapper -- silence mypy
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
# If we installed then we should keep the prefix # If we installed then we should keep the prefix
stop_before_phase = getattr(pkg, "stop_before_phase", None) stop_before_phase = getattr(pkg, "stop_before_phase", None)
@@ -2177,20 +2349,6 @@ def install(self) -> None:
) )
raise raise
except binary_distribution.NoChecksumException as exc:
if task.cache_only:
raise
# Checking hash on downloaded binary failed.
tty.error(
f"Failed to install {pkg.name} from binary cache due "
f"to {str(exc)}: Requeueing to install from source."
)
# this overrides a full method, which is ugly.
task.use_cache = False # type: ignore[misc]
self._requeue_task(task, install_status)
continue
except (Exception, SystemExit) as exc: except (Exception, SystemExit) as exc:
self._update_failed(task, True, exc) self._update_failed(task, True, exc)
@@ -2226,7 +2384,12 @@ def install(self) -> None:
# Perform basic task cleanup for the installed spec to # Perform basic task cleanup for the installed spec to
# include downgrading the write to a read lock # include downgrading the write to a read lock
if pkg.spec.installed: if pkg.spec.installed:
self._cleanup_task(pkg) # Do not clean up if this was an overwrite that wasn't completed
overwrite = spec.dag_hash() in task.request.overwrite
rec = spack.store.STORE.db.get_record(pkg.spec)
incomplete = task.request.overwrite_time > rec.installation_time
if not (overwrite and incomplete):
self._cleanup_task(pkg)
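In other words, the guard skips cleanup exactly when an overwrite was requested for this spec but the database record predates the request, meaning the rebuild never finished. A hedged restatement of the predicate (the helper name is hypothetical):

def overwrite_is_incomplete(task, spec) -> bool:
    # Hypothetical helper mirroring the guard above: the spec was queued
    # for overwrite, yet the db still holds the pre-overwrite record.
    requested = spec.dag_hash() in task.request.overwrite
    record = spack.store.STORE.db.get_record(spec)
    return requested and task.request.overwrite_time > record.installation_time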
# Cleanup, which includes releasing all of the read locks # Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks() self._cleanup_all_tasks()
@@ -2378,7 +2541,6 @@ def run(self) -> bool:
print_install_test_log(self.pkg) print_install_test_log(self.pkg)
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer) _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
_print_installed_pkg(self.pkg.prefix)
# preserve verbosity across runs # preserve verbosity across runs
return self.echo return self.echo
@@ -2524,39 +2686,22 @@ def deprecate(spec: "spack.spec.Spec", deprecator: "spack.spec.Spec", link_fn) -
link_fn(deprecator.prefix, spec.prefix) link_fn(deprecator.prefix, spec.prefix)
class OverwriteInstall: class Requeue(Exception):
def __init__( """Raised when we need an error to indicate a requeueing situation.
self,
installer: PackageInstaller,
database: spack.database.Database,
task: Task,
install_status: InstallStatus,
):
self.installer = installer
self.database = database
self.task = task
self.install_status = install_status
    def install(self): While this is raised and caught, it does not represent an error."""
"""
Try to run the install task overwriting the package prefix.
If this fails, try to recover the original install prefix. If that fails
too, mark the spec as uninstalled. This function always raises the original
install error if installation fails.
"""
try:
with fs.replace_directory_transaction(self.task.pkg.prefix):
self.installer._install_task(self.task, self.install_status)
except fs.CouldNotRestoreDirectoryBackup as e:
self.database.remove(self.task.pkg.spec)
tty.error(
f"Recovery of install dir of {self.task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
)
# Unwrap the actual installation exception.
raise e.inner_exception class InstallError(spack.error.SpackError):
"""Raised when something goes wrong during install or uninstall.
The error can be annotated with a ``pkg`` attribute to allow the
caller to get the package for which the exception was raised.
"""
def __init__(self, message, long_msg=None, pkg=None):
super().__init__(message, long_msg)
self.pkg = pkg
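The `pkg` annotation gives callers a handle on the failing package; a hypothetical usage sketch:

try:
    installer.install()
except InstallError as e:
    if e.pkg is not None:  # the error was annotated with its package
        tty.error(f"installation failed in {e.pkg.name}")
    raise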
class BadInstallPhase(spack.error.InstallError): class BadInstallPhase(spack.error.InstallError):
View File
@@ -20,7 +20,6 @@
import signal import signal
import subprocess as sp import subprocess as sp
import sys import sys
import tempfile
import traceback import traceback
import warnings import warnings
from typing import List, Tuple from typing import List, Tuple
@@ -42,7 +41,6 @@
import spack.paths import spack.paths
import spack.platforms import spack.platforms
import spack.repo import spack.repo
import spack.solver.asp
import spack.spec import spack.spec
import spack.store import spack.store
import spack.util.debug import spack.util.debug
@@ -873,8 +871,8 @@ def add_command_line_scopes(
""" """
for i, path in enumerate(command_line_scopes): for i, path in enumerate(command_line_scopes):
name = f"cmd_scope_{i}" name = f"cmd_scope_{i}"
scope = ev.environment_path_scope(name, path) scopes = ev.environment_path_scopes(name, path)
if scope is None: if scopes is None:
if os.path.isdir(path): # directory with config files if os.path.isdir(path): # directory with config files
cfg.push_scope( cfg.push_scope(
spack.config.DirectoryConfigScope(name, path, writable=False), spack.config.DirectoryConfigScope(name, path, writable=False),
@@ -887,7 +885,8 @@ def add_command_line_scopes(
else: else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}") raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM) for scope in scopes:
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
def _main(argv=None): def _main(argv=None):
@@ -1048,10 +1047,6 @@ def main(argv=None):
try: try:
return _main(argv) return _main(argv)
except spack.solver.asp.OutputDoesNotSatisfyInputError as e:
_handle_solver_bug(e)
return 1
except spack.error.SpackError as e: except spack.error.SpackError as e:
tty.debug(e) tty.debug(e)
e.die() # gracefully die on any SpackErrors e.die() # gracefully die on any SpackErrors
@@ -1075,45 +1070,5 @@ def main(argv=None):
return 3 return 3
def _handle_solver_bug(
e: spack.solver.asp.OutputDoesNotSatisfyInputError, out=sys.stderr, root=None
) -> None:
# when the solver outputs specs that do not satisfy the input and spack is used as a command
# line tool, we dump the incorrect output specs to json so users can upload them in bug reports
wrong_output = [(input, output) for input, output in e.input_to_output if output is not None]
no_output = [input for input, output in e.input_to_output if output is None]
if no_output:
tty.error(
"internal solver error: the following specs were not solved:\n - "
+ "\n - ".join(str(s) for s in no_output),
stream=out,
)
if wrong_output:
msg = (
"internal solver error: the following specs were concretized, but do not satisfy the "
"input:\n - "
+ "\n - ".join(str(s) for s, _ in wrong_output)
+ "\n Please report a bug at https://github.com/spack/spack/issues"
)
# try to write the input/output specs to a temporary directory for bug reports
try:
tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
files = []
for i, (input, output) in enumerate(wrong_output, start=1):
in_file = os.path.join(tmpdir, f"input-{i}.json")
out_file = os.path.join(tmpdir, f"output-{i}.json")
files.append(in_file)
files.append(out_file)
with open(in_file, "w", encoding="utf-8") as f:
input.to_json(f)
with open(out_file, "w", encoding="utf-8") as f:
output.to_json(f)
msg += " and attach the following files:\n - " + "\n - ".join(files)
except Exception:
msg += "."
tty.error(msg, stream=out)
class SpackCommandError(Exception): class SpackCommandError(Exception):
"""Raised when SpackCommand execution fails.""" """Raised when SpackCommand execution fails."""
View File
@@ -162,7 +162,6 @@ class tty:
configure: Executable configure: Executable
make_jobs: int make_jobs: int
make: MakeExecutable make: MakeExecutable
nmake: Executable
ninja: MakeExecutable ninja: MakeExecutable
python_include: str python_include: str
python_platlib: str python_platlib: str
View File
@@ -14,6 +14,7 @@
import functools import functools
import glob import glob
import hashlib import hashlib
import importlib
import io import io
import os import os
import re import re
@@ -27,7 +28,7 @@
import llnl.util.filesystem as fsys import llnl.util.filesystem as fsys
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import ClassProperty, classproperty, memoized from llnl.util.lang import classproperty, memoized
import spack.config import spack.config
import spack.dependency import spack.dependency
@@ -47,7 +48,6 @@
import spack.url import spack.url
import spack.util.environment import spack.util.environment
import spack.util.executable import spack.util.executable
import spack.util.naming
import spack.util.path import spack.util.path
import spack.util.web import spack.util.web
import spack.variant import spack.variant
@@ -701,10 +701,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
_verbose = None _verbose = None
#: Package homepage where users can find more information about the package #: Package homepage where users can find more information about the package
homepage: ClassProperty[Optional[str]] = None homepage: Optional[str] = None
#: Default list URL (place to find available versions) #: Default list URL (place to find available versions)
list_url: ClassProperty[Optional[str]] = None list_url: Optional[str] = None
#: Link depth to which list_url should be searched for new versions #: Link depth to which list_url should be searched for new versions
list_depth = 0 list_depth = 0
@@ -818,12 +818,12 @@ def package_dir(cls):
@classproperty @classproperty
def module(cls): def module(cls):
"""Module instance that this package class is defined in. """Module object (not just the name) that this package is defined in.
We use this to add variables to package modules. This makes We use this to add variables to package modules. This makes
install() methods easier to write (e.g., can call configure()) install() methods easier to write (e.g., can call configure())
""" """
return sys.modules[cls.__module__] return importlib.import_module(cls.__module__)
@classproperty @classproperty
def namespace(cls): def namespace(cls):
@@ -839,36 +839,26 @@ def fullname(cls):
def fullnames(cls): def fullnames(cls):
"""Fullnames for this package and any packages from which it inherits.""" """Fullnames for this package and any packages from which it inherits."""
fullnames = [] fullnames = []
for base in cls.__mro__: for cls in cls.__mro__:
if not spack.repo.is_package_module(base.__module__): namespace = getattr(cls, "namespace", None)
if namespace:
fullnames.append("%s.%s" % (namespace, cls.name))
if namespace == "builtin":
# builtin packages cannot inherit from other repos
break break
fullnames.append(base.fullname)
return fullnames return fullnames
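As an illustration of the MRO walk (package and repo names here are hypothetical): a package class in repo `myrepo` subclassing the builtin one reports both qualified names, most-derived first.

# hypothetical: MyrepoHdf5 defined in repo "myrepo" inherits the builtin class
assert MyrepoHdf5.fullnames == ["myrepo.hdf5", "builtin.hdf5"]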
@classproperty @classproperty
def name(cls): def name(cls):
"""The name of this package.""" """The name of this package.
The name of a package is the name of its Python module, without
the containing module names.
"""
if cls._name is None: if cls._name is None:
# We cannot know the exact package API version, but we can distinguish between v1 cls._name = cls.module.__name__
# v2 based on the module. We don't want to figure out the exact package API version if "." in cls._name:
# since it requires parsing the repo.yaml. cls._name = cls._name[cls._name.rindex(".") + 1 :]
module = cls.__module__
if module.startswith(spack.repo.PKG_MODULE_PREFIX_V1):
version = (1, 0)
elif module.startswith(spack.repo.PKG_MODULE_PREFIX_V2):
version = (2, 0)
else:
raise ValueError(f"Package {cls.__qualname__} is not a known Spack package")
if version < (2, 0):
# spack.pkg.builtin.package_name.
_, _, pkg_module = module.rpartition(".")
else:
# spack_repo.builtin.packages.package_name.package
pkg_module = module.rsplit(".", 2)[-2]
cls._name = spack.util.naming.pkg_dir_to_pkg_name(pkg_module, version)
return cls._name return cls._name
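Concretely, the two module layouts encode the package directory in different positions, and only API v2 guarantees a reversible mapping back to the package name. A sketch of the derivation described above (assuming `pkg_dir_to_pkg_name` from `spack.util.naming`, which e.g. maps `py_numpy` to `py-numpy` under API v2):

from spack.util.naming import pkg_dir_to_pkg_name

def name_from_module(module: str) -> str:
    # Sketch of the derivation described above, not the exact implementation.
    if module.startswith("spack.pkg."):     # API v1: spack.pkg.builtin.<pkg_dir>
        pkg_dir, api = module.rsplit(".", 1)[-1], (1, 0)
    elif module.startswith("spack_repo."):  # API v2: ...packages.<pkg_dir>.package
        pkg_dir, api = module.rsplit(".", 2)[-2], (2, 0)
    else:
        raise ValueError(f"not a package module: {module}")
    return pkg_dir_to_pkg_name(pkg_dir, api)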
@classproperty @classproperty
@@ -1831,7 +1821,7 @@ def _resource_stage(self, resource):
resource_stage_folder = "-".join(pieces) resource_stage_folder = "-".join(pieces)
return resource_stage_folder return resource_stage_folder
def do_test(self, *, dirty=False, externals=False, timeout: Optional[int] = None): def do_test(self, dirty=False, externals=False):
if self.test_requires_compiler and not any( if self.test_requires_compiler and not any(
lang in self.spec for lang in ("c", "cxx", "fortran") lang in self.spec for lang in ("c", "cxx", "fortran")
): ):
@@ -1849,7 +1839,7 @@ def do_test(self, *, dirty=False, externals=False, timeout: Optional[int] = None
"verbose": tty.is_verbose(), "verbose": tty.is_verbose(),
} }
self.tester.stand_alone_tests(kwargs, timeout=timeout) self.tester.stand_alone_tests(kwargs)
def unit_test_check(self): def unit_test_check(self):
"""Hook for unit tests to assert things about package internals. """Hook for unit tests to assert things about package internals.
View File
@@ -56,9 +56,8 @@
# read-only things in $spack/var/spack # read-only things in $spack/var/spack
repos_path = os.path.join(var_path, "repos") repos_path = os.path.join(var_path, "repos")
test_repos_path = os.path.join(var_path, "test_repos") packages_path = os.path.join(repos_path, "builtin")
packages_path = os.path.join(repos_path, "spack_repo", "builtin") mock_packages_path = os.path.join(repos_path, "builtin.mock")
mock_packages_path = os.path.join(test_repos_path, "builtin.mock")
# #
# Writable things in $spack/var/spack # Writable things in $spack/var/spack
View File
@@ -47,34 +47,40 @@
import spack.util.path import spack.util.path
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
PKG_MODULE_PREFIX_V1 = "spack.pkg." #: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
PKG_MODULE_PREFIX_V2 = "spack_repo." ROOT_PYTHON_NAMESPACE = "spack.pkg"
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$") _API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
def is_package_module(fullname: str) -> bool: def python_package_for_repo(namespace):
"""Check if the given module is a package module.""" """Returns the full namespace of a repository, given its relative one
return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)
For instance:
python_package_for_repo('builtin') == 'spack.pkg.builtin'
Args:
namespace (str): repo namespace
"""
return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
def namespace_from_fullname(fullname: str) -> str: def namespace_from_fullname(fullname):
"""Return the repository namespace only for the full module name. """Return the repository namespace only for the full module name.
For instance: For instance:
namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin" namespace_from_fullname('spack.pkg.builtin.hdf5') == 'builtin'
namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"
Args: Args:
fullname: full name for the Python module fullname (str): full name for the Python module
""" """
if fullname.startswith(PKG_MODULE_PREFIX_V1): namespace, dot, module = fullname.rpartition(".")
namespace, _, _ = fullname.rpartition(".") prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
return namespace[len(PKG_MODULE_PREFIX_V1) :] if namespace.startswith(prefix_and_dot):
elif fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package"): namespace = namespace[len(prefix_and_dot) :]
return ".".join(fullname.split(".")[1:-3]) return namespace
return fullname
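Both layouts round-trip as in the docstring examples; stated as assertions for concreteness:

assert namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
assert namespace_from_fullname(
    "spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"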
class SpackNamespaceLoader: class SpackNamespaceLoader:
@@ -86,14 +92,14 @@ def exec_module(self, module):
class ReposFinder: class ReposFinder:
"""MetaPathFinder class that loads a Python module corresponding to an API v1 Spack package. """MetaPathFinder class that loads a Python module corresponding to a Spack package.
Returns a loader based on the inspection of the current repository list. Returns a loader based on the inspection of the current repository list.
""" """
def __init__(self): def __init__(self):
self._repo_init = _path self._repo_init = _path
self._repo: Optional[RepoType] = None self._repo = None
@property @property
def current_repository(self): def current_repository(self):
@@ -121,7 +127,7 @@ def find_spec(self, fullname, python_path, target=None):
raise RuntimeError('cannot reload module "{0}"'.format(fullname)) raise RuntimeError('cannot reload module "{0}"'.format(fullname))
# Preferred API from https://peps.python.org/pep-0451/ # Preferred API from https://peps.python.org/pep-0451/
if not fullname.startswith(PKG_MODULE_PREFIX_V1) and fullname != "spack.pkg": if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
return None return None
loader = self.compute_loader(fullname) loader = self.compute_loader(fullname)
@@ -129,17 +135,16 @@ def find_spec(self, fullname, python_path, target=None):
return None return None
return importlib.util.spec_from_loader(fullname, loader) return importlib.util.spec_from_loader(fullname, loader)
def compute_loader(self, fullname: str): def compute_loader(self, fullname):
# namespaces are added to repo, and package modules are leaves. # namespaces are added to repo, and package modules are leaves.
namespace, dot, module_name = fullname.rpartition(".") namespace, dot, module_name = fullname.rpartition(".")
# If it's a module in some repo, or if it is the repo's namespace, let the repo handle it. # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
current_repo = self.current_repository is_repo_path = isinstance(self.current_repository, RepoPath)
is_repo_path = isinstance(current_repo, RepoPath)
if is_repo_path: if is_repo_path:
repos = current_repo.repos repos = self.current_repository.repos
else: else:
repos = [current_repo] repos = [self.current_repository]
for repo in repos: for repo in repos:
# We are using the namespace of the repo and the repo contains the package # We are using the namespace of the repo and the repo contains the package
@@ -156,9 +161,7 @@ def compute_loader(self, fullname: str):
# No repo provides the namespace, but it is a valid prefix of # No repo provides the namespace, but it is a valid prefix of
# something in the RepoPath. # something in the RepoPath.
if is_repo_path and current_repo.by_namespace.is_prefix( if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
fullname[len(PKG_MODULE_PREFIX_V1) :]
):
return SpackNamespaceLoader() return SpackNamespaceLoader()
return None return None
@@ -176,12 +179,12 @@ def compute_loader(self, fullname: str):
NOT_PROVIDED = object() NOT_PROVIDED = object()
def builtin_repo() -> "Repo": def packages_path():
"""Get the test repo if it is active, otherwise the builtin repo.""" """Get the test repo if it is active, otherwise the builtin repo."""
try: try:
return PATH.get_repo("builtin.mock") return PATH.get_repo("builtin.mock").packages_path
except UnknownNamespaceError: except UnknownNamespaceError:
return PATH.get_repo("builtin") return PATH.get_repo("builtin").packages_path
class GitExe: class GitExe:
@@ -189,25 +192,24 @@ class GitExe:
# invocations. # invocations.
# #
# Not using -C as that is not supported for git < 1.8.5. # Not using -C as that is not supported for git < 1.8.5.
def __init__(self, packages_path: str): def __init__(self):
self._git_cmd = spack.util.git.git(required=True) self._git_cmd = spack.util.git.git(required=True)
self.packages_dir = packages_path
def __call__(self, *args, **kwargs) -> str: def __call__(self, *args, **kwargs):
with working_dir(self.packages_dir): with working_dir(packages_path()):
return self._git_cmd(*args, **kwargs, output=str) return self._git_cmd(*args, **kwargs)
def list_packages(rev: str, repo: "Repo") -> List[str]: def list_packages(rev):
"""List all packages associated with the given revision""" """List all packages associated with the given revision"""
git = GitExe(repo.packages_path) git = GitExe()
# git ls-tree does not support ... merge-base syntax, so do it manually # git ls-tree does not support ... merge-base syntax, so do it manually
if rev.endswith("..."): if rev.endswith("..."):
ref = rev.replace("...", "") ref = rev.replace("...", "")
rev = git("merge-base", ref, "HEAD").strip() rev = git("merge-base", ref, "HEAD", output=str).strip()
output = git("ls-tree", "-r", "--name-only", rev) output = git("ls-tree", "-r", "--name-only", rev, output=str)
# recursively list the packages directory # recursively list the packages directory
package_paths = [ package_paths = [
@@ -215,54 +217,54 @@ def list_packages(rev: str, repo: "Repo") -> List[str]:
] ]
# take the directory names with one-level-deep package files # take the directory names with one-level-deep package files
package_names = [ package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
nm.pkg_dir_to_pkg_name(line[0], repo.package_api)
for line in package_paths
if len(line) == 2
]
return sorted(set(package_names)) return package_names
def diff_packages(rev1: str, rev2: str, repo: "Repo") -> Tuple[Set[str], Set[str]]: def diff_packages(rev1, rev2):
"""Compute packages lists for the two revisions and return a tuple """Compute packages lists for the two revisions and return a tuple
containing all the packages in rev1 but not in rev2 and all the containing all the packages in rev1 but not in rev2 and all the
packages in rev2 but not in rev1.""" packages in rev2 but not in rev1."""
p1 = set(list_packages(rev1, repo)) p1 = set(list_packages(rev1))
p2 = set(list_packages(rev2, repo)) p2 = set(list_packages(rev2))
return p1.difference(p2), p2.difference(p1) return p1.difference(p2), p2.difference(p1)
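A hedged usage sketch tying the two helpers together (`repo` is assumed to be the builtin `Repo` instance); the `...` suffix is resolved to the merge base by `list_packages`:

# packages that exist only at the merge base with develop, and only on HEAD
removed, added = diff_packages("develop...", "HEAD", repo)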
def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -> Set[str]: def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
"""Get packages changed, added, or removed (or any combination of those) since a commit. """Show packages changed, added, or removed (or any combination of those)
since a commit.
Arguments: Arguments:
type: String containing one or more of 'A', 'R', 'C' type (str): String containing one or more of 'A', 'R', 'C'
rev1: Revision to compare against, default is 'HEAD^' rev1 (str): Revision to compare against, default is 'HEAD^'
rev2: Revision to compare to rev1, default is 'HEAD' rev2 (str): Revision to compare to rev1, default is 'HEAD'
Returns:
A set contain names of affected packages.
""" """
lower_type = type.lower() lower_type = type.lower()
if not re.match("^[arc]*$", lower_type): if not re.match("^[arc]*$", lower_type):
tty.die( tty.die(
f"Invalid change type: '{type}'. " "Invald change type: '%s'." % type,
"Can contain only A (added), R (removed), or C (changed)" "Can contain only A (added), R (removed), or C (changed)",
) )
removed, added = diff_packages(rev1, rev2, repo) removed, added = diff_packages(rev1, rev2)
git = GitExe(repo.packages_path) git = GitExe()
out = git("diff", "--relative", "--name-only", rev1, rev2).strip() out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
lines = [] if not out else re.split(r"\s+", out) lines = [] if not out else re.split(r"\s+", out)
changed: Set[str] = set() changed = set()
for path in lines: for path in lines:
dir_name, _, _ = path.partition("/") pkg_name, _, _ = path.partition("/")
pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api)
if pkg_name not in added and pkg_name not in removed: if pkg_name not in added and pkg_name not in removed:
changed.add(pkg_name) changed.add(pkg_name)
packages: Set[str] = set() packages = set()
if "a" in lower_type: if "a" in lower_type:
packages |= added packages |= added
if "r" in lower_type: if "r" in lower_type:
@@ -273,14 +275,14 @@ def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -
return packages return packages
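For example, to collect everything added or changed by the last commit (a usage sketch under the new signature):

pkgs = get_all_package_diffs("ac", repo, rev1="HEAD^1", rev2="HEAD")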
def add_package_to_git_stage(packages: List[str], repo: "Repo") -> None: def add_package_to_git_stage(packages):
"""add a package to the git stage with `git add`""" """add a package to the git stage with `git add`"""
git = GitExe(repo.packages_path) git = GitExe()
for pkg_name in packages: for pkg_name in packages:
filename = PATH.filename_for_package_name(pkg_name) filename = PATH.filename_for_package_name(pkg_name)
if not os.path.isfile(filename): if not os.path.isfile(filename):
tty.die(f"No such package: {pkg_name}. Path does not exist:", filename) tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
git("add", filename) git("add", filename)
@@ -350,10 +352,9 @@ class FastPackageChecker(collections.abc.Mapping):
#: Global cache, reused by every instance #: Global cache, reused by every instance
_paths_cache: Dict[str, Dict[str, os.stat_result]] = {} _paths_cache: Dict[str, Dict[str, os.stat_result]] = {}
def __init__(self, packages_path: str, package_api: Tuple[int, int]): def __init__(self, packages_path):
# The path of the repository managed by this instance # The path of the repository managed by this instance
self.packages_path = packages_path self.packages_path = packages_path
self.package_api = package_api
# If the cache we need is not there yet, then build it appropriately # If the cache we need is not there yet, then build it appropriately
if packages_path not in self._paths_cache: if packages_path not in self._paths_cache:
@@ -378,38 +379,41 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
# Create a dictionary that will store the mapping between a # Create a dictionary that will store the mapping between a
# package name and its stat info # package name and its stat info
cache: Dict[str, os.stat_result] = {} cache: Dict[str, os.stat_result] = {}
with os.scandir(self.packages_path) as entries: for pkg_name in os.listdir(self.packages_path):
for entry in entries: # Skip non-directories in the package root.
# Construct the file name from the directory pkg_dir = os.path.join(self.packages_path, pkg_name)
pkg_file = os.path.join(entry.path, package_file_name)
try: # Warn about invalid names that look like packages.
sinfo = os.stat(pkg_file) if not nm.valid_module_name(pkg_name):
except OSError as e: if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
if e.errno in (errno.ENOENT, errno.ENOTDIR):
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn(f"Can't read package file {pkg_file}.")
continue
raise e
# If it's not a file, skip it.
if not stat.S_ISREG(sinfo.st_mode):
continue
# Only consider package.py files in directories that are valid module names under
# the current package API
if not nm.valid_module_name(entry.name, self.package_api):
x, y = self.package_api
tty.warn( tty.warn(
f"Package {pkg_file} cannot be used because `{entry.name}` is not a valid " 'Skipping package at {0}. "{1}" is not '
f"Spack package module name for Package API v{x}.{y}." "a valid Spack module name.".format(pkg_dir, pkg_name)
) )
continue continue
# Store the stat info by package name. # Construct the file name from the directory
cache[nm.pkg_dir_to_pkg_name(entry.name, self.package_api)] = sinfo pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)
# Use stat here to avoid lots of calls to the filesystem.
try:
sinfo = os.stat(pkg_file)
except OSError as e:
if e.errno == errno.ENOENT:
# No package.py file here.
continue
elif e.errno == errno.EACCES:
tty.warn("Can't read package file %s." % pkg_file)
continue
raise e
# If it's not a file, skip it.
if stat.S_ISDIR(sinfo.st_mode):
continue
# If it is a file, then save the stats under the
# appropriate key
cache[pkg_name] = sinfo
return cache return cache
@@ -684,7 +688,7 @@ def put_first(self, repo: "Repo") -> None:
return return
self.repos.insert(0, repo) self.repos.insert(0, repo)
self.by_namespace[repo.namespace] = repo self.by_namespace[repo.full_namespace] = repo
def put_last(self, repo): def put_last(self, repo):
"""Add repo last in the search path.""" """Add repo last in the search path."""
@@ -696,8 +700,8 @@ def put_last(self, repo):
self.repos.append(repo) self.repos.append(repo)
# don't mask any higher-precedence repos with same namespace # don't mask any higher-precedence repos with same namespace
if repo.namespace not in self.by_namespace: if repo.full_namespace not in self.by_namespace:
self.by_namespace[repo.namespace] = repo self.by_namespace[repo.full_namespace] = repo
def remove(self, repo): def remove(self, repo):
"""Remove a repo from the search path.""" """Remove a repo from the search path."""
@@ -706,9 +710,10 @@ def remove(self, repo):
def get_repo(self, namespace: str) -> "Repo": def get_repo(self, namespace: str) -> "Repo":
"""Get a repository by namespace.""" """Get a repository by namespace."""
if namespace not in self.by_namespace: full_namespace = python_package_for_repo(namespace)
if full_namespace not in self.by_namespace:
raise UnknownNamespaceError(namespace) raise UnknownNamespaceError(namespace)
return self.by_namespace[namespace] return self.by_namespace[full_namespace]
def first_repo(self) -> Optional["Repo"]: def first_repo(self) -> Optional["Repo"]:
"""Get the first repo in precedence order.""" """Get the first repo in precedence order."""
@@ -816,9 +821,10 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
# If the spec already has a namespace, then return the # If the spec already has a namespace, then return the
# corresponding repo if we know about it. # corresponding repo if we know about it.
if namespace: if namespace:
if namespace not in self.by_namespace: fullspace = python_package_for_repo(namespace)
if fullspace not in self.by_namespace:
raise UnknownNamespaceError(namespace, name=name) raise UnknownNamespaceError(namespace, name=name)
return self.by_namespace[namespace] return self.by_namespace[fullspace]
# If there's no namespace, search in the RepoPath. # If there's no namespace, search in the RepoPath.
for repo in self.repos: for repo in self.repos:
@@ -839,15 +845,8 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
return self.repo_for_pkg(spec).get(spec) return self.repo_for_pkg(spec).get(spec)
def python_paths(self) -> List[str]:
"""Return a list of all the Python paths in the repos."""
return [repo.python_path for repo in self.repos if repo.python_path]
def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]: def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
"""Find a class for the spec's package and return the class object.""" """Find a class for the spec's package and return the class object."""
for p in self.python_paths():
if p not in sys.path:
sys.path.insert(0, p)
return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name) return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
@autospec @autospec
@@ -943,30 +942,6 @@ def _parse_package_api_version(
) )
def _validate_and_normalize_subdir(subdir: Any, root: str, package_api: Tuple[int, int]) -> str:
if not isinstance(subdir, str):
raise BadRepoError(f"Invalid subdirectory '{subdir}' in '{root}'. Must be a string")
if package_api < (2, 0):
return subdir # In v1.x we did not validate subdir names
if subdir in (".", ""):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Use a symlink packages -> . instead"
)
# Otherwise we expect a directory name (not path) that can be used as a Python module.
if os.sep in subdir:
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Expected a directory name, not a path"
)
if not nm.valid_module_name(subdir, package_api):
raise BadRepoError(
f"Invalid subdirectory '{subdir}' in '{root}'. Must be a valid Python module name"
)
return subdir
class Repo: class Repo:
"""Class representing a package repository in the filesystem. """Class representing a package repository in the filesystem.
@@ -987,8 +962,6 @@ class Repo:
:py:data:`spack.package_api_version`. :py:data:`spack.package_api_version`.
""" """
namespace: str
def __init__( def __init__(
self, self,
root: str, root: str,
@@ -1018,79 +991,32 @@ def check(condition, msg):
# Read configuration and validate namespace # Read configuration and validate namespace
config = self._read_config() config = self._read_config()
self.package_api = _parse_package_api_version(config)
self.subdirectory = _validate_and_normalize_subdir(
config.get("subdirectory", packages_dir_name), root, self.package_api
)
self.packages_path = os.path.join(self.root, self.subdirectory)
check( check(
os.path.isdir(self.packages_path), "namespace" in config,
f"No directory '{self.subdirectory}' found in '{root}'", f"{os.path.join(root, repo_config_name)} must define a namespace.",
) )
# The parent dir of spack_repo/ which should be added to sys.path for api v2.x self.namespace: str = config["namespace"]
self.python_path: Optional[str] = None check(
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
if self.package_api < (2, 0): f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
check( "Namespaces must be valid python identifiers separated by '.'",
"namespace" in config, )
f"{os.path.join(root, repo_config_name)} must define a namespace.",
)
self.namespace = config["namespace"]
# Note: for Package API v1.x the namespace validation always had bugs, which won't be
# fixed for compatibility reasons. The regex is missing "$" at the end, and it claims
# to test for valid identifiers, but fails to split on `.` first.
check(
isinstance(self.namespace, str)
and re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
"Namespaces must be valid python identifiers separated by '.'",
)
else:
# From Package API v2.0 the namespace follows from the directory structure.
check(
f"{os.sep}spack_repo{os.sep}" in self.root,
f"Invalid repository path '{self.root}'. "
f"Path must contain 'spack_repo{os.sep}'",
)
derived_namespace = self.root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
if "namespace" in config:
self.namespace = config["namespace"]
check(
isinstance(self.namespace, str) and self.namespace == derived_namespace,
f"Namespace '{self.namespace}' should be {derived_namespace} or omitted in "
f"{os.path.join(root, repo_config_name)}",
)
else:
self.namespace = derived_namespace
# strip the namespace directories from the root path to get the python path
# e.g. /my/pythonpath/spack_repo/x/y/z -> /my/pythonpath
python_path = self.root
for _ in self.namespace.split("."):
python_path = os.path.dirname(python_path)
self.python_path = os.path.dirname(python_path)
# check that all subdirectories are valid module names
check(
all(nm.valid_module_name(x, self.package_api) for x in self.namespace.split(".")),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'",
)
# Set up 'full_namespace' to include the super-namespace # Set up 'full_namespace' to include the super-namespace
if self.package_api < (2, 0): self.full_namespace = python_package_for_repo(self.namespace)
self.full_namespace = f"{PKG_MODULE_PREFIX_V1}{self.namespace}"
elif self.subdirectory == ".":
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}"
else:
self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}.{self.subdirectory}"
# Keep name components around for checking prefixes. # Keep name components around for checking prefixes.
self._names = self.full_namespace.split(".") self._names = self.full_namespace.split(".")
packages_dir: str = config.get("subdirectory", packages_dir_name)
self.packages_path = os.path.join(self.root, packages_dir)
check(
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
)
self.package_api = _parse_package_api_version(config)
# Class attribute overrides by package name # Class attribute overrides by package name
self.overrides = overrides or {} self.overrides = overrides or {}
@@ -1104,36 +1030,27 @@ def check(condition, msg):
self._repo_index: Optional[RepoIndex] = None self._repo_index: Optional[RepoIndex] = None
self._cache = cache self._cache = cache
@property
def package_api_str(self) -> str:
return f"v{self.package_api[0]}.{self.package_api[1]}"
def finder(self, value: RepoPath) -> None: def finder(self, value: RepoPath) -> None:
self._finder = value self._finder = value
def real_name(self, import_name: str) -> Optional[str]: def real_name(self, import_name: str) -> Optional[str]:
"""Allow users to import Spack packages using Python identifiers. """Allow users to import Spack packages using Python identifiers.
In Package API v1.x, there was no canonical module name for a package, and package's dir A python identifier might map to many different Spack package
was not necessarily a valid Python module name. For that case we have to guess the actual names due to hyphen/underscore ambiguity.
package directory. From Package API v2.0 there is a one-to-one mapping between Spack
package names and Python module names, so there is no guessing.
For Package API v1.x we support the following one-to-many mappings: Easy example:
num3proxy -> 3proxy num3proxy -> 3proxy
Ambiguous:
foo_bar -> foo_bar, foo-bar foo_bar -> foo_bar, foo-bar
More ambiguous:
foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
""" """
if self.package_api >= (2, 0):
if nm.pkg_dir_to_pkg_name(import_name, package_api=self.package_api) in self:
return import_name
return None
if import_name in self: if import_name in self:
return import_name return import_name
# For v1 generate the possible package names from a module name, and return the first
# package name that exists in this repo.
options = nm.possible_spack_module_names(import_name) options = nm.possible_spack_module_names(import_name)
try: try:
options.remove(import_name) options.remove(import_name)
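A minimal sketch of the one-to-many candidate generation the v1 path relies on (an illustration, not the exact `possible_spack_module_names`): each underscore may stand for either `_` or `-` in the real package name, and a leading `num` can mark an escaped leading digit.

import itertools

def possible_spack_module_names(import_name: str) -> list:
    # "num3proxy" -> "3proxy": a leading "num" marks an escaped digit
    names = [import_name]
    if import_name.startswith("num") and import_name[3:4].isdigit():
        names.append(import_name[3:])
    results = []
    for name in names:
        parts = name.split("_")
        # each joint may be "_" or "-": foo_bar -> foo_bar, foo-bar
        for seps in itertools.product("_-", repeat=len(parts) - 1):
            candidate = parts[0]
            for sep, part in zip(seps, parts[1:]):
                candidate += sep + part
            results.append(candidate)
    return results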
@@ -1266,9 +1183,7 @@ def extensions_for(
def dirname_for_package_name(self, pkg_name: str) -> str: def dirname_for_package_name(self, pkg_name: str) -> str:
"""Given a package name, get the directory containing its package.py file.""" """Given a package name, get the directory containing its package.py file."""
_, unqualified_name = self.partition_package_name(pkg_name) _, unqualified_name = self.partition_package_name(pkg_name)
return os.path.join( return os.path.join(self.packages_path, unqualified_name)
self.packages_path, nm.pkg_name_to_pkg_dir(unqualified_name, self.package_api)
)
def filename_for_package_name(self, pkg_name: str) -> str: def filename_for_package_name(self, pkg_name: str) -> str:
"""Get the filename for the module we should load for a particular """Get the filename for the module we should load for a particular
@@ -1285,7 +1200,7 @@ def filename_for_package_name(self, pkg_name: str) -> str:
@property @property
def _pkg_checker(self) -> FastPackageChecker: def _pkg_checker(self) -> FastPackageChecker:
if self._fast_package_checker is None: if self._fast_package_checker is None:
self._fast_package_checker = FastPackageChecker(self.packages_path, self.package_api) self._fast_package_checker = FastPackageChecker(self.packages_path)
return self._fast_package_checker return self._fast_package_checker
def all_package_names(self, include_virtuals: bool = False) -> List[str]: def all_package_names(self, include_virtuals: bool = False) -> List[str]:
@@ -1297,9 +1212,7 @@ def all_package_names(self, include_virtuals: bool = False) -> List[str]:
def package_path(self, name: str) -> str: def package_path(self, name: str) -> str:
"""Get path to package.py file for this repo.""" """Get path to package.py file for this repo."""
return os.path.join( return os.path.join(self.packages_path, name, package_file_name)
self.packages_path, nm.pkg_name_to_pkg_dir(name, self.package_api), package_file_name
)
def all_package_paths(self) -> Generator[str, None, None]: def all_package_paths(self) -> Generator[str, None, None]:
for name in self.all_package_names(): for name in self.all_package_names():
@@ -1357,19 +1270,15 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
package. Then extracts the package class from the module package. Then extracts the package class from the module
according to Spack's naming convention. according to Spack's naming convention.
""" """
_, pkg_name = self.partition_package_name(pkg_name) namespace, pkg_name = self.partition_package_name(pkg_name)
fullname = f"{self.full_namespace}.{nm.pkg_name_to_pkg_dir(pkg_name, self.package_api)}" class_name = nm.mod_to_class(pkg_name)
if self.package_api >= (2, 0): fullname = f"{self.full_namespace}.{pkg_name}"
fullname += ".package"
class_name = nm.pkg_name_to_class_name(pkg_name)
if self.python_path and self.python_path not in sys.path:
sys.path.insert(0, self.python_path)
try: try:
with REPOS_FINDER.switch_repo(self._finder or self): with REPOS_FINDER.switch_repo(self._finder or self):
module = importlib.import_module(fullname) module = importlib.import_module(fullname)
except ImportError as e: except ImportError:
raise UnknownPackageError(fullname) from e raise UnknownPackageError(fullname)
except Exception as e: except Exception as e:
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}" msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
raise RepoError(msg) from e raise RepoError(msg) from e
@@ -1460,71 +1369,46 @@ def partition_package_name(pkg_name: str) -> Tuple[str, str]:
return namespace, pkg_name return namespace, pkg_name
def get_repo_yaml_dir( def create_repo(root, namespace=None, subdir=packages_dir_name):
root: str, namespace: Optional[str], package_api: Tuple[int, int]
) -> Tuple[str, str]:
"""Returns the directory where repo.yaml is located and the effective namespace."""
if package_api < (2, 0):
namespace = namespace or os.path.basename(root)
# This ad-hoc regex is left for historical reasons, and should not have a breaking change.
if not re.match(r"\w[\.\w-]*", namespace):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
return root, namespace
# Package API v2 has <root>/spack_repo/<namespace>/<subdir> structure and requires a namespace
if namespace is None:
raise InvalidNamespaceError("Namespace must be provided.")
# if namespace has dots those translate to subdirs of further namespace packages.
namespace_components = namespace.split(".")
if not all(nm.valid_module_name(n, package_api=package_api) for n in namespace_components):
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace." % namespace)
return os.path.join(root, "spack_repo", *namespace_components), namespace
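For example, under the v2 layout this places `repo.yaml` beneath nested namespace directories (POSIX paths, illustrative):

assert get_repo_yaml_dir("/repos", "x.y", (2, 0)) == ("/repos/spack_repo/x/y", "x.y")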
def create_repo(
root,
namespace: Optional[str] = None,
subdir: str = packages_dir_name,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[str, str]:
"""Create a new repository in root with the specified namespace. """Create a new repository in root with the specified namespace.
If the namespace is not provided, use basename of root. If the namespace is not provided, use basename of root.
Return the canonicalized path and namespace of the created repository. Return the canonicalized path and namespace of the created repository.
""" """
root = spack.util.path.canonicalize_path(root) root = spack.util.path.canonicalize_path(root)
repo_yaml_dir, namespace = get_repo_yaml_dir(os.path.abspath(root), namespace, package_api) if not namespace:
namespace = os.path.basename(root)
existed = True if not re.match(r"\w[\.\w-]*", namespace):
try: raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
dir_entry = next(os.scandir(repo_yaml_dir), None)
except OSError as e:
if e.errno == errno.ENOENT:
existed = False
dir_entry = None
else:
raise BadRepoError(f"Cannot create new repo in {root}: {e}")
if dir_entry is not None: existed = False
raise BadRepoError(f"Cannot create new repo in {root}: directory is not empty.") if os.path.exists(root):
if os.path.isfile(root):
raise BadRepoError("File %s already exists and is not a directory" % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
if os.listdir(root):
raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
existed = True
config_path = os.path.join(repo_yaml_dir, repo_config_name) full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
subdir = _validate_and_normalize_subdir(subdir, root, package_api) if not os.access(parent, os.R_OK | os.W_OK):
raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
packages_path = os.path.join(repo_yaml_dir, subdir)
try: try:
config_path = os.path.join(root, repo_config_name)
packages_path = os.path.join(root, subdir)
fs.mkdirp(packages_path) fs.mkdirp(packages_path)
with open(config_path, "w", encoding="utf-8") as config: with open(config_path, "w", encoding="utf-8") as config:
config.write("repo:\n") config.write("repo:\n")
config.write(f" namespace: '{namespace}'\n") config.write(f" namespace: '{namespace}'\n")
if subdir != packages_dir_name: if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n") config.write(f" subdirectory: '{subdir}'\n")
x, y = package_api x, y = spack.package_api_version
config.write(f" api: v{x}.{y}\n") config.write(f" api: v{x}.{y}\n")
except OSError as e: except OSError as e:
@@ -1537,27 +1421,22 @@ def create_repo(
raise BadRepoError( raise BadRepoError(
"Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e) "Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
) from e )
return repo_yaml_dir, namespace return full_path, namespace
def from_path(path: str) -> Repo: def from_path(path: str) -> "Repo":
"""Returns a repository from the path passed as input. Injects the global misc cache.""" """Returns a repository from the path passed as input. Injects the global misc cache."""
return Repo(path, cache=spack.caches.MISC_CACHE) return Repo(path, cache=spack.caches.MISC_CACHE)
def create_or_construct( def create_or_construct(path, namespace=None):
root: str,
namespace: Optional[str] = None,
package_api: Tuple[int, int] = spack.package_api_version,
) -> Repo:
"""Create a repository, or just return a Repo if it already exists.""" """Create a repository, or just return a Repo if it already exists."""
repo_yaml_dir, _ = get_repo_yaml_dir(root, namespace, package_api) if not os.path.exists(path):
if not os.path.exists(repo_yaml_dir): fs.mkdirp(path)
fs.mkdirp(root) create_repo(path, namespace)
create_repo(root, namespace=namespace, package_api=package_api) return from_path(path)
return from_path(repo_yaml_dir)
def _path(configuration=None): def _path(configuration=None):
@@ -1635,10 +1514,8 @@ class MockRepositoryBuilder:
"""Build a mock repository in a directory""" """Build a mock repository in a directory"""
def __init__(self, root_directory, namespace=None): def __init__(self, root_directory, namespace=None):
namespace = namespace or "".join(random.choice(string.ascii_lowercase) for _ in range(10)) namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
repo_root = os.path.join(root_directory, namespace) self.root, self.namespace = create_repo(str(root_directory), namespace)
os.mkdir(repo_root)
self.root, self.namespace = create_repo(repo_root, namespace)
def add_package(self, name, dependencies=None): def add_package(self, name, dependencies=None):
"""Create a mock package in the repository, using a Jinja2 template. """Create a mock package in the repository, using a Jinja2 template.
@@ -1650,7 +1527,7 @@ def add_package(self, name, dependencies=None):
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used. ``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
""" """
dependencies = dependencies or [] dependencies = dependencies or []
context = {"cls_name": nm.pkg_name_to_class_name(name), "dependencies": dependencies} context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt") template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
text = template.render(context) text = template.render(context)
package_py = self.recipe_filename(name) package_py = self.recipe_filename(name)
@@ -1662,10 +1539,8 @@ def remove(self, name):
package_py = self.recipe_filename(name) package_py = self.recipe_filename(name)
shutil.rmtree(os.path.dirname(package_py)) shutil.rmtree(os.path.dirname(package_py))
def recipe_filename(self, name: str): def recipe_filename(self, name):
return os.path.join( return os.path.join(self.root, "packages", name, "package.py")
self.root, "packages", nm.pkg_name_to_pkg_dir(name, package_api=(2, 0)), "package.py"
)
class RepoError(spack.error.SpackError): class RepoError(spack.error.SpackError):
@@ -1715,10 +1590,7 @@ def __init__(self, name, repo=None):
# We need to compare the base package name # We need to compare the base package name
pkg_name = name.rsplit(".", 1)[-1] pkg_name = name.rsplit(".", 1)[-1]
try: similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
except Exception:
similar = []
if 1 <= len(similar) <= 5: if 1 <= len(similar) <= 5:
long_msg += "\n\nDid you mean one of the following packages?\n " long_msg += "\n\nDid you mean one of the following packages?\n "
View File
@@ -101,17 +101,26 @@ def wrapper(instance, *args, **kwargs):
# installed explicitly will also be installed as a # installed explicitly will also be installed as a
# dependency of another spec. In this case append to both # dependency of another spec. In this case append to both
# spec reports. # spec reports.
added = []
for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]): for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7)) name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7))
try: try:
item = next((x for x in self.specs if x["name"] == name)) item = next((x for x in self.specs if x["name"] == name))
item["packages"].append(package) item["packages"].append(package)
added.append(item)
except StopIteration: except StopIteration:
pass pass
start_time = time.time() start_time = time.time()
try: try:
value = wrapped_fn(instance, *args, **kwargs) value = wrapped_fn(instance, *args, **kwargs)
# If we are requeuing the task, it neither succeeded nor failed
# remove the package so we don't count it (yet) in either category
if value in spack.installer.requeue_results:
for item in added:
item["packages"].remove(package)
package["stdout"] = self.fetch_log(pkg) package["stdout"] = self.fetch_log(pkg)
package["installed_from_binary_cache"] = pkg.installed_from_binary_cache package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
self.on_success(pkg, kwargs, package) self.on_success(pkg, kwargs, package)
View File
@@ -100,7 +100,7 @@
"allow_sgid": {"type": "boolean"}, "allow_sgid": {"type": "boolean"},
"install_status": {"type": "boolean"}, "install_status": {"type": "boolean"},
"binary_index_root": {"type": "string"}, "binary_index_root": {"type": "string"},
"url_fetch_method": {"type": "string", "pattern": r"^urllib$|^curl( .*)*"}, "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
"additional_external_search_paths": {"type": "array", "items": {"type": "string"}}, "additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
"binary_index_ttl": {"type": "integer", "minimum": 0}, "binary_index_ttl": {"type": "integer", "minimum": 0},
"aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}}, "aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
View File
@@ -287,33 +287,9 @@ def specify(spec):
     return spack.spec.Spec(spec)


-def remove_facts(
-    *to_be_removed: str,
-) -> Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]:
-    """Returns a transformation function that removes facts from the input list of facts."""
-
-    def _remove(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
-        return list(filter(lambda x: x.args[0] not in to_be_removed, facts))
-
-    return _remove
-
-
-def remove_build_deps(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
-    build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
-    result = []
-    for x in facts:
-        current_name = x.args[1]
-        if current_name in build_deps:
-            x.name = "build_requirement"
-            result.append(fn.attr("build_requirement", build_deps[current_name], x))
-            continue
-        if x.args[0] == "depends_on":
-            continue
-        result.append(x)
-    return result
+def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
+    """Transformation that removes all "node" and "virtual_node" from the input list of facts."""
+    return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))


 def all_libcs() -> Set[spack.spec.Spec]:
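The removed `remove_facts` is a closure factory generalizing the added `remove_node`: it builds a filter over fact names instead of hard-coding them. A reduced sketch with facts modeled as plain tuples (all data invented):

from typing import Callable, List, Tuple

# Facts modeled as (name, *args) tuples; the real code uses AspFunction objects.
Fact = Tuple[str, ...]

def remove_facts(*to_be_removed: str) -> Callable[[List[Fact]], List[Fact]]:
    def _remove(facts: List[Fact]) -> List[Fact]:
        return [f for f in facts if f[0] not in to_be_removed]
    return _remove

facts = [("node", "zlib"), ("virtual_node", "c"), ("version", "zlib", "1.3")]
print(remove_facts("node", "virtual_node")(facts))  # [('version', 'zlib', '1.3')]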
@@ -1214,7 +1190,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
                 problem_repr += "\n" + f.read()

         result = None
-        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", False)
+        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
         if conc_cache_enabled:
             result, concretization_stats = CONC_CACHE.fetch(problem_repr)
@@ -1311,8 +1287,12 @@ def on_model(model):
         result.raise_if_unsat()

         if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
-            raise OutputDoesNotSatisfyInputError(result.unsolved_specs)
+            unsolved_str = Result.format_unsolved(result.unsolved_specs)
+            raise InternalConcretizerError(
+                "Internal Spack error: the solver completed but produced specs"
+                " that do not satisfy the request. Please report a bug at "
+                f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
+            )

         if conc_cache_enabled:
             CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
             concretization_stats = self.control.statistics
@@ -1755,17 +1735,15 @@ def define_variant(
             pkg_fact(fn.variant_condition(name, vid, cond_id))

         # record type so we can construct the variant when we read it back in
-        self.gen.fact(fn.variant_type(vid, variant_def.variant_type.string))
+        self.gen.fact(fn.variant_type(vid, variant_def.variant_type.value))

         if variant_def.sticky:
             pkg_fact(fn.variant_sticky(vid))

         # define defaults for this variant definition
-        if variant_def.multi:
-            for val in sorted(variant_def.make_default().values):
-                pkg_fact(fn.variant_default_value_from_package_py(vid, val))
-        else:
-            pkg_fact(fn.variant_default_value_from_package_py(vid, variant_def.default))
+        defaults = variant_def.make_default().value if variant_def.multi else [variant_def.default]
+        for val in sorted(defaults):
+            pkg_fact(fn.variant_default_value_from_package_py(vid, val))

         # define possible values for this variant definition
         values = variant_def.values
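The replacement collapses the two default cases into one loop by normalizing the single-valued default to a one-element list. A standalone sketch of that normalization (the `FakeVariantDef` stand-in and its comma-separated multi default are assumptions, not Spack's real classes):

class FakeVariantDef:
    """Stand-in for a package variant definition; not Spack's real class."""
    def __init__(self, default, multi):
        self.default, self.multi = default, multi

def default_values(vdef):
    # Assumes multi defaults are comma-separated strings, e.g. "b,a"
    return sorted(vdef.default.split(",")) if vdef.multi else [vdef.default]

print(default_values(FakeVariantDef("b,a", multi=True)))   # ['a', 'b']
print(default_values(FakeVariantDef("on", multi=False)))   # ['on']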
@@ -1793,9 +1771,7 @@ def define_variant(
             # make a spec indicating whether the variant has this conditional value
             variant_has_value = spack.spec.Spec()
-            variant_has_value.variants[name] = vt.VariantValue(
-                vt.VariantType.MULTI, name, (value.value,)
-            )
+            variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value)

             if value.when:
                 # the conditional value is always "possible", but it imposes its when condition as
@@ -1908,7 +1884,7 @@ def condition(
         if not context:
             context = ConditionContext()
-            context.transform_imposed = remove_facts("node", "virtual_node")
+            context.transform_imposed = remove_node

         if imposed_spec:
             imposed_name = imposed_spec.name or imposed_name
@@ -2008,7 +1984,7 @@ def track_dependencies(input_spec, requirements):
             return requirements + [fn.attr("track_dependencies", input_spec.name)]

         def dependency_holds(input_spec, requirements):
-            result = remove_facts("node", "virtual_node")(input_spec, requirements) + [
+            result = remove_node(input_spec, requirements) + [
                 fn.attr(
                     "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
                 )
@@ -2198,10 +2174,7 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
                     pkg_name, ConstraintOrigin.REQUIRE
                 )
                 if not virtual:
-                    context.transform_required = remove_build_deps
-                    context.transform_imposed = remove_facts(
-                        "node", "virtual_node", "depends_on"
-                    )
+                    context.transform_imposed = remove_node
                 # else: for virtuals we want to emit "node" and
                 # "virtual_node" in imposed specs
@@ -2263,18 +2236,16 @@ def external_packages(self):
             if pkg_name not in self.pkgs:
                 continue

+            self.gen.h2(f"External package: {pkg_name}")
             # Check if the external package is buildable. If it is
             # not then "external(<pkg>)" is a fact, unless we can
             # reuse an already installed spec.
             external_buildable = data.get("buildable", True)
-            externals = data.get("externals", [])
-            if not external_buildable or externals:
-                self.gen.h2(f"External package: {pkg_name}")
-
             if not external_buildable:
                 self.gen.fact(fn.buildable_false(pkg_name))

             # Read a list of all the specs for this package
+            externals = data.get("externals", [])
             candidate_specs = [
                 spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
             ]
@@ -2363,8 +2334,6 @@ def preferred_variants(self, pkg_name):
         if not preferred_variants:
             return

-        self.gen.h2(f"Package preferences: {pkg_name}")
-
         for variant_name in sorted(preferred_variants):
             variant = preferred_variants[variant_name]
@@ -2377,7 +2346,7 @@ def preferred_variants(self, pkg_name):
                 )
                 continue

-            for value in variant.values:
+            for value in variant.value_as_tuple:
                 for variant_def in variant_defs:
                     self.variant_values_from_specs.add((pkg_name, id(variant_def), value))
                 self.gen.fact(
@@ -2492,10 +2461,10 @@ def _spec_clauses(
                 # TODO: variant="*" means 'variant is defined to something', which used to
                 # be meaningless in concretization, as all variants had to be defined. But
                 # now that variants can be conditional, it should force a variant to exist.
-                if not variant.values:
+                if variant.value == ("*",):
                     continue

-                for value in variant.values:
+                for value in variant.value_as_tuple:
                     # ensure that the value *can* be valid for the spec
                     if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
                         variant_defs = vt.prevalidate_variant_value(
@@ -2605,16 +2574,6 @@ def _spec_clauses(
                     # already-installed concrete specs.
                     if concrete_build_deps or dspec.depflag != dt.BUILD:
                         clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
-                    elif not concrete_build_deps and dspec.depflag:
-                        clauses.append(
-                            fn.attr(
-                                "concrete_build_dependency", spec.name, dep.name, dep.dag_hash()
-                            )
-                        )
-                        for virtual_name in dspec.virtuals:
-                            clauses.append(
-                                fn.attr("virtual_on_build_edge", spec.name, dep.name, virtual_name)
-                            )

                 # if the spec is abstract, descend into dependencies.
                 # if it's concrete, then the hashes above take care of dependency
@@ -3038,46 +2997,14 @@ def setup(
         """
         reuse = reuse or []
         check_packages_exist(specs)

-        self.gen = ProblemInstanceBuilder()
-
-        # Compute possible compilers first, so we can record which dependencies they might inject
-        _ = spack.compilers.config.all_compilers(init_config=True)
-
-        # Get compilers from buildcache only if injected through "reuse" specs
-        supported_compilers = spack.compilers.config.supported_compilers()
-        compilers_from_reuse = {
-            x for x in reuse if x.name in supported_compilers and not x.external
-        }
-        candidate_compilers, self.rejected_compilers = possible_compilers(
-            configuration=spack.config.CONFIG
-        )
-        for x in candidate_compilers:
-            if x.external or x in reuse:
-                continue
-            reuse.append(x)
-            for dep in x.traverse(root=False, deptype="run"):
-                reuse.extend(dep.traverse(deptype=("link", "run")))
-
-        candidate_compilers.update(compilers_from_reuse)
-        self.possible_compilers = list(candidate_compilers)
-        self.possible_compilers.sort()  # type: ignore[call-overload]
-
-        self.gen.h1("Runtimes")
-        injected_dependencies = self.define_runtime_constraints()
-
-        node_counter = create_counter(
-            specs + injected_dependencies, tests=self.tests, possible_graph=self.possible_graph
-        )
+        node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
         self.possible_virtuals = node_counter.possible_virtuals()
         self.pkgs = node_counter.possible_dependencies()
         self.libcs = sorted(all_libcs())  # type: ignore[type-var]

         # Fail if we already know an unreachable node is requested
         for spec in specs:
-            # concrete roots don't need their dependencies verified
-            if spec.concrete:
-                continue
-
             missing_deps = [
                 str(d)
                 for d in spec.traverse()

@@ -3090,6 +3017,7 @@ def setup(
             if node.namespace is not None:
                 self.explicitly_required_namespaces[node.name] = node.namespace

+        self.gen = ProblemInstanceBuilder()
         self.gen.h1("Generic information")
         if using_libc_compatibility():
             for libc in self.libcs:

@@ -3118,6 +3046,27 @@ def setup(
         specs = tuple(specs)  # ensure compatible types to add

+        _ = spack.compilers.config.all_compilers(init_config=True)
+
+        # Get compilers from buildcache only if injected through "reuse" specs
+        supported_compilers = spack.compilers.config.supported_compilers()
+        compilers_from_reuse = {
+            x for x in reuse if x.name in supported_compilers and not x.external
+        }
+        candidate_compilers, self.rejected_compilers = possible_compilers(
+            configuration=spack.config.CONFIG
+        )
+        for x in candidate_compilers:
+            if x.external or x in reuse:
+                continue
+            reuse.append(x)
+            for dep in x.traverse(root=False, deptype="run"):
+                reuse.extend(dep.traverse(deptype=("link", "run")))
+
+        candidate_compilers.update(compilers_from_reuse)
+        self.possible_compilers = list(candidate_compilers)
+        self.possible_compilers.sort()  # type: ignore[call-overload]
+
         self.gen.h1("Reusable concrete specs")
         self.define_concrete_input_specs(specs, self.pkgs)
         if reuse:
@@ -3169,6 +3118,7 @@ def setup(
         for pkg in sorted(self.pkgs):
             self.gen.h2("Package rules: %s" % pkg)
             self.pkg_rules(pkg, tests=self.tests)
+            self.gen.h2("Package preferences: %s" % pkg)
             self.preferred_variants(pkg)

         self.gen.h1("Special variants")
@@ -3188,6 +3138,9 @@ def setup(
         self.gen.h1("Variant Values defined in specs")
         self.define_variant_values()

+        self.gen.h1("Runtimes")
+        self.define_runtime_constraints()
+
         self.gen.h1("Version Constraints")
         self.collect_virtual_constraints()
         self.define_version_constraints()
@@ -3221,10 +3174,8 @@ def visit(node):
         path = os.path.join(parent_dir, "concretize.lp")
         parse_files([path], visit)

-    def define_runtime_constraints(self) -> List[spack.spec.Spec]:
-        """Define the constraints to be imposed on the runtimes, and returns a list of
-        injected packages.
-        """
+    def define_runtime_constraints(self):
+        """Define the constraints to be imposed on the runtimes"""
         recorder = RuntimePropertyRecorder(self)

         for compiler in self.possible_compilers:

@@ -3240,13 +3191,12 @@ def define_runtime_constraints(self) -> List[spack.spec.Spec]:
             # FIXME (compiler as nodes): think of using isinstance(compiler_cls, WrappedCompiler)
             # Add a dependency on the compiler wrapper
-            for language in ("c", "cxx", "fortran"):
-                recorder("*").depends_on(
-                    "compiler-wrapper",
-                    when=f"%[virtuals={language}] {compiler.name}@{compiler.versions}",
-                    type="build",
-                    description=f"Add the compiler wrapper when using {compiler} for {language}",
-                )
+            recorder("*").depends_on(
+                "compiler-wrapper",
+                when=f"%{compiler.name}@{compiler.versions}",
+                type="build",
+                description=f"Add the compiler wrapper when using {compiler}",
+            )

             if not using_libc_compatibility():
                 continue

@@ -3275,7 +3225,6 @@ def define_runtime_constraints(self) -> List[spack.spec.Spec]:
             )

         recorder.consume_facts()
-        return sorted(recorder.injected_dependencies)

     def literal_specs(self, specs):
         for spec in sorted(specs):
@@ -3308,13 +3257,15 @@ def literal_specs(self, specs):
                     # These facts are needed to compute the "condition_set" of the root
                     pkg_name = clause.args[1]
                     self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
+                elif clause_name == "depends_on":
+                    pkg_name = clause.args[2]
+                    self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))

             requirements.append(
                 fn.attr(
                     "virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
                 )
             )
-            requirements = [x for x in requirements if x.args[0] != "depends_on"]
             cache[imposed_spec_key] = (effect_id, requirements)
         self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
@@ -3538,7 +3489,6 @@ def __init__(self, setup):
         self._setup = setup
         self.rules = []
         self.runtime_conditions = set()
-        self.injected_dependencies = set()
         # State of this object set in the __call__ method, and reset after
         # each directive-like method
         self.current_package = None
@@ -3577,7 +3527,6 @@ def depends_on(self, dependency_str: str, *, when: str, type: str, description:
         if dependency_spec.versions != vn.any_version:
             self._setup.version_constraints.add((dependency_spec.name, dependency_spec.versions))

-        self.injected_dependencies.add(dependency_spec)
         body_str, node_variable = self.rule_body_from(when_spec)
         head_clauses = self._setup.spec_clauses(dependency_spec, body=False)
# (avoid adding virtuals everywhere, if a single edge needs it) # (avoid adding virtuals everywhere, if a single edge needs it)
_, provider, virtual = clause.args _, provider, virtual = clause.args
clause.args = "virtual_on_edge", node_placeholder, provider, virtual clause.args = "virtual_on_edge", node_placeholder, provider, virtual
body_str = ",\n".join(f" {x}" for x in body_clauses) body_str = (
body_str += f",\n not external({node_variable})" f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n"
body_str = body_str.replace(f'"{node_placeholder}"', f"{node_variable}") f" not external({node_variable}),\n"
f" not runtime(Package)"
).replace(f'"{node_placeholder}"', f"{node_variable}")
for old, replacement in when_substitutions.items(): for old, replacement in when_substitutions.items():
body_str = body_str.replace(old, replacement) body_str = body_str.replace(old, replacement)
return body_str, node_variable return body_str, node_variable
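Both sides assemble the ASP rule body by joining clause strings; the added version also guards the rule with `not runtime(Package)`. A runnable reduction with invented clause text (the real code renders AspFunction objects):

import os

body_clauses = [
    'attr("node", node(X, "gcc-runtime"))',
    'attr("version", node(X, "gcc-runtime"), "13.2.0")',
]
node_variable = "node(X, Package)"

body_str = (
    f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
    f"  not external({node_variable}),\n"
    f"  not runtime(Package)"
)
print(body_str)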
@@ -3731,21 +3682,20 @@ def consume_facts(self):
         """Consume the facts collected by this object, and emits rules and
         facts for the runtimes.
         """
-        self._setup.gen.h2("Runtimes: declarations")
-        runtime_pkgs = sorted(
-            {x.name for x in self.injected_dependencies if not spack.repo.PATH.is_virtual(x.name)}
-        )
-        for runtime_pkg in runtime_pkgs:
-            self._setup.gen.fact(fn.runtime(runtime_pkg))
-        self._setup.gen.newline()
-
         self._setup.gen.h2("Runtimes: rules")
         self._setup.gen.newline()
         for rule in self.rules:
             self._setup.gen.append(rule)
+            self._setup.gen.newline()

-        self._setup.gen.h2("Runtimes: requirements")
+        self._setup.gen.h2("Runtimes: conditions")
+        for runtime_pkg in spack.repo.PATH.packages_with_tags("runtime"):
+            self._setup.gen.fact(fn.runtime(runtime_pkg))
+            self._setup.gen.fact(fn.possible_in_link_run(runtime_pkg))
+            self._setup.gen.newline()
+            # Inject version rules for runtimes (versions are declared based
+            # on the available compilers)
+            self._setup.pkg_version_rules(runtime_pkg)
+
         for imposed_spec, when_spec in sorted(self.runtime_conditions):
             msg = f"{when_spec} requires {imposed_spec} at runtime"
             _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)
@@ -3832,13 +3782,13 @@ def node_os(self, node, os):
     def node_target(self, node, target):
         self._arch(node).target = target

-    def variant_selected(self, node, name: str, value: str, variant_type: str, variant_id):
+    def variant_selected(self, node, name, value, variant_type, variant_id):
         spec = self._specs[node]
         variant = spec.variants.get(name)
         if not variant:
-            spec.variants[name] = vt.VariantValue.from_concretizer(name, value, variant_type)
+            spec.variants[name] = vt.VariantType(variant_type).variant_class(name, value)
         else:
-            assert variant_type == "multi", (
+            assert variant_type == vt.VariantType.MULTI.value, (
                 f"Can't have multiple values for single-valued variant: "
                 f"{node}, {name}, {value}, {variant_type}, {variant_id}"
             )
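The added lookup `vt.VariantType(variant_type).variant_class(name, value)` dispatches from the string the solver emits to a variant class through an Enum property. A minimal model of that dispatch (the classes are invented stand-ins, not Spack's real ones):

import enum

class SingleValuedVariant:
    def __init__(self, name, value):
        self.name, self.value = name, value

class MultiValuedVariant(SingleValuedVariant):
    pass

class VariantType(enum.Enum):
    SINGLE = "single"
    MULTI = "multi"

    @property
    def variant_class(self):
        # map the enum member to the class that should hold the value
        return MultiValuedVariant if self is VariantType.MULTI else SingleValuedVariant

# "multi" plays the role of the variant_type string from the solver
v = VariantType("multi").variant_class("cuda_arch", ("70", "80"))
print(type(v).__name__, v.value)  # MultiValuedVariant ('70', '80')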
@@ -3862,17 +3812,6 @@ def external_spec_selected(self, node, idx):
         )
         self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

-        # Annotate compiler specs from externals
-        external_spec = spack.spec.Spec(spec_info["spec"])
-        external_spec_deps = external_spec.dependencies()
-        if len(external_spec_deps) > 1:
-            raise InvalidExternalError(
-                f"external spec {spec_info['spec']} cannot have more than one dependency"
-            )
-        elif len(external_spec_deps) == 1:
-            compiler_str = external_spec_deps[0]
-            self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))
-
         # If this is an extension, update the dependencies to include the extendee
         package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
         extendee_spec = package.extendee_spec
@@ -3891,7 +3830,7 @@ def virtual_on_edge(self, parent_node, provider_node, virtual):
         provider_spec = self._specs[provider_node]
         dependencies = [x for x in dependencies if id(x.spec) == id(provider_spec)]
         assert len(dependencies) == 1, f"{virtual}: {provider_node.pkg}"
-        dependencies[0].update_virtuals(virtual)
+        dependencies[0].update_virtuals((virtual,))

     def reorder_flags(self):
         """For each spec, determine the order of compiler flags applied to it.
@@ -4228,10 +4167,10 @@ def _inject_patches_variant(root: spack.spec.Spec) -> None:
             continue

         patches = list(spec_to_patches[id(spec)])
-        variant: vt.VariantValue = spec.variants.setdefault(
+        variant: vt.MultiValuedVariant = spec.variants.setdefault(
             "patches", vt.MultiValuedVariant("patches", ())
         )
-        variant.set(*(p.sha256 for p in patches))
+        variant.value = tuple(p.sha256 for p in patches)

         # FIXME: Monkey patches variant to store patches order
         ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
         ordered_hashes.sort()
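The ordering annotation sorts by each patch's ordering key and keeps only the hashes, so apply order survives even though the variant itself stores an unordered value. A tiny sketch with invented patch data:

from collections import namedtuple

# Invented patch records; ordering_key mirrors the (owner, index) pairs above
Patch = namedtuple("Patch", ["ordering_key", "sha256"])
patches = [Patch(("pkg", 2), "bbb"), Patch(("pkg", 1), "aaa")]

ordered_hashes = sorted((*p.ordering_key, p.sha256) for p in patches if p.ordering_key)
print([h for *_, h in ordered_hashes])  # ['aaa', 'bbb'] -- apply order preserved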
@@ -4699,9 +4638,13 @@ def solve_in_rounds(
                 break

             if not result.specs:
-                # This is also a problem: no specs were solved for, which means we would be in a
-                # loop if we tried again
-                raise OutputDoesNotSatisfyInputError(result.unsolved_specs)
+                # This is also a problem: no specs were solved for, which
+                # means we would be in a loop if we tried again
+                unsolved_str = Result.format_unsolved(result.unsolved_specs)
+                raise InternalConcretizerError(
+                    "Internal Spack error: a subset of input specs could not"
+                    f" be solved for.\n\t{unsolved_str}"
+                )

             input_specs = list(x for (x, y) in result.unsolved_specs)
             for spec in result.specs:
@@ -4731,19 +4674,6 @@ def __init__(self, msg):
         self.constraint_type = None


-class OutputDoesNotSatisfyInputError(InternalConcretizerError):
-    def __init__(
-        self, input_to_output: List[Tuple[spack.spec.Spec, Optional[spack.spec.Spec]]]
-    ) -> None:
-        self.input_to_output = input_to_output
-        super().__init__(
-            "internal solver error: the solver completed but produced specs"
-            " that do not satisfy the request. Please report a bug at "
-            f"https://github.com/spack/spack/issues\n\t{Result.format_unsolved(input_to_output)}"
-        )
-
-
 class SolverError(InternalConcretizerError):
     """For cases where the solver is unable to produce a solution.
@@ -4776,7 +4706,3 @@ class InvalidSpliceError(spack.error.SpackError):

 class NoCompilerFoundError(spack.error.SpackError):
     """Raised when there is no possible compiler"""
-
-
-class InvalidExternalError(spack.error.SpackError):
-    """Raised when there is no possible compiler"""

Some files were not shown because too many files have changed in this diff.