Compare commits
5 Commits
hs/feature ... features/r

Commits (SHA1):
89b4d2a33e
546e5a9a54
401c183de9
b757500d9e
2b759cf853
1 .github/workflows/audit.yaml vendored
@@ -59,6 +59,7 @@ jobs:
      - name: Package audits (without coverage)
        if: ${{ runner.os == 'Windows' }}
        run: |
          . share/spack/setup-env.sh
          spack -d audit packages
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit configs
17 .github/workflows/ci.yaml vendored
@@ -9,7 +9,6 @@ on:
    branches:
      - develop
      - releases/**
  merge_group:

concurrency:
  group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
      packages: ${{ steps.filter.outputs.packages }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
      # For pull requests it's not necessary to checkout the code
      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
        id: filter
        with:
          # For merge group events, compare against the target branch (main)
          base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
          # For merge group events, use the merge group head ref
          ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
          # Don't run if we only modified packages in the
          # built-in repository or documentation
@@ -81,11 +76,10 @@ jobs:

  prechecks:
    needs: [ changes ]
    uses: ./.github/workflows/prechecks.yml
    uses: ./.github/workflows/valid-style.yml
    secrets: inherit
    with:
      with_coverage: ${{ needs.changes.outputs.core }}
      with_packages: ${{ needs.changes.outputs.packages }}

  import-check:
    needs: [ changes ]
@@ -99,7 +93,7 @@ jobs:
      - name: Success
        run: |
          if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
            echo "Unit tests failed."
            echo "Unit tests failed."
            exit 1
          else
            exit 0
@@ -107,7 +101,6 @@ jobs:

  coverage:
    needs: [ unit-tests, prechecks ]
    if: ${{ needs.changes.outputs.core }}
    uses: ./.github/workflows/coverage.yml
    secrets: inherit

@@ -120,10 +113,10 @@ jobs:
      - name: Status summary
        run: |
          if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
            echo "Unit tests failed."
            echo "Unit tests failed."
            exit 1
          elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
            echo "Bootstrap tests failed."
            echo "Bootstrap tests failed."
            exit 1
          else
            exit 0
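The hunk above now calls ``./.github/workflows/prechecks.yml`` and passes it ``with_coverage`` and ``with_packages``. A minimal sketch of the callable interface that workflow would need to expose, assuming only the input names visible in this diff (everything else is illustrative):

.. code-block:: yaml

   # Sketch of the workflow_call interface implied by the ci.yaml change above
   name: prechecks
   on:
     workflow_call:
       inputs:
         with_coverage:
           required: true
           type: string
         with_packages:
           required: true
           type: string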
2 .github/workflows/import-check.yaml vendored
@@ -39,7 +39,7 @@ jobs:
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          repository: haampie/circular-import-fighter
          ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
          ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
          path: circular-import-fighter
      - name: Install dependencies
        working-directory: circular-import-fighter
@@ -1,8 +1,7 @@
black==25.1.0
clingo==5.8.0
flake8==7.2.0
isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250403
black==24.10.0
clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.11.2
types-six==1.17.0.20241205
vermin==1.6.0
pylint==3.3.6
3 .github/workflows/unit_tests.yaml vendored
@@ -19,6 +19,9 @@ jobs:
        on_develop:
          - ${{ github.ref == 'refs/heads/develop' }}
        include:
          - python-version: '3.6'
            os: ubuntu-20.04
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
          - python-version: '3.7'
            os: ubuntu-22.04
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
@@ -1,4 +1,4 @@
name: prechecks
name: style

on:
  workflow_call:
@@ -6,9 +6,6 @@ on:
      with_coverage:
        required: true
        type: string
      with_packages:
        required: true
        type: string

concurrency:
  group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -25,56 +22,43 @@ jobs:
        with:
          python-version: '3.13'
          cache: 'pip'
          cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
      - name: Install Python Packages
        run: |
          pip install --upgrade pip setuptools
          pip install -r .github/workflows/requirements/style/requirements.txt
      - name: vermin (Spack's Core)
        run: |
          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
      - name: vermin (Repositories)
        run: |
          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
  # Run style checks on the files that have been changed
  style:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 2
          fetch-depth: 0
      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.13'
          cache: 'pip'
          cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools
          pip install -r .github/workflows/requirements/style/requirements.txt
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Run style tests
        run: |
          bin/spack style --base HEAD^1
          bin/spack license verify
          pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

          share/spack/qa/run-style-tests
  audit:
    uses: ./.github/workflows/audit.yaml
    secrets: inherit
    with:
      with_coverage: ${{ inputs.with_coverage }}
      python_version: '3.13'

  verify-checksums:
    if: ${{ inputs.with_packages == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 2
      - name: Verify Added Checksums
        run: |
          bin/spack ci verify-versions HEAD^1 HEAD

  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
  bootstrap-dev-rhel8:
    runs-on: ubuntu-latest
@@ -102,3 +86,21 @@ jobs:
          spack -d bootstrap now --dev
          spack -d style -t black
          spack unit-test -V

  # Further style checks from pylint
  pylint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.13'
          cache: 'pip'
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pylint
      - name: Pylint (Spack Core)
        run: |
          pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
1 .gitignore vendored
@@ -201,6 +201,7 @@ tramp

# Org-mode
.org-id-locations
*_archive

# flymake-mode
*_flymake.*
@@ -43,28 +43,6 @@ concretizer:
    # (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
    # Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
    # number used if there isn't a more specific alternative
    max_dupes:
      default: 1
      # Virtuals
      c: 2
      cxx: 2
      fortran: 1
      # Regular packages
      cmake: 2
      gmake: 2
      python: 2
      python-venv: 2
      py-cython: 2
      py-flit-core: 2
      py-pip: 2
      py-setuptools: 2
      py-wheel: 2
      xcb-proto: 2
      # Compilers
      gcc: 2
      llvm: 2
  # Option to specify compatibility between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
  # Setting this to false yields unreproducible results, so we advise to use that value only
  # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
  error_on_timeout: true

  # Static analysis may reduce the concretization time by generating smaller ASP problems, in
  # cases where there are requirements that prevent part of the search space to be explored.
  static_analysis: false
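The hunk above trims the detailed ``max_dupes`` table from the shipped ``concretizer.yaml``. A minimal sketch of a user-scope override using the same keys; the specific values are illustrative, not defaults:

.. code-block:: yaml

   # Sketch: user-scope concretizer.yaml override using the keys from the hunk above
   concretizer:
     duplicates:
       strategy: minimal
       max_dupes:
         default: 1
         gmake: 3            # illustrative value, not the shipped default
     static_analysis: true   # opt in; the shipped default shown above is false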
@@ -19,7 +19,7 @@ config:
  install_tree:
    root: $spack/opt/spack
    projections:
      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
  # install_tree can include an optional padded length (int or boolean)
  # default is False (do not pad)
  # if padded_length is True, Spack will pad as close to the system max path
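The new default projection shown above can also be set explicitly in a user-scope ``config.yaml``; a minimal sketch using the projection string from this hunk:

.. code-block:: yaml

   # Sketch: user-scope config.yaml pinning the projection from the hunk above
   config:
     install_tree:
       root: $spack/opt/spack
       projections:
         all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"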
@@ -15,11 +15,12 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
      - apple-clang
      - clang
      - gcc
    providers:
      c: [apple-clang, llvm, gcc]
      cxx: [apple-clang, llvm, gcc]
      elf: [libelf]
      fortran: [gcc]
      fuse: [macfuse]
      gl: [apple-gl]
      glu: [apple-glu]
@@ -49,12 +50,3 @@ packages:
      # although the version number used here isn't critical
      - spec: apple-libuuid@1353.100.2
        prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
  c:
    prefer:
      - apple-clang
  cxx:
    prefer:
      - apple-clang
  fortran:
    prefer:
      - gcc
@@ -15,25 +15,25 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
    providers:
      awk: [gawk]
      armci: [armcimpi]
      blas: [openblas, amdblis]
      c: [gcc, llvm, intel-oneapi-compilers]
      cxx: [gcc, llvm, intel-oneapi-compilers]
      c: [gcc]
      cxx: [gcc]
      D: [ldc]
      daal: [intel-oneapi-daal]
      elf: [elfutils]
      fftw-api: [fftw, amdfftw]
      flame: [libflame, amdlibflame]
      fortran: [gcc, llvm, intel-oneapi-compilers]
      fortran: [gcc]
      fortran-rt: [gcc-runtime, intel-oneapi-runtime]
      fuse: [libfuse]
      gl: [glx, osmesa]
      glu: [mesa-glu, openglu]
      golang: [go, gcc]
      go-or-gccgo-bootstrap: [go-bootstrap, gcc]
      hip-lang: [llvm-amdgpu]
      iconv: [libiconv]
      ipp: [intel-oneapi-ipp]
      java: [openjdk, jdk]
@@ -15,11 +15,8 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
      - msvc
    providers:
      c: [msvc]
      cxx: [msvc]
      mpi: [msmpi]
      gl: [wgl]
  mpi:
    require:
      - one_of: [msmpi]
@@ -1291,61 +1291,55 @@ based on site policies.
|
||||
Variants
|
||||
^^^^^^^^
|
||||
|
||||
Variants are named options associated with a particular package and are
|
||||
typically used to enable or disable certain features at build time. They
|
||||
are optional, as each package must provide default values for each variant
|
||||
it makes available.
|
||||
|
||||
The names of variants available for a particular package depend on
|
||||
Variants are named options associated with a particular package. They are
|
||||
optional, as each package must provide default values for each variant it
|
||||
makes available. Variants can be specified using
|
||||
a flexible parameter syntax ``name=<value>``. For example,
|
||||
``spack install mercury debug=True`` will install mercury built with debug
|
||||
flags. The names of particular variants available for a package depend on
|
||||
what was provided by the package author. ``spack info <package>`` will
|
||||
provide information on what build variants are available.
|
||||
|
||||
There are different types of variants:
|
||||
For compatibility with earlier versions, variants which happen to be
|
||||
boolean in nature can be specified by a syntax that represents turning
|
||||
options on and off. For example, in the previous spec we could have
|
||||
supplied ``mercury +debug`` with the same effect of enabling the debug
|
||||
compile time option for the libelf package.
|
||||
|
||||
1. Boolean variants. Typically used to enable or disable a feature at
|
||||
compile time. For example, a package might have a ``debug`` variant that
|
||||
can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
|
||||
2. Single-valued variants. Often used to set defaults. For example, a package
|
||||
might have a ``compression`` variant that determines the default
|
||||
compression algorithm, which users could set to ``compression=gzip`` or
|
||||
``compression=zstd``.
|
||||
3. Multi-valued variants. A package might have a ``fabrics`` variant that
|
||||
determines which network fabrics to support. Users could set this to
|
||||
``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
|
||||
interfaces. The values are separated by commas.
|
||||
Depending on the package a variant may have any default value. For
|
||||
``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
|
||||
with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
|
||||
you can turn it off by either adding ``-name`` or ``~name`` to the spec.
|
||||
|
||||
The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
|
||||
fabrics, but other fabrics may be enabled as well. If the intent is to
|
||||
enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
|
||||
syntax should be used with the ``:=`` operator.
|
||||
There are two syntaxes here because, depending on context, ``~`` and
|
||||
``-`` may mean different things. In most shells, the following will
|
||||
result in the shell performing home directory substitution:
|
||||
|
||||
.. note::
|
||||
.. code-block:: sh
|
||||
|
||||
In certain shells, the ``~`` character is expanded to the home
|
||||
directory. To avoid these issues, avoid whitespace between the package
|
||||
name and the variant:
|
||||
mpileaks ~debug # shell may try to substitute this!
|
||||
mpileaks~debug # use this instead
|
||||
|
||||
.. code-block:: sh
|
||||
If there is a user called ``debug``, the ``~`` will be incorrectly
|
||||
expanded. In this situation, you would want to write ``libelf
|
||||
-debug``. However, ``-`` can be ambiguous when included after a
|
||||
package name without spaces:
|
||||
|
||||
mpileaks ~debug # shell may try to substitute this!
|
||||
mpileaks~debug # use this instead
|
||||
.. code-block:: sh
|
||||
|
||||
Alternatively, you can use the ``-`` character to disable a variant,
|
||||
but be aware that this requires a space between the package name and
|
||||
the variant:
|
||||
mpileaks-debug # wrong!
|
||||
mpileaks -debug # right
|
||||
|
||||
.. code-block:: sh
|
||||
Spack allows the ``-`` character to be part of package names, so the
|
||||
above will be interpreted as a request for the ``mpileaks-debug``
|
||||
package, not a request for ``mpileaks`` built without ``debug``
|
||||
options. In this scenario, you should write ``mpileaks~debug`` to
|
||||
avoid ambiguity.
|
||||
|
||||
mpileaks-debug # wrong: refers to a package named "mpileaks-debug"
|
||||
mpileaks -debug # right: refers to a package named mpileaks with debug disabled
|
||||
|
||||
As a last resort, ``debug=False`` can also be used to disable a boolean variant.
|
||||
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""""""""""
|
||||
Variant propagation to dependencies
|
||||
"""""""""""""""""""""""""""""""""""
|
||||
When spack normalizes specs, it prints them out with no spaces boolean
|
||||
variants using the backwards compatibility syntax and uses only ``~``
|
||||
for disabled boolean variants. The ``-`` and spaces on the command
|
||||
line are provided for convenience and legibility.
|
||||
|
||||
Spack allows variants to propagate their value to the package's
|
||||
dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
|
||||
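The rewritten section distinguishes boolean, single-valued, and multi-valued variants and the exact-match ``:=`` operator. A short illustration as environment specs; ``mercury``, ``mpileaks``, and ``openmpi`` follow the documentation's own examples, while ``foo compression=zstd`` is a hypothetical package/variant pair:

.. code-block:: yaml

   # Illustrative spack.yaml specs exercising the variant syntaxes above
   spack:
     specs:
       - mercury debug=True          # boolean variant via the key=value syntax
       - mpileaks~debug              # boolean variant disabled; no space before ~
       - foo compression=zstd        # single-valued variant (hypothetical package/variant)
       - openmpi fabrics=verbs,ofi   # multi-valued: enable at least these fabrics
       - openmpi fabrics:=verbs,ofi  # exact match: only these fabrics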
@@ -1415,29 +1409,27 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
gcc:
|
||||
externals:
|
||||
- spec: gcc@4.9.3
|
||||
prefix: /opt/gcc
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /opt/gcc/bin/gcc
|
||||
cxx: /opt/gcc/bin/g++
|
||||
fortran: /opt/gcc/bin/gfortran
|
||||
environment:
|
||||
unset:
|
||||
- BAD_VARIABLE
|
||||
set:
|
||||
GOOD_VARIABLE_NUM: 1
|
||||
GOOD_VARIABLE_STR: good
|
||||
prepend_path:
|
||||
PATH: /path/to/binutils
|
||||
append_path:
|
||||
LD_LIBRARY_PATH: /opt/gcc/lib
|
||||
extra_rpaths:
|
||||
- /path/to/some/compiler/runtime/directory
|
||||
- /path/to/some/other/compiler/runtime/directory
|
||||
compilers:
|
||||
- compiler:
|
||||
spec: gcc@4.9.3
|
||||
paths:
|
||||
cc: /opt/gcc/bin/gcc
|
||||
c++: /opt/gcc/bin/g++
|
||||
f77: /opt/gcc/bin/gfortran
|
||||
fc: /opt/gcc/bin/gfortran
|
||||
environment:
|
||||
unset:
|
||||
- BAD_VARIABLE
|
||||
set:
|
||||
GOOD_VARIABLE_NUM: 1
|
||||
GOOD_VARIABLE_STR: good
|
||||
prepend_path:
|
||||
PATH: /path/to/binutils
|
||||
append_path:
|
||||
LD_LIBRARY_PATH: /opt/gcc/lib
|
||||
extra_rpaths:
|
||||
- /path/to/some/compiler/runtime/directory
|
||||
- /path/to/some/other/compiler/runtime/directory
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -1769,24 +1761,19 @@ Verifying installations
|
||||
The ``spack verify`` command can be used to verify the validity of
|
||||
Spack-installed packages any time after installation.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack verify manifest``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
At installation time, Spack creates a manifest of every file in the
|
||||
installation prefix. For links, Spack tracks the mode, ownership, and
|
||||
destination. For directories, Spack tracks the mode, and
|
||||
ownership. For files, Spack tracks the mode, ownership, modification
|
||||
time, hash, and size. The ``spack verify manifest`` command will check,
|
||||
for every file in each package, whether any of those attributes have
|
||||
changed. It will also check for newly added files or deleted files from
|
||||
the installation prefix. Spack can either check all installed packages
|
||||
time, hash, and size. The Spack verify command will check, for every
|
||||
file in each package, whether any of those attributes have changed. It
|
||||
will also check for newly added files or deleted files from the
|
||||
installation prefix. Spack can either check all installed packages
|
||||
using the `-a,--all` or accept specs listed on the command line to
|
||||
verify.
|
||||
|
||||
The ``spack verify manifest`` command can also verify for individual files
|
||||
that they haven't been altered since installation time. If the given file
|
||||
The ``spack verify`` command can also verify for individual files that
|
||||
they haven't been altered since installation time. If the given file
|
||||
is not in a Spack installation prefix, Spack will report that it is
|
||||
not owned by any package. To check individual files instead of specs,
|
||||
use the ``-f,--files`` option.
|
||||
@@ -1801,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
|
||||
``upstream`` spack instances) and the ``-j,--json`` option to output
|
||||
machine-readable json data for any errors.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack verify libraries``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``spack verify libraries`` command can be used to verify that packages
|
||||
do not have accidental system dependencies. This command scans the install
|
||||
prefixes of packages for executables and shared libraries, and resolves
|
||||
their needed libraries in their RPATHs. When needed libraries cannot be
|
||||
located, an error is reported. This typically indicates that a package
|
||||
was linked against a system library, instead of a library provided by
|
||||
a Spack package.
|
||||
|
||||
This verification can also be enabled as a post-install hook by setting
|
||||
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
|
||||
in :ref:`config.yaml <config-yaml>`.
|
||||
|
||||
-----------------------
|
||||
Filesystem requirements
|
||||
-----------------------
|
||||
|
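The ``spack verify libraries`` section above mentions that the check can also run as a post-install hook via ``config:shared_linking:missing_library_policy``. A minimal sketch of that setting, assuming the nesting implied by the colon-separated path:

.. code-block:: yaml

   # Sketch: enable the missing-library check as a post-install hook
   config:
     shared_linking:
       missing_library_policy: error   # or "warn"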
@@ -34,6 +34,7 @@ an object, with the following keys:
|
||||
|
||||
1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
|
||||
2. ``from``: list of sources from which reused specs are taken
|
||||
3. ``namespaces``: list of namespaces from which to reuse specs, or the string ``"any"``.
|
||||
|
||||
Each source in ``from`` is itself an object:
|
||||
|
||||
@@ -56,6 +57,7 @@ For instance, the following configuration:
|
||||
concretizer:
|
||||
reuse:
|
||||
roots: true
|
||||
namespaces: [builtin]
|
||||
from:
|
||||
- type: local
|
||||
include:
|
||||
@@ -63,7 +65,8 @@ For instance, the following configuration:
|
||||
- "%clang"
|
||||
|
||||
tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang``, that are installed
|
||||
in the local store. Any spec from remote buildcaches is disregarded.
|
||||
in the local store. Any spec from remote buildcaches is disregarded. Any spec from a namespace other than
|
||||
Spack's builtin repo is disregarded.
|
||||
|
||||
To reduce the boilerplate in configuration files, default values for the ``include`` and
|
||||
``exclude`` options can be pushed up one level:
|
||||
|
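The paragraph above ends by noting that ``include`` and ``exclude`` defaults can be pushed up one level, but the excerpt does not show the resulting layout; the following is only a sketch of what such a configuration might look like:

.. code-block:: yaml

   # Sketch only: a shared include filter pushed up from the individual sources
   concretizer:
     reuse:
       roots: true
       include:
         - "%gcc"
         - "%clang"
       from:
         - type: local
         - type: buildcache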
@@ -63,6 +63,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
build_systems/cudapackage
|
||||
build_systems/custompackage
|
||||
build_systems/inteloneapipackage
|
||||
build_systems/intelpackage
|
||||
build_systems/rocmpackage
|
||||
build_systems/sourceforgepackage
|
||||
|
||||
|
@@ -33,6 +33,9 @@ For more information on a specific package, do::
|
||||
|
||||
spack info --all <package-name>
|
||||
|
||||
Intel no longer releases new versions of Parallel Studio, which can be
|
||||
used in Spack via the :ref:`intelpackage`. All of its components can
|
||||
now be found in oneAPI.
|
||||
|
||||
Examples
|
||||
========
|
||||
@@ -47,8 +50,34 @@ Install the oneAPI compilers::
|
||||
|
||||
spack install intel-oneapi-compilers
|
||||
|
||||
Add the compilers to your ``compilers.yaml`` so spack can use them::
|
||||
|
||||
To build the ``patchelf`` Spack package with ``icx``, do::
|
||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||
|
||||
Verify that the compilers are available::
|
||||
|
||||
spack compiler list
|
||||
|
||||
Note that 2024 and later releases do not include ``icc``. Before 2024,
|
||||
the package layout was different::
|
||||
|
||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
|
||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
|
||||
|
||||
The ``intel-oneapi-compilers`` package includes 2 families of
|
||||
compilers:
|
||||
|
||||
* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
|
||||
compilers. 2024 and later releases contain ``ifort``, but not
|
||||
``icc`` and ``icpc``.
|
||||
* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
|
||||
compilers based on LLVM.
|
||||
|
||||
To build the ``patchelf`` Spack package with ``icc``, do::
|
||||
|
||||
spack install patchelf%intel
|
||||
|
||||
To build with ``icx``, do::
|
||||
|
||||
spack install patchelf%oneapi
|
||||
|
||||
@@ -63,6 +92,15 @@ Install the oneAPI compilers::
|
||||
|
||||
spack install intel-oneapi-compilers
|
||||
|
||||
Add the compilers to your ``compilers.yaml`` so Spack can use them::
|
||||
|
||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||
|
||||
Verify that the compilers are available::
|
||||
|
||||
spack compiler list
|
||||
|
||||
Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::
|
||||
|
||||
git clone https://github.com/spack/spack-configs
|
||||
@@ -111,7 +149,7 @@ Compilers
|
||||
---------
|
||||
|
||||
To use the compilers, add some information about the installation to
|
||||
``packages.yaml``. For most users, it is sufficient to do::
|
||||
``compilers.yaml``. For most users, it is sufficient to do::
|
||||
|
||||
spack compiler add /opt/intel/oneapi/compiler/latest/bin
|
||||
|
||||
@@ -119,7 +157,7 @@ Adapt the paths above if you did not install the tools in the default
|
||||
location. After adding the compilers, using them is the same
|
||||
as if you had installed the ``intel-oneapi-compilers`` package.
|
||||
Another option is to manually add the configuration to
|
||||
``packages.yaml`` as described in :ref:`Compiler configuration
|
||||
``compilers.yaml`` as described in :ref:`Compiler configuration
|
||||
<compiler-config>`.
|
||||
|
||||
Before 2024, the directory structure was different::
|
||||
@@ -162,5 +200,15 @@ You can also use Spack-installed libraries. For example::
|
||||
Will update your environment CPATH, LIBRARY_PATH, and other
|
||||
environment variables for building an application with oneMKL.
|
||||
|
||||
More information
|
||||
================
|
||||
|
||||
This section describes basic use of oneAPI, especially if it has
|
||||
changed compared to Parallel Studio. See :ref:`intelpackage` for more
|
||||
information on :ref:`intel-virtual-packages`,
|
||||
:ref:`intel-unrelated-packages`,
|
||||
:ref:`intel-integrating-external-libraries`, and
|
||||
:ref:`using-mkl-tips`.
|
||||
|
||||
|
||||
.. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
|
||||
|
1077 lib/spack/docs/build_systems/intelpackage.rst (new file; diff suppressed because it is too large)
@@ -12,7 +12,8 @@ The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPacka
|
||||
it provides standard variants, dependencies, and conflicts to facilitate building
|
||||
packages using GPUs though for AMD in this case.
|
||||
|
||||
You can find the source for this package (and suggestions for setting up your ``packages.yaml`` file) at
|
||||
You can find the source for this package (and suggestions for setting up your
|
||||
``compilers.yaml`` and ``packages.yaml`` files) at
|
||||
`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.
|
||||
|
||||
^^^^^^^^
|
||||
|
@@ -223,10 +223,6 @@ def setup(sphinx):
|
||||
("py:class", "spack.compiler.CompilerCache"),
|
||||
# TypeVar that is not handled correctly
|
||||
("py:class", "llnl.util.lang.T"),
|
||||
("py:class", "llnl.util.lang.KT"),
|
||||
("py:class", "llnl.util.lang.VT"),
|
||||
("py:obj", "llnl.util.lang.KT"),
|
||||
("py:obj", "llnl.util.lang.VT"),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
|
@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
|
||||
by default. Can be purged with :ref:`spack clean --downloads
|
||||
<cmd-spack-clean>`.
|
||||
|
||||
.. _Misc Cache:
|
||||
|
||||
--------------------
|
||||
``misc_cache``
|
||||
--------------------
|
||||
@@ -148,16 +146,15 @@ this can expose you to attacks. Use at your own risk.
|
||||
``ssl_certs``
|
||||
--------------------
|
||||
|
||||
Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path.
|
||||
The default value is set to the environment variable ``SSL_CERT_FILE``
|
||||
to use the same syntax used by many other applications that automatically
|
||||
detect custom certificates.
|
||||
When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
|
||||
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
|
||||
in the subprocess calling ``curl``. If additional ``curl`` arguments are required,
|
||||
they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``.
|
||||
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
|
||||
in the subprocess calling ``curl``.
|
||||
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
|
||||
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
|
||||
will work.
|
||||
In all cases the expanded path must be absolute for Spack to use the certificates.
|
||||
@@ -337,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
|
||||
|
||||
aliases:
|
||||
inst: install -v
|
||||
|
||||
-------------------------------
|
||||
``concretization_cache:enable``
|
||||
-------------------------------
|
||||
|
||||
When set to ``true``, Spack will utilize a cache of solver outputs from
|
||||
successful concretization runs. When enabled, Spack will check the concretization
|
||||
cache prior to running the solver. If a previous request to solve a given
|
||||
problem is present in the cache, Spack will load the concrete specs and other
|
||||
solver data from the cache rather than running the solver. Specs not previously
|
||||
concretized will be added to the cache on a successful solve. The cache additionally
|
||||
holds solver statistics, so commands like ``spack solve`` will still return information
|
||||
about the run that produced a given solver result.
|
||||
|
||||
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
|
||||
Cache is cleaned.
|
||||
|
||||
When ``false`` or omitted, all concretization requests will be performed from scratch.
|
||||
|
||||
----------------------------
|
||||
``concretization_cache:url``
|
||||
----------------------------
|
||||
|
||||
Path to the location where Spack will root the concretization cache. Currently this only supports
|
||||
paths on the local filesystem.
|
||||
|
||||
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
|
||||
|
||||
------------------------------------
|
||||
``concretization_cache:entry_limit``
|
||||
------------------------------------
|
||||
|
||||
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
|
||||
after each concretization run; if Spack has stored more results than the limit allows, the
|
||||
oldest concretization results are pruned until 10% of the limit has been removed.
|
||||
|
||||
Setting this value to 0 disables the automatic pruning. It is expected users will be
|
||||
responsible for maintaining this cache.
|
||||
|
||||
-----------------------------------
|
||||
``concretization_cache:size_limit``
|
||||
-----------------------------------
|
||||
|
||||
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
|
||||
after each concretization run; if Spack has stored more results than the limit allows, the
|
||||
oldest concretization results are pruned until 10% of the limit has been removed.
|
||||
|
||||
Setting this value to 0 disables the automatic pruning. It is expected users will be
|
||||
responsible for maintaining this cache.
|
||||
|
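The subsections above describe the concretization cache options individually. A consolidated sketch, assuming the nesting implied by the ``concretization_cache:<option>`` headings; the numeric limits are placeholders:

.. code-block:: yaml

   # Sketch: concretization cache options gathered into one config.yaml block
   config:
     concretization_cache:
       enable: true
       url: $misc_cache/concretization   # default root, under the misc cache
       entry_limit: 1000                 # 0 disables automatic pruning
       size_limit: 300000000             # in bytes; 0 disables automatic pruning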
@@ -11,10 +11,9 @@ Configuration Files
|
||||
Spack has many configuration files. Here is a quick list of them, in
|
||||
case you want to skip directly to specific docs:
|
||||
|
||||
* :ref:`packages.yaml <compiler-config>`
|
||||
* :ref:`compilers.yaml <compiler-config>`
|
||||
* :ref:`concretizer.yaml <concretizer-options>`
|
||||
* :ref:`config.yaml <config-yaml>`
|
||||
* :ref:`include.yaml <include-yaml>`
|
||||
* :ref:`mirrors.yaml <mirrors>`
|
||||
* :ref:`modules.yaml <modules>`
|
||||
* :ref:`packages.yaml <packages-config>`
|
||||
@@ -46,12 +45,6 @@ Each Spack configuration file is nested under a top-level section
|
||||
corresponding to its name. So, ``config.yaml`` starts with ``config:``,
|
||||
``mirrors.yaml`` starts with ``mirrors:``, etc.
|
||||
|
||||
.. tip::
|
||||
|
||||
Validation and autocompletion of Spack config files can be enabled in
|
||||
your editor with the YAML language server. See `spack/schemas
|
||||
<https://github.com/spack/schemas>`_ for more information.
|
||||
|
||||
.. _configuration-scopes:
|
||||
|
||||
--------------------
|
||||
@@ -101,7 +94,7 @@ are six configuration scopes. From lowest to highest:
|
||||
precedence over all other scopes.
|
||||
|
||||
Each configuration directory may contain several configuration files,
|
||||
such as ``config.yaml``, ``packages.yaml``, or ``mirrors.yaml``. When
|
||||
such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
|
||||
configurations conflict, settings from higher-precedence scopes override
|
||||
lower-precedence settings.
|
||||
|
||||
|
@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
|
||||
version (and other constraints) passed as the spec argument to the
|
||||
``spack develop`` command.
|
||||
|
||||
When working deep in the graph it is often desirable to have multiple specs marked
|
||||
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
|
||||
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
|
||||
to ensure that all the dependents of the initial spec you provide are also marked
|
||||
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
|
||||
so the graph can be traversed from the supplied spec all the way to the root specs.
|
||||
|
||||
For packages with ``git`` attributes, git branches, tags, and commits can
|
||||
also be used as valid concrete versions (see :ref:`version-specifier`).
|
||||
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
||||
@@ -667,56 +660,34 @@ a ``packages.yaml`` file) could contain:
|
||||
# ...
|
||||
packages:
|
||||
all:
|
||||
providers:
|
||||
mpi: [openmpi]
|
||||
compiler: [intel]
|
||||
# ...
|
||||
|
||||
This configuration sets the default mpi provider to be openmpi.
|
||||
This configuration sets the default compiler for all packages to
|
||||
``intel``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Included configurations
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack environments allow an ``include`` heading in their yaml schema.
|
||||
This heading pulls in external configuration files and applies them to
|
||||
the environment.
|
||||
Spack environments allow an ``include`` heading in their yaml
|
||||
schema. This heading pulls in external configuration files and applies
|
||||
them to the environment.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
include:
|
||||
- environment/relative/path/to/config.yaml
|
||||
- path: https://github.com/path/to/raw/config/compilers.yaml
|
||||
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
|
||||
- relative/path/to/config.yaml
|
||||
- https://github.com/path/to/raw/config/compilers.yaml
|
||||
- /absolute/path/to/packages.yaml
|
||||
- path: /path/to/$os/$target/environment
|
||||
optional: true
|
||||
- path: /path/to/os-specific/config-dir
|
||||
when: os == "ventura"
|
||||
|
||||
Included configuration files are required *unless* they are explicitly optional
|
||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
||||
with the ``optional`` clause and conditional with the ``when`` clause. (See
|
||||
:ref:`include-yaml` for more information on optional and conditional entries.)
|
||||
|
||||
Files are listed using paths to individual files or directories containing them.
|
||||
Path entries may be absolute or relative to the environment or specified as
|
||||
URLs. URLs to individual files must link to the **raw** form of the file's
|
||||
contents (e.g., `GitHub
|
||||
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
|
||||
or `GitLab
|
||||
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file.
|
||||
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
|
||||
supported. Spack-specific, environment and user path variables can be used.
|
||||
(See :ref:`config-file-variables` for more information.)
|
||||
|
||||
.. warning::
|
||||
|
||||
Recursive includes are not currently processed in a breadth-first manner
|
||||
so the value of a configuration option that is altered by multiple included
|
||||
files may not be what you expect. This will be addressed in a future
|
||||
update.
|
||||
|
||||
Environments can include files or URLs. File paths can be relative or
|
||||
absolute. URLs include the path to the text for individual files or
|
||||
can be the path to a directory containing configuration files.
|
||||
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
|
||||
schemes). Spack-specific, environment and user path variables may be
|
||||
used in these paths. See :ref:`config-file-variables` for more information.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuration precedence
|
||||
|
161 lib/spack/docs/example_files/spack.yaml (new file)
@@ -0,0 +1,161 @@
|
||||
spack:
|
||||
definitions:
|
||||
- compiler-pkgs:
|
||||
- 'llvm+clang@6.0.1 os=centos7'
|
||||
- 'gcc@6.5.0 os=centos7'
|
||||
- 'llvm+clang@6.0.1 os=ubuntu18.04'
|
||||
- 'gcc@6.5.0 os=ubuntu18.04'
|
||||
- pkgs:
|
||||
- readline@7.0
|
||||
# - xsdk@0.4.0
|
||||
- compilers:
|
||||
- '%gcc@5.5.0'
|
||||
- '%gcc@6.5.0'
|
||||
- '%gcc@7.3.0'
|
||||
- '%clang@6.0.0'
|
||||
- '%clang@6.0.1'
|
||||
- oses:
|
||||
- os=ubuntu18.04
|
||||
- os=centos7
|
||||
|
||||
specs:
|
||||
- matrix:
|
||||
- [$pkgs]
|
||||
- [$compilers]
|
||||
- [$oses]
|
||||
exclude:
|
||||
- '%gcc@7.3.0 os=centos7'
|
||||
- '%gcc@5.5.0 os=ubuntu18.04'
|
||||
|
||||
mirrors:
|
||||
cloud_gitlab: https://mirror.spack.io
|
||||
|
||||
compilers:
|
||||
# The .gitlab-ci.yml for this project picks a Docker container which does
|
||||
# not have any compilers pre-built and ready to use, so we need to fake the
|
||||
# existence of those here.
|
||||
- compiler:
|
||||
operating_system: centos7
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: gcc@5.5.0
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: centos7
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: gcc@6.5.0
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: centos7
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: clang@6.0.0
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: centos7
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: clang@6.0.1
|
||||
target: x86_64
|
||||
|
||||
- compiler:
|
||||
operating_system: ubuntu18.04
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: clang@6.0.0
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: ubuntu18.04
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: clang@6.0.1
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: ubuntu18.04
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: gcc@6.5.0
|
||||
target: x86_64
|
||||
- compiler:
|
||||
operating_system: ubuntu18.04
|
||||
modules: []
|
||||
paths:
|
||||
cc: /not/used
|
||||
cxx: /not/used
|
||||
f77: /not/used
|
||||
fc: /not/used
|
||||
spec: gcc@7.3.0
|
||||
target: x86_64
|
||||
|
||||
gitlab-ci:
|
||||
bootstrap:
|
||||
- name: compiler-pkgs
|
||||
compiler-agnostic: true
|
||||
mappings:
|
||||
- # spack-cloud-ubuntu
|
||||
match:
|
||||
# these are specs, if *any* match the spec under consideration, this
|
||||
# 'mapping' will be used to generate the CI job
|
||||
- os=ubuntu18.04
|
||||
runner-attributes:
|
||||
# 'tags' and 'image' go directly onto the job, 'variables' will
|
||||
# be added to what we already necessarily create for the job as
|
||||
# a part of the CI workflow
|
||||
tags:
|
||||
- spack-k8s
|
||||
image:
|
||||
name: scottwittenburg/spack_builder_ubuntu_18.04
|
||||
entrypoint: [""]
|
||||
- # spack-cloud-centos
|
||||
match:
|
||||
# these are specs, if *any* match the spec under consideration, this
|
||||
# 'mapping' will be used to generate the CI job
|
||||
- 'os=centos7'
|
||||
runner-attributes:
|
||||
tags:
|
||||
- spack-k8s
|
||||
image:
|
||||
name: scottwittenburg/spack_builder_centos_7
|
||||
entrypoint: [""]
|
||||
|
||||
cdash:
|
||||
build-group: Release Testing
|
||||
url: http://cdash
|
||||
project: Spack Testing
|
||||
site: Spack Docker-Compose Workflow
|
||||
|
||||
repos: []
|
||||
upstreams: {}
|
||||
modules:
|
||||
enable: []
|
||||
packages: {}
|
||||
config: {}
|
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
|
||||
|
||||
.. note::
|
||||
|
||||
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
|
||||
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
|
||||
|
||||
The following set of criteria (from lowest to highest precedence) explain
|
||||
common cases where concretization output may seem surprising at first.
|
||||
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
|
||||
concretizer:
|
||||
reuse: dependencies # other options are 'true' and 'false'
|
||||
|
||||
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
|
||||
are higher priority than reuse, and can be used to strongly prefer a specific version
|
||||
or variant, without erroring out if it's not possible. Strong preferences are specified
|
||||
as follows:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
foo:
|
||||
prefer:
|
||||
- "@1.1: ~mpi"
|
||||
|
||||
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
||||
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
||||
and constraints from the command line as well as ``package.py`` files override all
|
||||
of the above. Requirements are specified as follows:
|
||||
|
||||
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
|
||||
foo:
|
||||
require:
|
||||
- "@1.2: +mpi"
|
||||
conflicts:
|
||||
- "@1.4"
|
||||
|
||||
Requirements and constraints restrict the set of possible solutions, while reuse
|
||||
behavior and preferences influence what an optimal solution looks like.
|
||||
|
@@ -254,11 +254,12 @@ directory.
|
||||
Compiler configuration
|
||||
----------------------
|
||||
|
||||
Spack has the ability to build packages with multiple compilers and compiler versions.
|
||||
Compilers can be made available to Spack by specifying them manually in ``packages.yaml``,
|
||||
or automatically by running ``spack compiler find``.
|
||||
For convenience, Spack will automatically detect compilers the first time it needs them,
|
||||
if none is available.
|
||||
Spack has the ability to build packages with multiple compilers and
|
||||
compiler versions. Compilers can be made available to Spack by
|
||||
specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
|
||||
or automatically by running ``spack compiler find``, but for
|
||||
convenience Spack will automatically detect compilers the first time
|
||||
it needs them.
|
||||
|
||||
.. _cmd-spack-compilers:
|
||||
|
||||
@@ -273,11 +274,16 @@ compilers`` or ``spack compiler list``:
|
||||
|
||||
$ spack compilers
|
||||
==> Available compilers
|
||||
-- gcc ubuntu20.04-x86_64 ---------------------------------------
|
||||
gcc@9.4.0 gcc@8.4.0 gcc@10.5.0
|
||||
|
||||
-- llvm ubuntu20.04-x86_64 --------------------------------------
|
||||
llvm@12.0.0 llvm@11.0.0 llvm@10.0.0
|
||||
-- gcc ---------------------------------------------------------
|
||||
gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
|
||||
gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
|
||||
-- intel -------------------------------------------------------
|
||||
intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
|
||||
intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
|
||||
intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
|
||||
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
||||
-- clang -------------------------------------------------------
|
||||
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
||||
|
||||
Any of these compilers can be used to build Spack packages. More on
|
||||
how this is done is in :ref:`sec-specs`.
|
||||
@@ -296,22 +302,16 @@ An alias for ``spack compiler find``.
|
||||
``spack compiler find``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you do not see a compiler in the list shown by:
|
||||
Lists the compilers currently available to Spack. If you do not see
|
||||
a compiler in this list, but you want to use it with Spack, you can
|
||||
simply run ``spack compiler find`` with the path to where the
|
||||
compiler is installed. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack compiler list
|
||||
|
||||
but you want to use it with Spack, you can simply run ``spack compiler find`` with the
|
||||
path to where the compiler is installed. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack compiler find /opt/intel/oneapi/compiler/2025.1/bin/
|
||||
==> Added 1 new compiler to /home/user/.spack/packages.yaml
|
||||
intel-oneapi-compilers@2025.1.0
|
||||
==> Compilers are defined in the following files:
|
||||
/home/user/.spack/packages.yaml
|
||||
$ spack compiler find /usr/local/tools/ic-13.0.079
|
||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||
intel@13.0.079
|
||||
|
||||
Or you can run ``spack compiler find`` with no arguments to force
|
||||
auto-detection. This is useful if you do not know where compilers are
|
||||
@@ -322,7 +322,7 @@ installed, but you know that new compilers have been added to your
|
||||
|
||||
$ module load gcc/4.9.0
|
||||
$ spack compiler find
|
||||
==> Added 1 new compiler to /home/user/.spack/packages.yaml
|
||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||
gcc@4.9.0
|
||||
|
||||
This loads the environment module for gcc-4.9.0 to add it to
|
||||
@@ -331,7 +331,7 @@ This loads the environment module for gcc-4.9.0 to add it to
|
||||
.. note::
|
||||
|
||||
By default, spack does not fill in the ``modules:`` field in the
|
||||
``packages.yaml`` file. If you are using a compiler from a
|
||||
``compilers.yaml`` file. If you are using a compiler from a
|
||||
module, then you should add this field manually.
|
||||
See the section on :ref:`compilers-requiring-modules`.
|
||||
|
||||
@@ -341,82 +341,91 @@ This loads the environment module for gcc-4.9.0 to add it to
|
||||
``spack compiler info``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you want to see additional information on some specific compilers, you can run ``spack compiler info`` on it:
|
||||
If you want to see specifics on a particular compiler, you can run
|
||||
``spack compiler info`` on it:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack compiler info gcc
|
||||
gcc@=8.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
||||
prefix: /usr
|
||||
compilers:
|
||||
c: /usr/bin/gcc-8
|
||||
cxx: /usr/bin/g++-8
|
||||
fortran: /usr/bin/gfortran-8
|
||||
$ spack compiler info intel@15
|
||||
intel@15.0.0:
|
||||
paths:
|
||||
cc = /usr/local/bin/icc-15.0.090
|
||||
cxx = /usr/local/bin/icpc-15.0.090
|
||||
f77 = /usr/local/bin/ifort-15.0.090
|
||||
fc = /usr/local/bin/ifort-15.0.090
|
||||
modules = []
|
||||
operating_system = centos6
|
||||
...
|
||||
|
||||
gcc@=9.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
||||
prefix: /usr
|
||||
compilers:
|
||||
c: /usr/bin/gcc
|
||||
cxx: /usr/bin/g++
|
||||
fortran: /usr/bin/gfortran
|
||||
|
||||
gcc@=10.5.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
||||
prefix: /usr
|
||||
compilers:
|
||||
c: /usr/bin/gcc-10
|
||||
cxx: /usr/bin/g++-10
|
||||
fortran: /usr/bin/gfortran-10
|
||||
|
||||
This shows the details of the compilers that were detected by Spack.
|
||||
Notice also that we didn't have to be too specific about the version. We just said ``gcc``, and we got information
|
||||
about all the matching compilers.
|
||||
This shows which C, C++, and Fortran compilers were detected by Spack.
|
||||
Notice also that we didn't have to be too specific about the
|
||||
version. We just said ``intel@15``, and information about the only
|
||||
matching Intel compiler was displayed.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Manual compiler configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If auto-detection fails, you can manually configure a compiler by editing your ``~/.spack/packages.yaml`` file.
|
||||
You can do this by running ``spack config edit packages``, which will open the file in
|
||||
If auto-detection fails, you can manually configure a compiler by
|
||||
editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
|
||||
``spack config edit compilers``, which will open the file in
|
||||
:ref:`your favorite editor <controlling-the-editor>`.
|
||||
|
||||
Each compiler has an "external" entry in the file with some ``extra_attributes``:
|
||||
Each compiler configuration in the file looks like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
gcc:
|
||||
externals:
|
||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /usr/bin/gcc-10
|
||||
cxx: /usr/bin/g++-10
|
||||
fortran: /usr/bin/gfortran-10
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /usr/local/bin/icc-15.0.024-beta
|
||||
cxx: /usr/local/bin/icpc-15.0.024-beta
|
||||
f77: /usr/local/bin/ifort-15.0.024-beta
|
||||
fc: /usr/local/bin/ifort-15.0.024-beta
|
||||
spec: intel@15.0.0
|
||||
|
||||
The compiler executables are listed under ``extra_attributes:compilers``, and are keyed by language.
|
||||
Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``.
|
||||
For compilers that do not support Fortran (like ``clang``), put
|
||||
``None`` for ``f77`` and ``fc``:
|
||||
|
||||
You can also add compiler flags to manually configured compilers. These flags should be specified in the
|
||||
``flags`` section of the compiler specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: None
|
||||
fc: None
|
||||
spec: clang@3.3svn
|
||||
|
||||
Once you save the file, the configured compilers will show up in the
|
||||
list displayed by ``spack compilers``.
|
||||
|
||||
You can also add compiler flags to manually configured compilers. These
|
||||
flags should be specified in the ``flags`` section of the compiler
|
||||
specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
|
||||
``cppflags``, ``ldflags``, and ``ldlibs``. For example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
gcc:
|
||||
externals:
|
||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /usr/bin/gcc-10
|
||||
cxx: /usr/bin/g++-10
|
||||
fortran: /usr/bin/gfortran-10
|
||||
flags:
|
||||
cflags: -O3 -fPIC
|
||||
cxxflags: -O3 -fPIC
|
||||
cppflags: -O3 -fPIC
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /usr/bin/gcc
|
||||
cxx: /usr/bin/g++
|
||||
f77: /usr/bin/gfortran
|
||||
fc: /usr/bin/gfortran
|
||||
flags:
|
||||
cflags: -O3 -fPIC
|
||||
cxxflags: -O3 -fPIC
|
||||
cppflags: -O3 -fPIC
|
||||
spec: gcc@4.7.2
|
||||
|
||||
These flags will be treated by spack as if they were entered from
|
||||
the command line each time this compiler is used. The compiler wrappers
|
||||
@@ -431,44 +440,95 @@ These variables should be specified in the ``environment`` section of the compil
|
||||
specification. The operations available to modify the environment are ``set``, ``unset``,
|
||||
``prepend_path``, ``append_path``, and ``remove_path``. For example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
|
||||
cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
|
||||
f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||
fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||
spec: oneapi@latest
|
||||
environment:
|
||||
set:
|
||||
MKL_ROOT: "/path/to/mkl/root"
|
||||
unset: # A list of environment variables to unset
|
||||
- CC
|
||||
prepend_path: # Similar for append|remove_path
|
||||
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
||||
|
||||
.. note::
|
||||
|
||||
Spack is in the process of moving compilers from a separate
|
||||
attribute to be handled like all other packages. As part of this
|
||||
process, the ``compilers.yaml`` section will eventually be replaced
|
||||
by configuration in the ``packages.yaml`` section. This new
|
||||
configuration is now available, although it is not yet the default
|
||||
behavior.
|
||||
|
||||
Compilers can also be configured as external packages in the
|
||||
``packages.yaml`` config file. Any external package for a compiler
|
||||
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
|
||||
assuming the paths to the compiler executables are determinable from
|
||||
the prefix.
|
||||
|
||||
If the paths to the compiler executable are not determinable from the
|
||||
prefix, you can add them to the ``extra_attributes`` field. Similarly,
|
||||
all other fields from the compilers config can be added to the
|
||||
``extra_attributes`` field for an external representing a compiler.
|
||||
|
||||
Note that the format for the ``paths`` field in the
|
||||
``extra_attributes`` section is different than in the ``compilers``
|
||||
config. For compilers configured as external packages, the section is
|
||||
named ``compilers`` and the dictionary maps language names (``c``,
|
||||
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
|
||||
``fc``, and ``f77``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
intel-oneapi-compilers:
|
||||
externals:
|
||||
- spec: intel-oneapi-compilers@2025.1.0
|
||||
prefix: /opt/intel/oneapi
|
||||
gcc:
|
||||
external:
|
||||
- spec: gcc@12.2.0 arch=linux-rhel8-skylake
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /opt/intel/oneapi/compiler/2025.1/bin/icx
|
||||
cxx: /opt/intel/oneapi/compiler/2025.1/bin/icpx
|
||||
fortran: /opt/intel/oneapi/compiler/2025.1/bin/ifx
|
||||
environment:
|
||||
set:
|
||||
MKL_ROOT: "/path/to/mkl/root"
|
||||
unset: # A list of environment variables to unset
|
||||
- CC
|
||||
prepend_path: # Similar for append|remove_path
|
||||
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
||||
GCC_ROOT: /usr
|
||||
external:
|
||||
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /usr/bin/clang-with-suffix
|
||||
cxx: /usr/bin/clang++-with-extra-info
|
||||
fortran: /usr/bin/gfortran
|
||||
extra_rpaths:
|
||||
- /usr/lib/llvm/
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build Your Own Compiler
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you are particular about which compiler/version you use, you might wish to have Spack build it for you.
|
||||
For example:
|
||||
If you are particular about which compiler/version you use, you might
|
||||
wish to have Spack build it for you. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install gcc@14+binutils
|
||||
$ spack install gcc@4.9.3
|
||||
|
||||
Once the compiler is installed, you can start using it without additional configuration:
|
||||
Once that has finished, you will need to add it to your
|
||||
``compilers.yaml`` file. You can then set Spack to use it by default
|
||||
by adding the following to your ``packages.yaml`` file:
|
||||
|
||||
.. code-block:: console
|
||||
.. code-block:: yaml
|
||||
|
||||
$ spack install hdf5~mpi %gcc@14
|
||||
|
||||
The same holds true for compilers that are made available from buildcaches, when reusing them is allowed.
|
||||
packages:
|
||||
all:
|
||||
compiler: [gcc@4.9.3]
|
||||
|
||||
.. _compilers-requiring-modules:
|
||||
|
||||
@@ -476,26 +536,30 @@ The same holds true for compilers that are made available from buildcaches, when
|
||||
Compilers Requiring Modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Many installed compilers will work regardless of the environment they are called with.
|
||||
However, some installed compilers require environment variables to be set in order to run;
|
||||
this is typical for Intel and other proprietary compilers.
|
||||
Many installed compilers will work regardless of the environment they
|
||||
are called with. However, some installed compilers require
|
||||
``$LD_LIBRARY_PATH`` or other environment variables to be set in order
|
||||
to run; this is typical for Intel and other proprietary compilers.
|
||||
|
||||
On typical HPC clusters, these environment modifications are usually delegated to some "module" system.
|
||||
In such a case, you should tell Spack which module(s) to load in order to run the chosen compiler:
|
||||
In such a case, you should tell Spack which module(s) to load in order
|
||||
to run the chosen compiler (if the compiler does not come with a
|
||||
module file, you might consider making one by hand). Spack will load
|
||||
this module into the environment ONLY when the compiler is run, and
|
||||
NOT in general for a package's ``install()`` method. See, for
|
||||
example, this ``compilers.yaml`` file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
gcc:
|
||||
externals:
|
||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
||||
prefix: /opt/compilers
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /opt/compilers/bin/gcc-10
|
||||
cxx: /opt/compilers/bin/g++-10
|
||||
fortran: /opt/compilers/bin/gfortran-10
|
||||
modules: [gcc/10.5.0]
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: [other/comp/gcc-5.3-sp3]
|
||||
operating_system: SuSE11
|
||||
paths:
|
||||
cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
|
||||
cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
|
||||
f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
|
||||
fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
|
||||
spec: gcc@5.3.0
|
||||
|
||||
Some compilers require special environment settings to be loaded not just
|
||||
to run, but also to execute the code they build, breaking packages that
|
||||
@@ -516,7 +580,7 @@ Licensed Compilers
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Some proprietary compilers require a license to use. If you need to
use a licensed compiler, the process is similar to a mix of
use a licensed compiler (e.g., PGI), the process is similar to a mix of
|
||||
build your own, plus modules:
|
||||
|
||||
#. Create a Spack package (if it doesn't exist already) to install
|
||||
@@ -526,21 +590,24 @@ build your own, plus modules:
|
||||
using Spack to load the module it just created, and running simple
|
||||
builds (e.g., ``cc helloWorld.c && ./a.out``)
|
||||
|
||||
#. Add the newly-installed compiler to ``packages.yaml`` as shown above.
|
||||
#. Add the newly-installed compiler to ``compilers.yaml`` as shown
|
||||
above.
|
||||
|
||||
.. _mixed-toolchains:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Fortran compilers on macOS
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^
|
||||
Mixed Toolchains
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Modern compilers typically come with related compilers for C, C++ and
|
||||
Fortran bundled together. When possible, results are best if the same
|
||||
compiler is used for all languages.
|
||||
|
||||
In some cases, this is not possible. For example, XCode on macOS provides no Fortran compilers.
|
||||
The user is therefore forced to use a mixed toolchain: XCode-provided Clang for C/C++ and e.g.
|
||||
GNU ``gfortran`` for Fortran.
|
||||
In some cases, this is not possible. For example, starting with macOS El
|
||||
Capitan (10.11), many packages no longer build with GCC, but XCode
|
||||
provides no Fortran compilers. The user is therefore forced to use a
|
||||
mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
|
||||
Fortran.
|
||||
|
||||
#. You need to make sure that Xcode is installed. Run the following command:
|
||||
|
||||
@@ -593,25 +660,45 @@ GNU ``gfortran`` for Fortran.
|
||||
|
||||
Note: the flag is ``-license``, not ``--license``.
|
||||
|
||||
#. Run ``spack compiler find`` to locate Clang.
|
||||
|
||||
#. There are different ways to get ``gfortran`` on macOS. For example, you can
|
||||
install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
|
||||
gcc``), or from a `DMG installer
|
||||
<https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.
|
||||
|
||||
#. Run ``spack compiler find`` to locate both Apple-Clang and GCC.
|
||||
#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
|
||||
the path to ``gfortran``:
|
||||
|
||||
Since languages in Spack are modeled as virtual packages, ``apple-clang`` will be used to provide
|
||||
C and C++, while GCC will be used for Fortran.
|
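A minimal sketch of what this can look like in ``packages.yaml`` (the versions, prefixes, and ``gfortran`` path below are placeholders, not a verbatim recommendation):

.. code-block:: yaml

   packages:
     apple-clang:
       externals:
       - spec: apple-clang@15.0.0
         prefix: /usr
     gcc:
       externals:
       - spec: gcc@13.2.0 languages='fortran'
         prefix: /opt/homebrew
         extra_attributes:
           compilers:
             fortran: /opt/homebrew/bin/gfortran-13

In most cases ``spack compiler find`` will generate similar entries for you once both toolchains are installed.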
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
# ...
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: /path/to/bin/gfortran
|
||||
fc: /path/to/bin/gfortran
|
||||
spec: apple-clang@11.0.0
|
||||
|
||||
|
||||
If you used Spack to install GCC, you can get the installation prefix with
``spack location -i gcc`` (this will only work if you have a single version
of GCC installed). With Homebrew, GCC is installed in
``/usr/local/Cellar/gcc/x.y.z``; with the DMG installer, the correct path
is ``/usr/local/gfortran``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Compiler Verification
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can verify that your compilers are configured properly by installing a simple package. For example:
|
||||
You can verify that your compilers are configured properly by installing a
|
||||
simple package. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib-ng%gcc@5.3.0
|
||||
$ spack install zlib%gcc@5.3.0
|
||||
|
||||
|
||||
.. _vendor-specific-compiler-configuration:
|
||||
@@ -620,7 +707,9 @@ You can verify that your compilers are configured properly by installing a simpl
|
||||
Vendor-Specific Compiler Configuration
|
||||
--------------------------------------
|
||||
|
||||
This section provides details on how to get vendor-specific compilers working.
|
||||
With Spack, things usually "just work" with GCC. Not so for other
|
||||
compilers. This section provides details on how to get specific
|
||||
compilers working.
|
||||
|
||||
^^^^^^^^^^^^^^^
|
||||
Intel Compilers
|
||||
@@ -642,8 +731,8 @@ compilers:
|
||||
you have installed from the ``PATH`` environment variable.
|
||||
|
||||
If you want to use a version of ``gcc`` or ``g++`` other than the default
|
||||
version on your system, you need to use either the ``--gcc-install-dir``
|
||||
or ``--gcc-toolchain`` compiler option to specify the path to the version of
|
||||
version on your system, you need to use either the ``-gcc-name``
|
||||
or ``-gxx-name`` compiler option to specify the path to the version of
|
||||
``gcc`` or ``g++`` that you want to use."
|
||||
|
||||
-- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
|
||||
@@ -651,12 +740,76 @@ compilers:
|
||||
Intel compilers may therefore be configured in one of two ways with
|
||||
Spack: using modules, or using compiler flags.
|
||||
|
||||
""""""""""""""""""""""""""
|
||||
Configuration with Modules
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
One can control which GCC is seen by the Intel compiler with modules.
|
||||
A module must be loaded both for the Intel Compiler (so it will run)
|
||||
and GCC (so the compiler can find the intended GCC). The following
|
||||
configuration in ``compilers.yaml`` illustrates this technique:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: [gcc-4.9.3, intel-15.0.24]
|
||||
operating_system: centos7
|
||||
paths:
|
||||
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
|
||||
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
|
||||
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||
spec: intel@15.0.24.4.9.3
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
The version number on the Intel compiler is a combination of
|
||||
the "native" Intel version number and the GNU compiler it is
|
||||
targeting.
|
||||
|
||||
""""""""""""""""""""""""""
|
||||
Command Line Configuration
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
One can also control which GCC is seen by the Intel compiler by adding
|
||||
flags to the ``icc`` command:
|
||||
|
||||
#. Identify the location of the compiler you just installed:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack location --install-dir gcc
|
||||
~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
|
||||
|
||||
#. Set up ``compilers.yaml``, for example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: [intel-15.0.24]
|
||||
operating_system: centos7
|
||||
paths:
|
||||
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
|
||||
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
|
||||
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||
flags:
|
||||
cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
|
||||
cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
|
||||
fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
|
||||
spec: intel@15.0.24.4.9.3
|
||||
|
||||
|
||||
^^^
|
||||
NAG
|
||||
^^^
|
||||
|
||||
The Numerical Algorithms Group provides a licensed Fortran compiler.
|
||||
It is recommended to use GCC for your C/C++ compilers.
|
||||
The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
|
||||
this requires you to set up a :ref:`mixed-toolchains`. It is recommended to use
|
||||
GCC for your C/C++ compilers.
|
||||
|
||||
The NAG Fortran compilers are a bit more strict than other compilers, and many
|
||||
packages will fail to install with error messages like:
|
||||
@@ -673,40 +826,44 @@ the command line:
|
||||
|
||||
$ spack install openmpi fflags="-mismatch"
|
||||
|
||||
Or it can be set permanently in your ``packages.yaml``:
|
||||
Or it can be set permanently in your ``compilers.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
nag:
|
||||
externals:
|
||||
- spec: nag@6.1
|
||||
prefix: /opt/nag/bin
|
||||
extra_attributes:
|
||||
compilers:
|
||||
fortran: /opt/nag/bin/nagfor
|
||||
flags:
|
||||
fflags: -mismatch
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
|
||||
cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
|
||||
f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
|
||||
fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
|
||||
flags:
|
||||
fflags: -mismatch
|
||||
spec: nag@6.1
|
||||
|
||||
|
||||
---------------
|
||||
System Packages
|
||||
---------------
|
||||
|
||||
Once compilers are configured, one needs to determine which pre-installed system packages,
|
||||
if any, to use in builds. These are also configured in the ``~/.spack/packages.yaml`` file.
|
||||
For example, to use an OpenMPI installed in /opt/local, one would use:
|
||||
Once compilers are configured, one needs to determine which
|
||||
pre-installed system packages, if any, to use in builds. This is
|
||||
configured in the file ``~/.spack/packages.yaml``. For example, to use
|
||||
an OpenMPI installed in /opt/local, one would use:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
openmpi:
|
||||
buildable: False
|
||||
externals:
|
||||
- spec: openmpi@1.10.1
|
||||
prefix: /opt/local
|
||||
packages:
|
||||
openmpi:
|
||||
externals:
|
||||
- spec: openmpi@1.10.1
|
||||
prefix: /opt/local
|
||||
buildable: False
|
||||
|
||||
In general, *Spack is easier to use and more reliable if it builds all of its own dependencies*.
|
||||
However, there are several packages for which one commonly needs to use system versions:
|
||||
In general, Spack is easier to use and more reliable if it builds all of
|
||||
its own dependencies. However, there are several packages for which one
|
||||
commonly needs to use system versions:
|
||||
|
||||
^^^
|
||||
MPI
|
||||
@@ -719,7 +876,8 @@ you are unlikely to get a working MPI from Spack. Instead, use an
|
||||
appropriate pre-installed MPI.
|
||||
|
||||
If you choose a pre-installed MPI, you should consider using the
|
||||
pre-installed compiler used to build that MPI.
|
||||
pre-installed compiler used to build that MPI; see above on
|
||||
``compilers.yaml``.
|
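For example, a minimal sketch that records which compiler an external MPI was built with (the version, compiler, and prefix below are placeholders):

.. code-block:: yaml

   packages:
     openmpi:
       buildable: False
       externals:
       - spec: openmpi@4.1.5 %gcc@11.3.0
         prefix: /opt/local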
||||
|
||||
^^^^^^^
|
||||
OpenSSL
|
||||
@@ -1283,9 +1441,9 @@ To configure Spack, first run the following command inside the Spack console:
|
||||
spack compiler find
|
||||
|
||||
This creates a ``.staging`` directory in the Spack prefix, along with a ``windows`` subdirectory
containing a ``packages.yaml`` file. On a fresh Windows install with the above packages
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
installed, this command should detect only Microsoft Visual Studio; the Intel Fortran
compiler will be integrated within the first version of MSVC present in the ``packages.yaml``
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
output.
|
||||
|
||||
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
|
||||
|
@@ -23,6 +23,7 @@ components for use by dependent packages:
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [rocmcc@=5.3.0]
|
||||
variants: amdgpu_target=gfx90a
|
||||
hip:
|
||||
buildable: false
|
||||
@@ -69,15 +70,16 @@ This is in combination with the following compiler definition:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
llvm-amdgpu:
|
||||
externals:
|
||||
- spec: llvm-amdgpu@=5.3.0
|
||||
prefix: /opt/rocm-5.3.0
|
||||
compilers:
|
||||
c: /opt/rocm-5.3.0/bin/amdclang
|
||||
cxx: /opt/rocm-5.3.0/bin/amdclang++
|
||||
fortran: null
|
||||
compilers:
|
||||
- compiler:
|
||||
spec: rocmcc@=5.3.0
|
||||
paths:
|
||||
cc: /opt/rocm-5.3.0/bin/amdclang
|
||||
cxx: /opt/rocm-5.3.0/bin/amdclang++
|
||||
f77: null
|
||||
fc: /opt/rocm-5.3.0/bin/amdflang
|
||||
operating_system: rhel8
|
||||
target: x86_64
|
||||
|
||||
This includes the following considerations:
|
||||
|
||||
|
@@ -1,65 +0,0 @@
|
||||
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _include-yaml:
|
||||
|
||||
===============================
|
||||
Include Settings (include.yaml)
|
||||
===============================
|
||||
|
||||
Spack allows you to include configuration files through ``include.yaml``.
|
||||
Using the ``include:`` heading results in pulling in external configuration
|
||||
information to be used by any Spack command.
|
||||
|
||||
Included configuration files are required *unless* they are explicitly optional
|
||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
||||
with the ``optional`` clause and conditional with the ``when`` clause. For
|
||||
example,
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
include:
|
||||
- /path/to/a/required/config.yaml
|
||||
- path: /path/to/$os/$target/config
|
||||
optional: true
|
||||
- path: /path/to/os-specific/config-dir
|
||||
when: os == "ventura"
|
||||
|
||||
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
|
||||
indicates that the included ``config.yaml`` file is required (so it must exist).
|
||||
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
|
||||
the path is only included if it exists. The condition ``os == "ventura"``
|
||||
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
|
||||
path is only included when the operating system (``os``) is ``ventura``.
|
||||
|
||||
The same conditions and variables in `Spec List References
|
||||
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
|
||||
can be used for conditional activation in the ``when`` clauses.
|
||||
|
||||
Included files can be specified by path or by their parent directory.
|
||||
Paths may be absolute, relative (to the configuration file including the path),
|
||||
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
|
||||
schemes) are supported. Spack-specific, environment and user path variables
|
||||
can be used. (See :ref:`config-file-variables` for more information.)
|
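For instance, a sketch mixing these forms (the paths below are placeholders; ``$spack`` and ``$os`` are Spack path variables):

.. code-block:: yaml

   include:
   - $spack/../site-configs/config.yaml
   - path: ~/shared-configs/$os
     optional: true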
||||
|
||||
A ``sha256`` is required for remote file URLs and must be specified as follows:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
include:
|
||||
- path: https://github.com/path/to/raw/config/compilers.yaml
|
||||
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
|
||||
|
||||
Additionally, remote file URLs must link to the **raw** form of the file's
|
||||
contents (e.g., `GitHub
|
||||
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
|
||||
or `GitLab
|
||||
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
|
||||
|
||||
.. warning::
|
||||
|
||||
Recursive includes are not currently processed in a breadth-first manner,
so the value of a configuration option that is altered by multiple included
|
||||
files may not be what you expect. This will be addressed in a future
|
||||
update.
|
@@ -71,7 +71,6 @@ or refer to the full manual below.
|
||||
|
||||
configuration
|
||||
config_yaml
|
||||
include_yaml
|
||||
packages_yaml
|
||||
build_settings
|
||||
environments
|
||||
|
@@ -486,8 +486,6 @@ present. For instance with a configuration like:
|
||||
|
||||
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
||||
|
||||
.. _package-strong-preferences:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Conflicts and strong preferences
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -557,13 +555,14 @@ preferences.
|
||||
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
|
||||
|
||||
|
||||
The ``target`` and ``providers`` preferences
|
||||
Most package preferences (``compilers``, ``target`` and ``providers``)
|
||||
can only be set globally under the ``all`` section of ``packages.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
|
||||
target: [x86_64_v3]
|
||||
providers:
|
||||
mpi: [mvapich2, mpich, openmpi]
|
||||
|
@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
|
||||
- spack --version
|
||||
- cd ${SPACK_CONCRETE_ENV_DIR}
|
||||
- spack env activate --without-view .
|
||||
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
|
||||
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
|
||||
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
|
||||
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
|
||||
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
|
||||
@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
|
||||
build group on CDash called "Release Testing" (that group will be created if
|
||||
it didn't already exist).
|
||||
|
||||
.. _ci_artifacts:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CI Artifacts Directory Layout
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When running the CI build using the command ``spack ci rebuild``, a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
|
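For example, a pipeline generation step could set the artifacts root explicitly (the paths and variable below are placeholders):

.. code-block:: console

   $ spack ci generate --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" --output-file "${CI_PROJECT_DIR}/pipeline.yml"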
||||
|
||||
The top-level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories, nor does
it require user-specified files to be written to any specific directory.
|
||||
|
||||
------------------------
|
||||
``concrete_environment``
|
||||
------------------------
|
||||
|
||||
The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
|
||||
the concrete ``spack.lock`` for the CI environment.
|
||||
|
||||
--------
|
||||
``logs``
|
||||
--------
|
||||
|
||||
The directory ``logs`` contains the Spack build log, ``spack-build-out.txt``, and the Spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally, all files specified by the package's ``Builder``
property ``archive_files`` are copied here (e.g., ``CMakeCache.txt`` in ``CMakeBuilder``).
|
||||
|
||||
----------------
|
||||
``reproduction``
|
||||
----------------
|
||||
|
||||
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
|
||||
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
|
||||
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
|
||||
|
||||
The ``repro.json`` file is not versioned and is only designed to work with the version of Spack that CI was run with.
An example of what a ``repro.json`` may look like is shown below.
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
|
||||
"job_spec_json": "adios2.json",
|
||||
"ci_project_dir": "/builds/spack/spack"
|
||||
}
|
||||
|
||||
---------
|
||||
``tests``
|
||||
---------
|
||||
|
||||
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
|
||||
data in it depending on the package that was built and the availability of tests.
|
||||
|
||||
-------------
|
||||
``user_data``
|
||||
-------------
|
||||
|
||||
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` directory.
Users may use this to store additional logs, metrics, or other types of files generated by the build job.
|
||||
|
||||
-------------------------------------
|
||||
Using a custom spack in your pipeline
|
||||
-------------------------------------
|
||||
|
@@ -1,13 +1,13 @@
|
||||
sphinx==8.2.3
|
||||
sphinx==8.1.3
|
||||
sphinxcontrib-programoutput==0.18
|
||||
sphinx_design==0.6.1
|
||||
sphinx-rtd-theme==3.0.2
|
||||
python-levenshtein==0.27.1
|
||||
python-levenshtein==0.26.1
|
||||
docutils==0.21.2
|
||||
pygments==2.19.1
|
||||
urllib3==2.3.0
|
||||
pytest==8.3.5
|
||||
isort==6.0.1
|
||||
black==25.1.0
|
||||
flake8==7.2.0
|
||||
pytest==8.3.4
|
||||
isort==5.13.2
|
||||
black==24.10.0
|
||||
flake8==7.1.1
|
||||
mypy==1.11.1
|
||||
|
Vendored symlinks added under ``lib/spack/env/`` (each file is a one-line symlink to the compiler wrapper):

lib/spack/env/aocc/clang          -> ../cc
lib/spack/env/aocc/clang++        -> ../cpp
lib/spack/env/aocc/flang          -> ../fc
lib/spack/env/arm/armclang        -> ../cc
lib/spack/env/arm/armclang++      -> ../cc
lib/spack/env/arm/armflang        -> ../cc
lib/spack/env/c++                 -> cc
lib/spack/env/c89                 -> cc
lib/spack/env/c99                 -> cc
lib/spack/env/case-insensitive/CC -> ../cc
|
@@ -36,9 +36,15 @@ readonly lsep=''
|
||||
# the script runs. They are set by routines in spack.build_environment
|
||||
# as part of the package installation process.
|
||||
readonly params="\
|
||||
SPACK_COMPILER_WRAPPER_PATH
|
||||
SPACK_ENV_PATH
|
||||
SPACK_DEBUG_LOG_DIR
|
||||
SPACK_DEBUG_LOG_ID
|
||||
SPACK_COMPILER_SPEC
|
||||
SPACK_CC_RPATH_ARG
|
||||
SPACK_CXX_RPATH_ARG
|
||||
SPACK_F77_RPATH_ARG
|
||||
SPACK_FC_RPATH_ARG
|
||||
SPACK_LINKER_ARG
|
||||
SPACK_SHORT_SPEC
|
||||
SPACK_SYSTEM_DIRS
|
||||
SPACK_MANAGED_DIRS"
|
||||
@@ -178,10 +184,9 @@ execute() {
|
||||
unset IFS
|
||||
exit
|
||||
;;
|
||||
dump-var-*)
|
||||
var=${SPACK_TEST_COMMAND#dump-var-}
|
||||
dump-env-*)
|
||||
var=${SPACK_TEST_COMMAND#dump-env-}
|
||||
eval "printf '%s\n' \"\$0: \$var: \$$var\""
|
||||
exit
|
||||
;;
|
||||
*)
|
||||
die "Unknown test command: '$SPACK_TEST_COMMAND'"
|
||||
@@ -297,36 +302,9 @@ fi
|
||||
# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
|
||||
# including version checks (SPACK_XFLAGS variants are not applied
|
||||
# for version checks).
|
||||
command_from_argv0="${0##*/}"
|
||||
command="$command_from_argv0"
|
||||
command="${0##*/}"
|
||||
comp="CC"
|
||||
vcheck_flags=""
|
||||
|
||||
_command_from_flags() {
|
||||
while [ $# -ne 0 ]; do
|
||||
arg="$1"
|
||||
shift
|
||||
case "$arg" in
|
||||
-x|--language)
|
||||
_lang="$1"
|
||||
shift ;;
|
||||
-x*)
|
||||
_lang="${arg#-x}" ;;
|
||||
--language=*)
|
||||
_lang="${arg#--language=}" ;;
|
||||
*) continue ;;
|
||||
esac
|
||||
done
|
||||
|
||||
case "$_lang" in
|
||||
c) command=cc ;;
|
||||
c++|f77|f95|hip) command="$_lang" ;;
|
||||
*) command="$command_from_argv0" ;; # drop unknown languages
|
||||
esac
|
||||
}
|
||||
|
||||
_command_from_flags "$@"
|
||||
|
||||
case "$command" in
|
||||
cpp)
|
||||
mode=cpp
|
||||
@@ -365,19 +343,8 @@ case "$command" in
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
|
||||
;;
|
||||
hip)
|
||||
command="$SPACK_HIPCXX"
|
||||
language="HIP"
|
||||
comp="HIPCXX"
|
||||
lang_flags=HIP
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_HIPCXXFLAGS}"
|
||||
;;
|
||||
ld|ld.gold|ld.lld)
|
||||
mode=ld
|
||||
if [ -z "$SPACK_CC_RPATH_ARG" ]; then
|
||||
comp="CXX"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
die "Unknown compiler: $command"
|
||||
@@ -432,12 +399,10 @@ fi
|
||||
#
|
||||
dtags_to_add="${SPACK_DTAGS_TO_ADD}"
|
||||
dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
|
||||
|
||||
linker_arg="ERROR: LINKER ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
|
||||
eval "linker_arg=\${SPACK_${comp}_LINKER_ARG:?${linker_arg}}"
|
||||
linker_arg="${SPACK_LINKER_ARG}"
|
||||
|
||||
# Set up rpath variable according to language.
|
||||
rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
|
||||
rpath="ERROR: RPATH ARG WAS NOT SET"
|
||||
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
|
||||
|
||||
# Dump the mode and exit if the command is dump-mode.
|
||||
@@ -446,6 +411,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
|
||||
exit
|
||||
fi
|
||||
|
||||
# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
|
||||
# *have* to set up Fortran executables, so we need to tell the user when a build is
|
||||
# about to attempt to use them unsuccessfully.
|
||||
if [ -z "$command" ]; then
|
||||
die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
|
||||
fi
|
||||
|
||||
#
|
||||
# Filter '.' and Spack environment directories out of PATH so that
|
||||
# this script doesn't just call itself
|
||||
@@ -454,7 +426,7 @@ new_dirs=""
|
||||
IFS=':'
|
||||
for dir in $PATH; do
|
||||
addpath=true
|
||||
for spack_env_dir in $SPACK_COMPILER_WRAPPER_PATH; do
|
||||
for spack_env_dir in $SPACK_ENV_PATH; do
|
||||
case "${dir%%/}" in
|
||||
"$spack_env_dir"|'.'|'')
|
||||
addpath=false
|
||||
@@ -815,17 +787,15 @@ case "$mode" in
|
||||
C)
|
||||
extend spack_flags_list SPACK_ALWAYS_CFLAGS
|
||||
extend spack_flags_list SPACK_CFLAGS
|
||||
preextend flags_list SPACK_TARGET_ARGS_CC
|
||||
;;
|
||||
CXX)
|
||||
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
|
||||
extend spack_flags_list SPACK_CXXFLAGS
|
||||
preextend flags_list SPACK_TARGET_ARGS_CXX
|
||||
;;
|
||||
F)
|
||||
preextend flags_list SPACK_TARGET_ARGS_FORTRAN
|
||||
;;
|
||||
esac
|
||||
|
||||
# prepend target args
|
||||
preextend flags_list SPACK_TARGET_ARGS
|
||||
;;
|
||||
esac
|
||||
|
Vendored symlinks added under ``lib/spack/env/`` (continued; each file is a one-line symlink to the compiler wrapper):

lib/spack/env/cce/case-insensitive/CC     -> ../../cc
lib/spack/env/cce/case-insensitive/crayCC -> ../../cc
lib/spack/env/cce/cc                      -> ../cc
lib/spack/env/cce/craycc                  -> ../cc
lib/spack/env/cce/crayftn                 -> ../cc
lib/spack/env/cce/ftn                     -> ../cc
lib/spack/env/clang/clang                 -> ../cc
lib/spack/env/clang/clang++               -> ../cc
lib/spack/env/clang/flang                 -> ../cc
lib/spack/env/clang/gfortran              -> ../cc
lib/spack/env/cpp                         -> cc
lib/spack/env/f77                         -> cc
lib/spack/env/f90                         -> cc
lib/spack/env/f95                         -> cc
lib/spack/env/fc                          -> cc
lib/spack/env/fj/case-insensitive/FCC     -> ../../cc
lib/spack/env/fj/fcc                      -> ../cc
lib/spack/env/fj/frt                      -> ../cc
lib/spack/env/ftn                         -> cc
lib/spack/env/gcc/g++                     -> ../cc
lib/spack/env/gcc/gcc                     -> ../cc
lib/spack/env/gcc/gfortran                -> ../cc
lib/spack/env/intel/icc                   -> ../cc
lib/spack/env/intel/icpc                  -> ../cc
lib/spack/env/intel/ifort                 -> ../cc
lib/spack/env/ld                          -> cc
lib/spack/env/ld.gold                     -> cc
lib/spack/env/ld.lld                      -> cc
lib/spack/env/nag/nagfor                  -> ../cc
lib/spack/env/nvhpc/nvc                   -> ../cc
lib/spack/env/nvhpc/nvc++                 -> ../cc
lib/spack/env/nvhpc/nvfortran             -> ../cc
lib/spack/env/oneapi/dpcpp                -> ../cc
lib/spack/env/oneapi/icpx                 -> ../cc
lib/spack/env/oneapi/icx                  -> ../cc
lib/spack/env/oneapi/ifx                  -> ../cc
lib/spack/env/pgi/pgc++                   -> ../cc
lib/spack/env/pgi/pgcc                    -> ../cc
lib/spack/env/pgi/pgfortran               -> ../cc
lib/spack/env/rocmcc/amdclang             -> ../cc
lib/spack/env/rocmcc/amdclang++           -> ../cpp
lib/spack/env/rocmcc/amdflang             -> ../fc
lib/spack/env/xl/xlc                      -> ../cc
lib/spack/env/xl/xlc++                    -> ../cc
lib/spack/env/xl/xlf                      -> ../cc
lib/spack/env/xl/xlf90                    -> ../cc
lib/spack/env/xl_r/xlc++_r                -> ../cc
lib/spack/env/xl_r/xlc_r                  -> ../cc
lib/spack/env/xl_r/xlf90_r                -> ../cc
lib/spack/env/xl_r/xlf_r                  -> ../cc
|
@@ -7,7 +7,6 @@
|
||||
import fnmatch
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import itertools
|
||||
import numbers
|
||||
import os
|
||||
@@ -21,7 +20,6 @@
|
||||
from contextlib import contextmanager
|
||||
from itertools import accumulate
|
||||
from typing import (
|
||||
IO,
|
||||
Callable,
|
||||
Deque,
|
||||
Dict,
|
||||
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
|
||||
and vice versa.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
package,
|
||||
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
|
||||
link_install_prefix: bool = True,
|
||||
):
|
||||
def __init__(self, package, link_install_prefix=True):
|
||||
"""
|
||||
Args:
|
||||
package (spack.package_base.PackageBase): Package requiring links
|
||||
base_modification_prefix (str|pathlib.Path): Path representation indicating
|
||||
the root directory in which to establish the simulated rpath, i.e., where the
|
||||
symlinks that comprise the "rpath" behavior will be installed.
|
||||
|
||||
Note: This is a mutually exclusive option with `link_install_prefix`; using
both is an error.
|
||||
|
||||
Default: None
|
||||
link_install_prefix (bool): Link against package's own install or stage root.
|
||||
Packages that run their own executables during build and require rpaths to
|
||||
the build directory during build time require this option.
|
||||
|
||||
Default: install
|
||||
the build directory during build time require this option. Default: install
|
||||
root
|
||||
|
||||
Note: This is a mutually exclusive option with `base_modification_prefix`, using
|
||||
both is an error.
|
||||
"""
|
||||
self.pkg = package
|
||||
self._addl_rpaths: set[str] = set()
|
||||
if link_install_prefix and base_modification_prefix:
|
||||
raise RuntimeError(
|
||||
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
|
||||
"Select either `link_install_prefix` to create an install prefix rpath"
|
||||
" or specify a `base_modification_prefix` for any other link type. "
|
||||
"Specifying both arguments is invalid."
|
||||
)
|
||||
if not (link_install_prefix or base_modification_prefix):
|
||||
raise RuntimeError(
|
||||
"Insufficient arguments given to WindowsSimulatedRpath.\n"
|
||||
"WindowsSimulatedRPath requires one of link_install_prefix"
|
||||
" or base_modification_prefix to be specified."
|
||||
" Neither was provided."
|
||||
)
|
||||
|
||||
self._addl_rpaths = set()
|
||||
self.link_install_prefix = link_install_prefix
|
||||
if base_modification_prefix:
|
||||
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
|
||||
else:
|
||||
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
|
||||
self._additional_library_dependents: set[pathlib.Path] = set()
|
||||
if not self.link_install_prefix:
|
||||
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
|
||||
self._additional_library_dependents = set()
|
||||
|
||||
@property
|
||||
def library_dependents(self):
|
||||
"""
|
||||
Set of directories where package binaries/libraries are located.
|
||||
"""
|
||||
base_pths = set()
|
||||
if self.link_install_prefix:
|
||||
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
|
||||
base_pths |= self._additional_library_dependents
|
||||
return base_pths
|
||||
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
|
||||
|
||||
def add_library_dependent(self, *dest):
|
||||
"""
|
||||
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
|
||||
new_pth = pathlib.Path(pth).parent
|
||||
else:
|
||||
new_pth = pathlib.Path(pth)
|
||||
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
|
||||
if not path_is_in_prefix:
|
||||
raise RuntimeError(
|
||||
f"Attempting to generate rpath symlink out of rpath context:\
|
||||
{str(self.base_modification_prefix)}"
|
||||
)
|
||||
self._additional_library_dependents.add(new_pth)
|
||||
|
||||
@property
|
||||
@@ -2628,33 +2577,6 @@ def establish_link(self):
|
||||
self._link(library, lib_dir)
|
||||
|
||||
|
||||
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
|
||||
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
|
||||
so an executable can test the libraries/executables with proper access
|
||||
to dependent dlls
|
||||
|
||||
Note: this is a no-op on all other platforms besides Windows
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
|
||||
test_dir: the testing directory in which we should construct an rpath
|
||||
"""
|
||||
# link_install_prefix as false ensures we're not linking into the install prefix
|
||||
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
|
||||
# add the testing directory as a location to install rpath symlinks
|
||||
mini_rpath.add_library_dependent(test_dir)
|
||||
|
||||
# check for whether build_directory is available, if not
|
||||
# assume the stage root is the build dir
|
||||
build_dir_attr = getattr(pkg, "build_directory", None)
|
||||
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
|
||||
# add the build dir & build dir bin
|
||||
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
|
||||
mini_rpath.add_rpath(os.path.join(build_directory))
|
||||
# construct rpath
|
||||
mini_rpath.establish_link()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_access_dir(path):
|
||||
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
|
||||
os.utime(f, (os.path.getatime(f), mtime))
|
||||
|
||||
|
||||
@contextmanager
|
||||
def temporary_file_position(stream):
|
||||
orig_pos = stream.tell()
|
||||
yield
|
||||
stream.seek(orig_pos)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
|
||||
with temporary_file_position(stream):
|
||||
stream.seek(loc, relative_to)
|
||||
yield
|
||||
|
||||
|
||||
@contextmanager
|
||||
def temporary_dir(
|
||||
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
|
||||
|
@@ -11,11 +11,10 @@
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
import typing
|
||||
import warnings
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
|
||||
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = r"^\.#|~$"
|
||||
@@ -73,7 +72,7 @@ def index_by(objects, *funcs):
|
||||
if isinstance(f, str):
|
||||
f = lambda x: getattr(x, funcs[0])
|
||||
elif isinstance(f, tuple):
|
||||
f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
|
||||
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
|
||||
|
||||
result = {}
|
||||
for o in objects:
|
||||
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
|
||||
|
||||
|
||||
class Singleton:
|
||||
"""Wrapper for lazily initialized singleton objects."""
|
||||
"""Simple wrapper for lazily initialized singleton objects."""
|
||||
|
||||
def __init__(self, factory: Callable[[], object]):
|
||||
def __init__(self, factory):
|
||||
"""Create a new singleton to be inited with the factory function.
|
||||
|
||||
Most factories will simply create the object to be initialized and
|
||||
return it.
|
||||
|
||||
In some cases, e.g. when bootstrapping some global state, the singleton
|
||||
may need to be initialized incrementally. If the factory returns a generator
|
||||
instead of a regular object, the singleton will assign each result yielded by
|
||||
the generator to the singleton instance. This allows methods called by
|
||||
the factory in later stages to refer back to the singleton.
|
||||
|
||||
Args:
|
||||
factory (function): function taking no arguments that creates the
|
||||
singleton instance.
|
||||
|
||||
factory (function): function taking no arguments that
|
||||
creates the singleton instance.
|
||||
"""
|
||||
self.factory = factory
|
||||
self._instance = None
|
||||
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
|
||||
@property
|
||||
def instance(self):
|
||||
if self._instance is None:
|
||||
instance = self.factory()
|
||||
|
||||
if isinstance(instance, types.GeneratorType):
|
||||
# if it's a generator, assign every value
|
||||
for value in instance:
|
||||
self._instance = value
|
||||
else:
|
||||
# if not, just assign the result like a normal singleton
|
||||
self._instance = instance
|
||||
|
||||
self._instance = self.factory()
|
||||
return self._instance
|
||||
|
||||
def __getattr__(self, name):
|
||||
@@ -1016,8 +996,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
|
||||
def grouped_message(self, with_tracebacks: bool = True) -> str:
|
||||
"""Print out an error message coalescing all the forwarded errors."""
|
||||
each_exception_message = [
|
||||
"\n\t{0} raised {1}: {2}\n{3}".format(
|
||||
context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
|
||||
"{0} raised {1}: {2}{3}".format(
|
||||
context,
|
||||
exc.__class__.__name__,
|
||||
exc,
|
||||
"\n{0}".format("".join(tb)) if with_tracebacks else "",
|
||||
)
|
||||
for context, exc, tb in self.exceptions
|
||||
]
|
||||
@@ -1097,88 +1080,3 @@ def __set__(self, instance, value):
|
||||
|
||||
def factory(self, instance, owner):
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
|
||||
KT = TypeVar("KT")
|
||||
VT = TypeVar("VT")
|
||||
|
||||
|
||||
class PriorityOrderedMapping(Mapping[KT, VT]):
|
||||
"""Mapping that iterates over key according to an integer priority. If the priority is
|
||||
the same for two keys, insertion order is what matters.
|
||||
|
||||
The priority is set when the key/value pair is added. If not set, the highest current priority
|
||||
is used.
|
||||
"""
|
||||
|
||||
_data: Dict[KT, VT]
|
||||
_priorities: List[Tuple[int, KT]]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._data = {}
|
||||
# Tuple of (priority, key)
|
||||
self._priorities = []
|
||||
|
||||
def __getitem__(self, key: KT) -> VT:
|
||||
return self._data[key]
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._data)
|
||||
|
||||
def __iter__(self):
|
||||
yield from (key for _, key in self._priorities)
|
||||
|
||||
def __reversed__(self):
|
||||
yield from (key for _, key in reversed(self._priorities))
|
||||
|
||||
def reversed_keys(self):
|
||||
"""Iterates over keys from the highest priority, to the lowest."""
|
||||
return reversed(self)
|
||||
|
||||
def reversed_values(self):
|
||||
"""Iterates over values from the highest priority, to the lowest."""
|
||||
yield from (self._data[key] for _, key in reversed(self._priorities))
|
||||
|
||||
def _highest_priority(self) -> int:
|
||||
if not self._priorities:
|
||||
return 0
|
||||
result, _ = self._priorities[-1]
|
||||
return result
|
||||
|
||||
def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
|
||||
"""Adds a key/value pair to the mapping, with a specific priority.
|
||||
|
||||
If the priority is None, then it is assumed to be the highest priority value currently
|
||||
in the container.
|
||||
|
||||
Raises:
|
||||
ValueError: when the same priority is already in the mapping
|
||||
"""
|
||||
if priority is None:
|
||||
priority = self._highest_priority()
|
||||
|
||||
if key in self._data:
|
||||
self.remove(key)
|
||||
|
||||
self._priorities.append((priority, key))
|
||||
# We rely on sort being stable
|
||||
self._priorities.sort(key=lambda x: x[0])
|
||||
self._data[key] = value
|
||||
assert len(self._data) == len(self._priorities)
|
||||
|
||||
def remove(self, key: KT) -> VT:
|
||||
"""Removes a key from the mapping.
|
||||
|
||||
Returns:
|
||||
The value associated with the key being removed
|
||||
|
||||
Raises:
|
||||
KeyError: if the key is not in the mapping
|
||||
"""
|
||||
if key not in self._data:
|
||||
raise KeyError(f"cannot find {key}")
|
||||
|
||||
popped_item = self._data.pop(key)
|
||||
self._priorities = [(p, k) for p, k in self._priorities if k != key]
|
||||
assert len(self._data) == len(self._priorities)
|
||||
return popped_item
|
||||
|
@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
|
||||
self.src_a = src_a
|
||||
self.src_b = src_b
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
|
||||
|
||||
|
||||
def _samefile(a: str, b: str):
|
||||
try:
|
||||
return os.path.samefile(a, b)
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
class SourceMergeVisitor(BaseDirectoryVisitor):
|
||||
"""
|
||||
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
|
||||
- A list of merge conflicts in dst/
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
|
||||
):
|
||||
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
|
||||
self.ignore = ignore if ignore is not None else lambda f: False
|
||||
|
||||
# On case-insensitive filesystems, normalize paths to detect duplications
|
||||
self.normalize_paths = normalize_paths
|
||||
|
||||
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
|
||||
# bit to the relative path in the destination dir.
|
||||
self.projection: str = ""
|
||||
@@ -86,88 +71,10 @@ def __init__(
|
||||
# and can run mkdir in order.
|
||||
self.directories: Dict[str, Tuple[str, str]] = {}
|
||||
|
||||
# If the visitor is configured to normalize paths, keep a map of
|
||||
# normalized path to: original path, root directory + relative path
|
||||
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
|
||||
|
||||
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
|
||||
# are guaranteed to be grouped by src_root in the order they were visited.
|
||||
self.files: Dict[str, Tuple[str, str]] = {}
|
||||
|
||||
# If the visitor is configured to normalize paths, keep a map of
|
||||
# normalized path to: original path, root directory + relative path
|
||||
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
|
||||
|
||||
def _in_directories(self, proj_rel_path: str) -> bool:
|
||||
"""
|
||||
Check if a path is already in the directory list
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return proj_rel_path.lower() in self._directories_normalized
|
||||
else:
|
||||
return proj_rel_path in self.directories
|
||||
|
||||
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Get the directory that is mapped to a path
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return self._directories_normalized[proj_rel_path.lower()]
|
||||
else:
|
||||
return (proj_rel_path, *self.directories[proj_rel_path])
|
||||
|
||||
def _del_directory(self, proj_rel_path: str):
|
||||
"""
|
||||
Remove a directory from the list of directories
|
||||
"""
|
||||
del self.directories[proj_rel_path]
|
||||
if self.normalize_paths:
|
||||
del self._directories_normalized[proj_rel_path.lower()]
|
||||
|
||||
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
|
||||
"""
|
||||
Add a directory to the list of directories.
|
||||
Also stores the normalized version for later lookups
|
||||
"""
|
||||
self.directories[proj_rel_path] = (root, rel_path)
|
||||
if self.normalize_paths:
|
||||
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
|
||||
|
||||
def _in_files(self, proj_rel_path: str) -> bool:
|
||||
"""
|
||||
Check if a path is already in the files list
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return proj_rel_path.lower() in self._files_normalized
|
||||
else:
|
||||
return proj_rel_path in self.files
|
||||
|
||||
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Get the file that is mapped to a path
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return self._files_normalized[proj_rel_path.lower()]
|
||||
else:
|
||||
return (proj_rel_path, *self.files[proj_rel_path])
|
||||
|
||||
def _del_file(self, proj_rel_path: str):
|
||||
"""
|
||||
Remove a file from the list of files
|
||||
"""
|
||||
del self.files[proj_rel_path]
|
||||
if self.normalize_paths:
|
||||
del self._files_normalized[proj_rel_path.lower()]
|
||||
|
||||
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
|
||||
"""
|
||||
Add a file to the list of files
|
||||
Also stores the normalized version for later lookups
|
||||
"""
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
if self.normalize_paths:
|
||||
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
|
||||
|
||||
    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
        """
        Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
        if self.ignore(rel_path):
            # Don't recurse when dir is ignored.
            return False
        elif self._in_files(proj_rel_path):
            # A file-dir conflict is fatal except if they're the same file (symlinked dir).
            src_a = os.path.join(*self._file(proj_rel_path))
            src_b = os.path.join(root, rel_path)

            if not _samefile(src_a, src_b):
                self.fatal_conflicts.append(
                    MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
        elif proj_rel_path in self.files:
            # Can't create a dir where a file is.
            src_a_root, src_a_relpath = self.files[proj_rel_path]
            self.fatal_conflicts.append(
                MergeConflict(
                    dst=proj_rel_path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join(root, rel_path),
                )
                return False

            # Remove the link in favor of the dir.
            existing_proj_rel_path, _, _ = self._file(proj_rel_path)
            self._del_file(existing_proj_rel_path)
            self._add_directory(proj_rel_path, root, rel_path)
            return True
        elif self._in_directories(proj_rel_path):
            )
            return False
        elif proj_rel_path in self.directories:
            # No new directory, carry on.
            return True
        else:
            # Register new directory.
            self._add_directory(proj_rel_path, root, rel_path)
            self.directories[proj_rel_path] = (root, rel_path)
            return True
    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
        if handle_as_dir:
            return self.before_visit_dir(root, rel_path, depth)

        self.visit_file(root, rel_path, depth, symlink=True)
        self.visit_file(root, rel_path, depth)
        return False
    def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa

        if self.ignore(rel_path):
            pass
        elif self._in_directories(proj_rel_path):
            # Can't create a file where a dir is, unless they are the same file (symlinked dir),
            # in which case we simply drop the symlink in favor of the actual dir.
            src_a = os.path.join(*self._directory(proj_rel_path))
            src_b = os.path.join(root, rel_path)
            if not symlink or not _samefile(src_a, src_b):
                self.fatal_conflicts.append(
                    MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
        elif proj_rel_path in self.directories:
            # Can't create a file where a dir is; fatal error
            self.fatal_conflicts.append(
                MergeConflict(
                    dst=proj_rel_path,
                    src_a=os.path.join(*self.directories[proj_rel_path]),
                    src_b=os.path.join(root, rel_path),
                )
        elif self._in_files(proj_rel_path):
            )
        elif proj_rel_path in self.files:
            # When two files project to the same path, they conflict iff they are distinct.
            # If they are the same (i.e. one links to the other), register regular files rather
            # than symlinks. The reason is that in copy-type views, we need a copy of the actual
            # file, not the symlink.
            src_a = os.path.join(*self._file(proj_rel_path))

            src_a = os.path.join(*self.files[proj_rel_path])
            src_b = os.path.join(root, rel_path)
            if not _samefile(src_a, src_b):

            try:
                samefile = os.path.samefile(src_a, src_b)
            except OSError:
                samefile = False

            if not samefile:
                # Distinct files produce a conflict.
                self.file_conflicts.append(
                    MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
            if not symlink:
                # Remove the link in favor of the actual file. The del is necessary to maintain the
                # order of the files dict, which is grouped by root.
                existing_proj_rel_path, _, _ = self._file(proj_rel_path)
                self._del_file(existing_proj_rel_path)
                self._add_file(proj_rel_path, root, rel_path)
                del self.files[proj_rel_path]
                self.files[proj_rel_path] = (root, rel_path)

        else:
            # Otherwise register this file to be linked.
            self._add_file(proj_rel_path, root, rel_path)
            self.files[proj_rel_path] = (root, rel_path)
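`_samefile` is used in several of these hunks but its definition is not part of the excerpt. Judging from the inline `try`/`except` shown a few lines above (the other side of the same hunk), it presumably wraps `os.path.samefile` and treats lookup errors as "not the same file"; a minimal sketch under that assumption:

import os


def _samefile(a: str, b: str) -> bool:
    # Missing files or permission errors count as "different".
    try:
        return os.path.samefile(a, b)
    except OSError:
        return False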
    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
        # Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
        path = ""
        for part in self.projection.split(os.sep):
            path = os.path.join(path, part)
            if not self._in_files(path):
                self._add_directory(path, "<projection>", path)
            if path not in self.files:
                self.directories[path] = ("<projection>", path)
            else:
                # Can't create a dir where a file is.
                _, src_a_root, src_a_relpath = self._file(path)
                src_a_root, src_a_relpath = self.files[path]
                self.fatal_conflicts.append(
                    MergeConflict(
                        dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
        # If destination dir is a file in a src dir, add a conflict,
        # and don't traverse deeper
        if self.src._in_files(rel_path):
            _, src_a_root, src_a_relpath = self.src._file(rel_path)
        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:

        # If destination dir was also a src dir, remove the mkdir
        # action, and traverse deeper.
        if self.src._in_directories(rel_path):
            existing_proj_rel_path, _, _ = self.src._directory(rel_path)
            self.src._del_directory(existing_proj_rel_path)
        if rel_path in self.src.directories:
            del self.src.directories[rel_path]
            return True
        # If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
        be seen as files; we should not accidentally merge
        source dir with a symlinked dest dir.
        """

        self.visit_file(root, rel_path, depth)

        # Never descend into symlinked target dirs.
        return False

    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
        # Can't merge a file if target already exists
        if self.src._in_directories(rel_path):
            _, src_a_root, src_a_relpath = self.src._directory(rel_path)
        # Always conflict
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )

        elif self.src._in_files(rel_path):
            _, src_a_root, src_a_relpath = self.src._file(rel_path)
        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )

        # Never descend into symlinked target dirs.
        return False

    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
        # Can't merge a file if target already exists
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )

        elif rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -269,7 +269,7 @@ def __init__(

    @staticmethod
    def _poll_interval_generator(
        _wait_times: Optional[Tuple[float, float, float]] = None,
        _wait_times: Optional[Tuple[float, float, float]] = None
    ) -> Generator[float, None, None]:
        """This implements a backoff scheme for polling a contended resource
        by suggesting a succession of wait times between polls.
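Only the signature changes in this hunk (a trailing comma after the `_wait_times` default); the generator body is not shown. As a rough illustration of the backoff idea the docstring describes (short waits at first, then progressively longer ones), a hypothetical schedule might look like the following; the name, repeat counts, and wait times are invented, not the real implementation:

def example_poll_intervals(wait_times=(1e-5, 1e-3, 0.5)):
    """Yield the shortest wait repeatedly, then back off to longer waits."""
    for wait in wait_times[:-1]:
        for _ in range(50):  # poll eagerly for a while at each level
            yield wait
    while True:  # then settle on the longest wait indefinitely
        yield wait_times[-1]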
@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno
@@ -343,6 +344,26 @@ def close(self):
        self.file.close()


@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.

    If `env` is empty (or None), this unsets all current environment
    variables.
    """
    env = env or {}
    old_env = os.environ.copy()
    try:
        os.environ.clear()
        for name, val in env.items():
            os.environ[name] = val
        yield
    finally:
        os.environ.clear()
        for name, val in old_env.items():
            os.environ[name] = val
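Two hunks further down wrap the `multiprocessing.Process` and `Thread` construction in this context manager. A minimal standalone usage sketch (the environment values here are hypothetical):

# os.environ is swapped for the given mapping inside the block and fully
# restored afterwards, even if the block raises.
with replace_environment({"PATH": "/usr/bin", "LC_ALL": "C"}):
    ...  # anything started here sees only PATH and LC_ALL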
def log_output(*args, **kwargs):
    """Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
        self.echo = echo
        self.debug = debug
        self.buffer = buffer
        self.env = env  # the environment to use for _writer_daemon
        self.filter_fn = filter_fn

        self._active = False  # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
            # just don't forward input if this fails
            pass

        self.process = multiprocessing.Process(
            target=_writer_daemon,
            args=(
                input_fd,
                read_fd,
                self.write_fd,
                self.echo,
                self.log_file,
                child_pipe,
                self.filter_fn,
            ),
        )
        self.process.daemon = True  # must set before start()
        self.process.start()
        with replace_environment(self.env):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_fd,
                    read_fd,
                    self.write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
                    self.filter_fn,
                ),
            )
            self.process.daemon = True  # must set before start()
            self.process.start()

        finally:
            if input_fd:
@@ -706,7 +729,10 @@ class winlog:
    Does not support the use of 'v' toggling as nixlog does.
    """

    def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
    def __init__(
        self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
    ):
        self.env = env
        self.debug = debug
        self.echo = echo
        self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
            reader.close()

        self._active = True
        self._thread = Thread(
            target=background_reader, args=(self.reader, self.echo_writer, self._kill)
        )
        self._thread.start()
        with replace_environment(self.env):
            self._thread = Thread(
                target=background_reader, args=(self.reader, self.echo_writer, self._kill)
            )
            self._thread.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
@@ -13,18 +13,6 @@
__version__ = "1.0.0.dev0"
spack_version = __version__

#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)

#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)


def __try_int(v):
    try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
    "get_version",
    "get_spack_commit",
    "get_short_version",
    "package_api_version",
    "min_package_api_version",
]
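The removed comments describe a two-part versioning rule for the Package API: the minor number grows for backwards-compatible extensions, the major number for breaking changes, and compatibility with vX.Y implies compatibility with vX.0. A hypothetical compatibility check built on the two tuples above (the helper and its name are illustrative, not part of the diff):

def api_compatible(required, minimum=min_package_api_version, current=package_api_version):
    """Could this Spack load a repository that requires the given (major, minor)?"""
    major, minor = required
    if not (minimum[0] <= major <= current[0]):
        return False
    # At the newest supported major version, the repository must not require
    # a newer minor version than the one implemented here.
    return minor <= current[1] if major == current[0] else True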
@@ -1,20 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Alias names to convert legacy compilers to builtin packages and vice-versa"""

BUILTIN_TO_LEGACY_COMPILER = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}

LEGACY_COMPILER_TO_BUILTIN = {
    "clang": "llvm",
    "oneapi": "intel-oneapi-compilers",
    "rocmcc": "llvm-amdgpu",
    "intel": "intel-oneapi-compilers-classic",
    "arm": "acfl",
}
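A small illustration of how such a table is typically consumed (the helper below is hypothetical, not part of the removed file): map a legacy compiler name to its builtin package name, passing unknown names through unchanged.

def to_builtin_package(name: str) -> str:
    # e.g. "clang" -> "llvm", "oneapi" -> "intel-oneapi-compilers"; other names pass through as-is.
    return LEGACY_COMPILER_TO_BUILTIN.get(name, name)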
@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
        for dep_name, dep in deps_by_name.items():

            def check_virtual_with_variants(spec, msg):
                if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
                if not spec.virtual or not spec.variants:
                    return
                error = error_cls(
                    f"{pkg_name}: {msg}",
Some files were not shown because too many files have changed in this diff.