Compare commits

1 commit

Author: Gregory Becker
Commit: 675a182f14

Ignore provider weights for runtimes
Runtimes are inherently tied to their associated compilers,
and choices among runtimes should be delegated to the compiler
prioritization criteria, not the provider weights. This fixes
a bug that caused concretization to mix compilers more than necessary
in order to avoid using the runtime associated with the compiler
specified for the root: e.g., `foo%oneapi` building dependencies
with `%gcc` to minimize the number of edges on which
`intel-oneapi-runtime` provides `fortran-rt`.

Signed-off-by: Gregory Becker <becker33@llnl.gov>
Date: 2025-03-24 18:07:59 -07:00
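
For context, a hedged sketch of the symptom described above (the package
name `foo` comes from the message itself; the session is hypothetical,
not output captured from this commit):

    # Before this fix, dependencies of a %oneapi root could flip to %gcc:
    spack spec foo %oneapi
    # The solver mixed compilers to minimize the edges on which
    # intel-oneapi-runtime provides fortran-rt; with provider weights
    # ignored for runtimes, runtime choice now follows the compiler
    # prioritization criteria instead.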
744 changed files with 12001 additions and 14119 deletions

View File

@@ -9,7 +9,6 @@ on:
     branches:
       - develop
       - releases/**
-  merge_group:
 
 concurrency:
   group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
       packages: ${{ steps.filter.outputs.packages }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
+        if: ${{ github.event_name == 'push' }}
         with:
          fetch-depth: 0
         # For pull requests it's not necessary to checkout the code
       - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
         id: filter
         with:
-          # For merge group events, compare against the target branch (main)
-          base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
-          # For merge group events, use the merge group head ref
-          ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
           # Don't run if we only modified packages in the
           # built-in repository or documentation
@@ -81,11 +76,10 @@ jobs:
   prechecks:
     needs: [ changes ]
-    uses: ./.github/workflows/prechecks.yml
+    uses: ./.github/workflows/valid-style.yml
     secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-      with_packages: ${{ needs.changes.outputs.packages }}
 
   import-check:
     needs: [ changes ]
@@ -107,7 +101,6 @@
   coverage:
     needs: [ unit-tests, prechecks ]
-    if: ${{ needs.changes.outputs.core }}
     uses: ./.github/workflows/coverage.yml
     secrets: inherit

View File

@@ -1,4 +1,4 @@
-name: prechecks
+name: style
 
 on:
   workflow_call:
@@ -6,9 +6,6 @@ on:
       with_coverage:
         required: true
         type: string
-      with_packages:
-        required: true
-        type: string
 
 concurrency:
   group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -33,7 +30,6 @@ jobs:
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
-
   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest
@@ -57,25 +53,12 @@ jobs:
       - name: Run style tests
         run: |
           share/spack/qa/run-style-tests
-
   audit:
     uses: ./.github/workflows/audit.yaml
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.13'
-
-  verify-checksums:
-    if: ${{ inputs.with_packages == 'true' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-        with:
-          fetch-depth: 2
-      - name: Verify Added Checksums
-        run: |
-          bin/spack ci verify-versions HEAD^1 HEAD
-
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest

View File

@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
+      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path

View File

@@ -15,11 +15,12 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+      - apple-clang
+      - clang
+      - gcc
     providers:
-      c: [apple-clang, llvm, gcc]
-      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
-      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]
@@ -49,12 +50,3 @@ packages:
       # although the version number used here isn't critical
       - spec: apple-libuuid@1353.100.2
         prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
-  c:
-    prefer:
-    - apple-clang
-  cxx:
-    prefer:
-    - apple-clang
-  fortran:
-    prefer:
-    - gcc

View File

@@ -15,18 +15,19 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc, llvm, intel-oneapi-compilers]
-      cxx: [gcc, llvm, intel-oneapi-compilers]
+      c: [gcc]
+      cxx: [gcc]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc, llvm, intel-oneapi-compilers]
+      fortran: [gcc]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]

View File

@@ -15,8 +15,8 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+      - msvc
     providers:
-      c : [msvc]
-      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]

View File

@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
-* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`

View File

@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
 version (and other constraints) passed as the spec argument to the
 ``spack develop`` command.
 
-When working deep in the graph it is often desirable to have multiple specs marked
-as ``develop`` so you don't have to restage and/or do full rebuilds each time you
-call ``spack install``. The ``--recursive`` flag can be used in these scenarios
-to ensure that all the dependents of the initial spec you provide are also marked
-as develop specs. The ``--recursive`` flag requires a pre-concretized environment
-so the graph can be traversed from the supplied spec all the way to the root specs.
-
 For packages with ``git`` attributes, git branches, tags, and commits can
 also be used as valid concrete versions (see :ref:`version-specifier`).
 This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
@@ -677,45 +670,24 @@ This configuration sets the default compiler for all packages to
 
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack environments allow an ``include`` heading in their yaml schema.
-This heading pulls in external configuration files and applies them to
-the environment.
+Spack environments allow an ``include`` heading in their yaml
+schema. This heading pulls in external configuration files and applies
+them to the environment.
 
 .. code-block:: yaml
 
    spack:
     include:
-    - environment/relative/path/to/config.yaml
+    - relative/path/to/config.yaml
     - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml
-    - path: /path/to/$os/$target/environment
-      optional: true
-    - path: /path/to/os-specific/config-dir
-      when: os == "ventura"
 
-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. (See
-:ref:`include-yaml` for more information on optional and conditional entries.)
-
-Files are listed using paths to individual files or directories containing them.
-Path entries may be absolute or relative to the environment or specified as
-URLs. URLs to individual files need link to the **raw** form of the file's
-contents (e.g., `GitHub
-<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
-or `GitLab
-<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
-Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
-supported. Spack-specific, environment and user path variables can be used.
-(See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.
+Environments can include files or URLs. File paths can be relative or
+absolute. URLs include the path to the text for individual files or
+can be the path to a directory containing configuration files.
+Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
+schemes). Spack-specific, environment and user path variables may be
+used in these paths. See :ref:`config-file-variables` for more information.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence

View File

@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
 .. note::
 
-   As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
+   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
 
 The following set of criteria (from lowest to highest precedence) explain
 common cases where concretization output may seem surprising at first.
@@ -56,19 +56,7 @@
       concretizer:
         reuse: dependencies  # other options are 'true' and 'false'
 
-3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
-   are higher priority than reuse, and can be used to strongly prefer a specific version
-   or variant, without erroring out if it's not possible. Strong preferences are specified
-   as follows:
-
-   .. code-block:: yaml
-
-      packages:
-        foo:
-          prefer:
-          - "@1.1: ~mpi"
-
-4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
+3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
    and constraints from the command line as well as ``package.py`` files override all
    of the above. Requirements are specified as follows:
@@ -78,8 +66,6 @@
       packages:
        foo:
          require:
          - "@1.2: +mpi"
-         conflicts:
-         - "@1.4"
 
 Requirements and constraints restrict the set of possible solutions, while reuse
 behavior and preferences influence what an optimal solution looks like.

View File

@@ -1,51 +0,0 @@
-.. Copyright Spack Project Developers. See COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _include-yaml:
-
-===============================
-Include Settings (include.yaml)
-===============================
-
-Spack allows you to include configuration files through ``include.yaml``.
-Using the ``include:`` heading results in pulling in external configuration
-information to be used by any Spack command.
-
-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. For
-example,
-
-.. code-block:: yaml
-
-   include:
-   - /path/to/a/required/config.yaml
-   - path: /path/to/$os/$target/config
-     optional: true
-   - path: /path/to/os-specific/config-dir
-     when: os == "ventura"
-
-shows all three. The first entry, ``/path/to/a/required/config.yaml``,
-indicates that included ``config.yaml`` file is required (so must exist).
-Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
-the path is only included if it exists. The condition ``os == "ventura"``
-in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
-path is only included when the operating system (``os``) is ``ventura``.
-
-The same conditions and variables in `Spec List References
-<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
-can be used for conditional activation in the ``when`` clauses.
-
-Included files can be specified by path or by their parent directory.
-Paths may be absolute, relative (to the configuration file including the path),
-or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
-schemes) are supported. Spack-specific, environment and user path variables
-can be used. (See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.

View File

@@ -71,7 +71,6 @@ or refer to the full manual below.
    configuration
    config_yaml
-   include_yaml
    packages_yaml
    build_settings
    environments

View File

@@ -486,8 +486,6 @@ present. For instance with a configuration like:
 
 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
 
-.. _package-strong-preferences:
-
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Conflicts and strong preferences
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
     - spack --version
     - cd ${SPACK_CONCRETE_ENV_DIR}
     - spack env activate --without-view .
-    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
+    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
    - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
     - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
     - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi

New vendored symbolic links, one line per file ("path -> link target"):

  lib/spack/env/aocc/clang           -> ../cc
  lib/spack/env/aocc/clang++         -> ../cpp
  lib/spack/env/aocc/flang           -> ../fc
  lib/spack/env/arm/armclang         -> ../cc
  lib/spack/env/arm/armclang++       -> ../cc
  lib/spack/env/arm/armflang         -> ../cc
  lib/spack/env/c++                  -> cc
  lib/spack/env/c89                  -> cc
  lib/spack/env/c99                  -> cc
  lib/spack/env/case-insensitive/CC  -> ../cc

View File

@@ -36,9 +36,15 @@ readonly lsep=''
 # the script runs. They are set by routines in spack.build_environment
 # as part of the package installation process.
 readonly params="\
-SPACK_COMPILER_WRAPPER_PATH
+SPACK_ENV_PATH
 SPACK_DEBUG_LOG_DIR
 SPACK_DEBUG_LOG_ID
+SPACK_COMPILER_SPEC
+SPACK_CC_RPATH_ARG
+SPACK_CXX_RPATH_ARG
+SPACK_F77_RPATH_ARG
+SPACK_FC_RPATH_ARG
+SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
 SPACK_SYSTEM_DIRS
 SPACK_MANAGED_DIRS"
@@ -339,9 +345,6 @@ case "$command" in
        ;;
    ld|ld.gold|ld.lld)
        mode=ld
-        if [ -z "$SPACK_CC_RPATH_ARG" ]; then
-            comp="CXX"
-        fi
        ;;
    *)
        die "Unknown compiler: $command"
@@ -396,12 +399,10 @@ fi
 #
 dtags_to_add="${SPACK_DTAGS_TO_ADD}"
 dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
-linker_arg="ERROR: LINKER ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
-eval "linker_arg=\${SPACK_${comp}_LINKER_ARG:?${linker_arg}}"
+linker_arg="${SPACK_LINKER_ARG}"
 
 # Set up rpath variable according to language.
-rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
+rpath="ERROR: RPATH ARG WAS NOT SET"
 eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
@@ -410,6 +411,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
    exit
 fi
 
+# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
+# *have* to set up Fortran executables, so we need to tell the user when a build is
+# about to attempt to use them unsuccessfully.
+if [ -z "$command" ]; then
+    die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
+fi
+
 #
 # Filter '.' and Spack environment directories out of PATH so that
 # this script doesn't just call itself
@@ -418,7 +426,7 @@ new_dirs=""
 IFS=':'
 for dir in $PATH; do
    addpath=true
-    for spack_env_dir in $SPACK_COMPILER_WRAPPER_PATH; do
+    for spack_env_dir in $SPACK_ENV_PATH; do
        case "${dir%%/}" in
            "$spack_env_dir"|'.'|'')
                addpath=false
@@ -779,17 +787,15 @@ case "$mode" in
    C)
        extend spack_flags_list SPACK_ALWAYS_CFLAGS
        extend spack_flags_list SPACK_CFLAGS
-        preextend flags_list SPACK_TARGET_ARGS_CC
        ;;
    CXX)
        extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
        extend spack_flags_list SPACK_CXXFLAGS
-        preextend flags_list SPACK_TARGET_ARGS_CXX
-        ;;
-    F)
-        preextend flags_list SPACK_TARGET_ARGS_FORTRAN
        ;;
    esac
+
+    # prepend target args
+    preextend flags_list SPACK_TARGET_ARGS
    ;;
esac
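
The symlink entries listed above and below all resolve to this single
wrapper script, which picks its mode from the name it was invoked under.
A minimal sketch of that dispatch pattern (illustrative only, not the
actual wrapper; the names in the case arms are examples):

    #!/bin/sh
    # Many symlinks point at one script; dispatch on $0, the name the
    # script was invoked as (e.g. "gfortran" via lib/spack/env/gcc/gfortran).
    command="${0##*/}"
    case "$command" in
        cc|gcc|clang|icx)         mode=cc ;;   # compile C
        c++|g++|clang++|icpx)     mode=cxx ;;  # compile C++
        f77|f90|ftn|gfortran|ifx) mode=fc ;;   # compile Fortran
        ld|ld.gold|ld.lld)        mode=ld ;;   # direct linker invocation
        *) echo "Unknown compiler: $command" >&2; exit 1 ;;
    esac
    echo "mode: $mode"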

More vendored symbolic links, one line per file ("path -> link target"):

  lib/spack/env/cce/case-insensitive/CC  -> ../../cc
  (file name not captured)               -> ../../cc
  lib/spack/env/cce/cc                   -> ../cc
  lib/spack/env/cce/craycc               -> ../cc
  lib/spack/env/cce/crayftn              -> ../cc
  lib/spack/env/cce/ftn                  -> ../cc
  lib/spack/env/clang/clang              -> ../cc
  lib/spack/env/clang/clang++            -> ../cc
  lib/spack/env/clang/flang              -> ../cc
  lib/spack/env/clang/gfortran           -> ../cc
  lib/spack/env/cpp                      -> cc
  lib/spack/env/f77                      -> cc
  lib/spack/env/f90                      -> cc
  lib/spack/env/f95                      -> cc
  lib/spack/env/fc                       -> cc
  lib/spack/env/fj/case-insensitive/FCC  -> ../../cc
  lib/spack/env/fj/fcc                   -> ../cc
  lib/spack/env/fj/frt                   -> ../cc
  lib/spack/env/ftn                      -> cc
  lib/spack/env/gcc/g++                  -> ../cc
  lib/spack/env/gcc/gcc                  -> ../cc
  lib/spack/env/gcc/gfortran             -> ../cc
  lib/spack/env/intel/icc                -> ../cc
  lib/spack/env/intel/icpc               -> ../cc
  lib/spack/env/intel/ifort              -> ../cc
  lib/spack/env/ld                       -> cc
  lib/spack/env/ld.gold                  -> cc
  lib/spack/env/ld.lld                   -> cc
  lib/spack/env/nag/nagfor               -> ../cc
  lib/spack/env/nvhpc/nvc                -> ../cc
  lib/spack/env/nvhpc/nvc++              -> ../cc
  lib/spack/env/nvhpc/nvfortran          -> ../cc
  lib/spack/env/oneapi/dpcpp             -> ../cc
  lib/spack/env/oneapi/icpx              -> ../cc
  lib/spack/env/oneapi/icx               -> ../cc
  lib/spack/env/oneapi/ifx               -> ../cc
  lib/spack/env/pgi/pgc++                -> ../cc
  lib/spack/env/pgi/pgcc                 -> ../cc
  lib/spack/env/pgi/pgfortran            -> ../cc
  lib/spack/env/rocmcc/amdclang          -> ../cc
  lib/spack/env/rocmcc/amdclang++        -> ../cpp
  lib/spack/env/rocmcc/amdflang          -> ../fc
  lib/spack/env/xl/xlc                   -> ../cc
  lib/spack/env/xl/xlc++                 -> ../cc
  lib/spack/env/xl/xlf                   -> ../cc
  lib/spack/env/xl/xlf90                 -> ../cc
  lib/spack/env/xl_r/xlc++_r             -> ../cc
  lib/spack/env/xl_r/xlc_r               -> ../cc
  lib/spack/env/xl_r/xlf90_r             -> ../cc
  lib/spack/env/xl_r/xlf_r               -> ../cc

View File

@@ -11,7 +11,6 @@
 import re
 import sys
 import traceback
-import types
 import typing
 import warnings
 from datetime import datetime, timedelta
@@ -73,7 +72,7 @@ def index_by(objects, *funcs):
     if isinstance(f, str):
         f = lambda x: getattr(x, funcs[0])
     elif isinstance(f, tuple):
-        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
+        f = lambda x: tuple(getattr(x, p) for p in funcs[0])
 
     result = {}
     for o in objects:
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
 class Singleton:
-    """Wrapper for lazily initialized singleton objects."""
+    """Simple wrapper for lazily initialized singleton objects."""
 
-    def __init__(self, factory: Callable[[], object]):
+    def __init__(self, factory):
         """Create a new singleton to be inited with the factory function.
 
-        Most factories will simply create the object to be initialized and
-        return it.
-
-        In some cases, e.g. when bootstrapping some global state, the singleton
-        may need to be initialized incrementally. If the factory returns a generator
-        instead of a regular object, the singleton will assign each result yielded by
-        the generator to the singleton instance. This allows methods called by
-        the factory in later stages to refer back to the singleton.
-
         Args:
-            factory (function): function taking no arguments that creates the
-                singleton instance.
+            factory (function): function taking no arguments that
+                creates the singleton instance.
         """
         self.factory = factory
         self._instance = None
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
     @property
     def instance(self):
         if self._instance is None:
-            instance = self.factory()
-            if isinstance(instance, types.GeneratorType):
-                # if it's a generator, assign every value
-                for value in instance:
-                    self._instance = value
-            else:
-                # if not, just assign the result like a normal singleton
-                self._instance = instance
+            self._instance = self.factory()
 
         return self._instance
 
     def __getattr__(self, name):
@@ -1016,8 +996,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
     def grouped_message(self, with_tracebacks: bool = True) -> str:
         """Print out an error message coalescing all the forwarded errors."""
         each_exception_message = [
-            "\n\t{0} raised {1}: {2}\n{3}".format(
-                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
+            "{0} raised {1}: {2}{3}".format(
+                context,
+                exc.__class__.__name__,
+                exc,
+                "\n{0}".format("".join(tb)) if with_tracebacks else "",
             )
             for context, exc, tb in self.exceptions
         ]

View File

@@ -1,20 +0,0 @@
-# Copyright Spack Project Developers. See COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Alias names to convert legacy compilers to builtin packages and vice-versa"""
-
-BUILTIN_TO_LEGACY_COMPILER = {
-    "llvm": "clang",
-    "intel-oneapi-compilers": "oneapi",
-    "llvm-amdgpu": "rocmcc",
-    "intel-oneapi-compiler-classic": "intel",
-    "acfl": "arm",
-}
-
-LEGACY_COMPILER_TO_BUILTIN = {
-    "clang": "llvm",
-    "oneapi": "intel-oneapi-compilers",
-    "rocmcc": "llvm-amdgpu",
-    "intel": "intel-oneapi-compiler-classic",
-    "arm": "acfl",
-}

View File

@@ -110,13 +110,6 @@ def __init__(self, root):
         self._write_transaction_impl = llnl.util.lang.nullcontext
         self._read_transaction_impl = llnl.util.lang.nullcontext
 
-    def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
-        if not self.is_readable():
-            raise spack_db.DatabaseNotReadableError(
-                f"cannot read buildcache v{self.db_version} at {self.root}"
-            )
-        return self._handle_current_version_read(check, db)
-
 
 class FetchCacheError(Exception):
     """Error thrown when fetching the cache failed, usually a composite error list."""
@@ -249,7 +242,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             self._index_file_cache.init_entry(cache_key)
             cache_path = self._index_file_cache.cache_path(cache_key)
             with self._index_file_cache.read_transaction(cache_key):
-                db._read_from_file(pathlib.Path(cache_path))
+                db._read_from_file(cache_path)
         except spack_db.InvalidDatabaseVersionError as e:
             tty.warn(
                 f"you need a newer Spack version to read the buildcache index for the "

View File

@@ -234,6 +234,14 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())
 
+    if platform == "darwin":
+        spec_str += " %apple-clang"
+    elif platform == "windows":
+        spec_str += " %msvc"
+    elif platform == "linux":
+        spec_str += " %gcc"
+    elif platform == "freebsd":
+        spec_str += " %clang"
+
     spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"

View File

@@ -15,13 +15,11 @@
 import archspec.cpu
 
-import spack.compilers.config
-import spack.compilers.libraries
-import spack.config
+import spack.compiler
+import spack.compilers
 import spack.platforms
 import spack.spec
 import spack.traverse
-import spack.version
 
 from .config import spec_for_current_python
@@ -40,7 +38,7 @@ def __init__(self, configuration):
         self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
 
-    def _valid_compiler_or_raise(self):
+    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
         if str(self.host_platform) == "linux":
             compiler_name = "gcc"
         elif str(self.host_platform) == "darwin":
@@ -48,30 +46,17 @@ def _valid_compiler_or_raise(self):
         elif str(self.host_platform) == "windows":
             compiler_name = "msvc"
         elif str(self.host_platform) == "freebsd":
-            compiler_name = "llvm"
+            compiler_name = "clang"
         else:
             raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
 
-        candidates = [
-            x
-            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
-            if x.name == compiler_name
-        ]
+        candidates = spack.compilers.compilers_for_spec(
+            compiler_name, arch_spec=self.host_architecture
+        )
         if not candidates:
             raise RuntimeError(
                 f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
             )
-        candidates.sort(key=lambda x: x.version, reverse=True)
-        best = candidates[0]
-        # Get compilers for bootstrapping from the 'builtin' repository
-        best.namespace = "builtin"
-        # If the compiler does not support C++ 14, fail with a legible error message
-        try:
-            _ = best.package.standard_flag(language="cxx", standard="14")
-        except RuntimeError as e:
-            raise RuntimeError(
-                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
-            ) from e
+        candidates.sort(key=lambda x: x.spec.version, reverse=True)
         return candidates[0]
 
     def _externals_from_yaml(
@@ -90,6 +75,9 @@ def _externals_from_yaml(
             if not s.satisfies(requirements[pkg_name]):
                 continue
 
+            if not s.intersects(f"%{self.host_compiler.spec}"):
+                continue
+
             if not s.intersects(f"arch={self.host_architecture}"):
                 continue
@@ -122,14 +110,11 @@ def concretize(self) -> "spack.spec.Spec":
         # Tweak it to conform to the host architecture
         for node in s.traverse():
             node.architecture.os = str(self.host_os)
+            node.compiler = self.host_compiler.spec
             node.architecture = self.host_architecture
 
             if node.name == "gcc-runtime":
-                node.versions = self.host_compiler.versions
-
-        # Can't use re2c@3.1 with Python 3.6
-        if self.host_python.satisfies("@3.6"):
-            s["re2c"].versions.versions = [spack.version.from_string("=2.2")]
+                node.versions = self.host_compiler.spec.versions
 
         for edge in spack.traverse.traverse_edges([s], cover="edges"):
             if edge.spec.name == "python":
@@ -141,9 +126,6 @@ def concretize(self) -> "spack.spec.Spec":
             if edge.spec.name == "cmake" and self.external_cmake:
                 edge.spec = self.external_cmake
 
-            if edge.spec.name == self.host_compiler.name:
-                edge.spec = self.host_compiler
-
             if "libc" in edge.virtuals:
                 edge.spec = self.host_libc
@@ -159,12 +141,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
         return self._external_spec(result)
 
     def libc_external_spec(self) -> "spack.spec.Spec":
-        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
-        result = detector.default_libc()
+        result = self.host_compiler.default_libc
         return self._external_spec(result)
 
     def _external_spec(self, initial_spec) -> "spack.spec.Spec":
         initial_spec.namespace = "builtin"
+        initial_spec.compiler = self.host_compiler.spec
         initial_spec.architecture = self.host_architecture
         for flag_type in spack.spec.FlagMap.valid_compiler_flags():
             initial_spec.compiler_flags[flag_type] = []

View File

@@ -10,7 +10,7 @@
 from llnl.util import tty
 
-import spack.compilers.config
+import spack.compilers
 import spack.config
 import spack.environment
 import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.default_arch()
-    if not spack.compilers.config.compilers_for_arch(arch):
-        spack.compilers.config.find_compilers()
+    if not spack.compilers.compilers_for_arch(arch):
+        spack.compilers.find_compilers()
 
 
 @contextlib.contextmanager

7 file diffs suppressed because one or more lines are too long

View File

@@ -36,6 +36,7 @@
import multiprocessing import multiprocessing
import os import os
import re import re
import stat
import sys import sys
import traceback import traceback
import types import types
@@ -70,7 +71,7 @@
import spack.build_systems.meson import spack.build_systems.meson
import spack.build_systems.python import spack.build_systems.python
import spack.builder import spack.builder
import spack.compilers.libraries import spack.compilers
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
@@ -84,6 +85,7 @@
import spack.store import spack.store
import spack.subprocess_context import spack.subprocess_context
import spack.util.executable import spack.util.executable
import spack.util.libc
from spack import traverse from spack import traverse
from spack.context import Context from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -91,8 +93,6 @@
from spack.util.environment import ( from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY, SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications, EnvironmentModifications,
ModificationList,
PrependPath,
env_flag, env_flag,
filter_system_paths, filter_system_paths,
get_path, get_path,
@@ -113,7 +113,7 @@
# set_wrapper_variables and used to pass parameters to # set_wrapper_variables and used to pass parameters to
# Spack's compiler wrappers. # Spack's compiler wrappers.
# #
SPACK_COMPILER_WRAPPER_PATH = "SPACK_COMPILER_WRAPPER_PATH" SPACK_ENV_PATH = "SPACK_ENV_PATH"
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS" SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS" SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
SPACK_LINK_DIRS = "SPACK_LINK_DIRS" SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
@@ -390,10 +390,62 @@ def _add_werror_handling(keep_werror, env):
env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags])) env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
def set_wrapper_environment_variables_for_flags(pkg, env): def set_compiler_environment_variables(pkg, env):
assert pkg.spec.concrete assert pkg.spec.concrete
compiler = pkg.compiler
spec = pkg.spec spec = pkg.spec
# Make sure the executables for this compiler exist
compiler.verify_executables()
# Set compiler variables used by CMake and autotools
assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
# Populate an object with the list of environment modifications
# and return it
# TODO : add additional kwargs for better diagnostics, like requestor,
# ttyout, ttyerr, etc.
link_dir = spack.paths.build_env_path
# Set SPACK compiler variables so that our wrapper knows what to
# call. If there is no compiler configured then use a default
# wrapper which will emit an error if it is used.
if compiler.cc:
env.set("SPACK_CC", compiler.cc)
env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
else:
env.set("CC", os.path.join(link_dir, "cc"))
if compiler.cxx:
env.set("SPACK_CXX", compiler.cxx)
env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
else:
env.set("CC", os.path.join(link_dir, "c++"))
if compiler.f77:
env.set("SPACK_F77", compiler.f77)
env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
else:
env.set("F77", os.path.join(link_dir, "f77"))
if compiler.fc:
env.set("SPACK_FC", compiler.fc)
env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
else:
env.set("FC", os.path.join(link_dir, "fc"))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
env.set("SPACK_LINKER_ARG", compiler.linker_arg)
# Check whether we want to force RPATH or RUNPATH
if spack.config.get("config:shared_linking:type") == "rpath":
env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
else:
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
if pkg.keep_werror is not None: if pkg.keep_werror is not None:
keep_werror = pkg.keep_werror keep_werror = pkg.keep_werror
else: else:
@@ -401,6 +453,10 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
_add_werror_handling(keep_werror, env) _add_werror_handling(keep_werror, env)
# Set the target parameters that the compiler will add
isa_arg = optimization_flags(compiler, spec.target)
env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate. # Trap spack-tracked compiler flags as appropriate.
# env_flags are easy to accidentally override. # env_flags are easy to accidentally override.
inject_flags = {} inject_flags = {}
@@ -433,23 +489,75 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
# implicit variables # implicit variables
env.set(flag.upper(), " ".join(f for f in env_flags[flag])) env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
pkg.flags_to_build_system_args(build_system_flags) pkg.flags_to_build_system_args(build_system_flags)
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY) env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
compiler.setup_custom_environment(pkg, env)
return env return env
def optimization_flags(compiler, target): def optimization_flags(compiler, target):
if spack.compilers.is_mixed_toolchain(compiler):
msg = (
"microarchitecture specific optimizations are not "
"supported yet on mixed compiler toolchains [check"
f" {compiler.name}@{compiler.version} for further details]"
)
tty.debug(msg)
return ""
# Try to check if the current compiler comes with a version number or # Try to check if the current compiler comes with a version number or
# has an unexpected suffix. If so, treat it as a compiler with a # has an unexpected suffix. If so, treat it as a compiler with a
# custom spec. # custom spec.
version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string) compiler_version = compiler.version
version_number, suffix = archspec.cpu.version_components(compiler.version)
if not version_number or suffix:
try: try:
result = target.optimization_flags(compiler.name, version_number) compiler_version = compiler.real_version
except spack.util.executable.ProcessError as e:
# log this and just return compiler.version instead
tty.debug(str(e))
try:
result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture): except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
result = "" result = ""
return result return result
class FilterDefaultDynamicLinkerSearchPaths:
"""Remove rpaths to directories that are default search paths of the dynamic linker."""
def __init__(self, dynamic_linker: Optional[str]) -> None:
# Identify directories by (inode, device) tuple, which handles symlinks too.
self.default_path_identifiers: Set[Tuple[int, int]] = set()
if not dynamic_linker:
return
for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
try:
s = os.stat(path)
if stat.S_ISDIR(s.st_mode):
self.default_path_identifiers.add((s.st_ino, s.st_dev))
except OSError:
continue
def is_dynamic_loader_default_path(self, p: str) -> bool:
try:
s = os.stat(p)
return (s.st_ino, s.st_dev) in self.default_path_identifiers
except OSError:
return False
def __call__(self, dirs: List[str]) -> List[str]:
if not self.default_path_identifiers:
return dirs
return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
def set_wrapper_variables(pkg, env): def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix """Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH. `SPACK_`) and also add the compiler wrappers to PATH.
@@ -458,8 +566,39 @@ def set_wrapper_variables(pkg, env):
this function computes these options in a manner that is intended to match the DAG traversal this function computes these options in a manner that is intended to match the DAG traversal
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
is using topo order.""" is using topo order."""
# Set compiler flags injected from the spec # Set environment variables if specified for
set_wrapper_environment_variables_for_flags(pkg, env) # the given compiler
compiler = pkg.compiler
env.extend(spack.schema.environment.parse(compiler.environment))
if compiler.extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name when
# we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
)
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
for item in env_paths:
env.prepend_path("PATH", item)
env.set_path(SPACK_ENV_PATH, env_paths)
# Working directory for the spack command itself, for debug logs. # Working directory for the spack command itself, for debug logs.
if spack.config.get("config:debug"): if spack.config.get("config:debug"):
@@ -525,15 +664,22 @@ def set_wrapper_variables(pkg, env):
lib_path = os.path.join(pkg.prefix, libdir) lib_path = os.path.join(pkg.prefix, libdir)
rpath_dirs.insert(0, lib_path) rpath_dirs.insert(0, lib_path)
filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
pkg.compiler.default_dynamic_linker
)
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
# branch above). link_dirs should be filtered with entries from _parse_link_paths. # branch above). link_dirs should be filtered with entries from _parse_link_paths.
link_dirs = list(dedupe(filter_system_paths(link_dirs))) link_dirs = list(dedupe(filter_system_paths(link_dirs)))
include_dirs = list(dedupe(filter_system_paths(include_dirs))) include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs))) rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec) # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
if default_dynamic_linker_filter: # just this filter.
rpath_dirs = default_dynamic_linker_filter(rpath_dirs) implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
if implicit_rpaths:
env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
# Spack managed directories include the stage, store and upstream stores. We extend this with # Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS). # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -585,6 +731,26 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
# Don't use which for this; we want to find it in the current dir. # Don't use which for this; we want to find it in the current dir.
module.configure = Executable("./configure") module.configure = Executable("./configure")
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path
pkg_compiler = None
try:
pkg_compiler = pkg.compiler
except spack.compilers.NoCompilerForSpecError as e:
tty.debug(f"cannot set 'spack_cc': {str(e)}")
if pkg_compiler is not None:
module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
else:
module.spack_cc = None
module.spack_cxx = None
module.spack_f77 = None
module.spack_fc = None
# Useful directories within the prefix are encapsulated in # Useful directories within the prefix are encapsulated in
# a Prefix object. # a Prefix object.
module.prefix = pkg.prefix module.prefix = pkg.prefix
@@ -715,6 +881,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths) return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def load_external_modules(pkg):
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
for external_module in external_modules:
load_module(external_module)
def setup_package(pkg, dirty, context: Context = Context.BUILD): def setup_package(pkg, dirty, context: Context = Context.BUILD):
"""Execute all environment setup routines.""" """Execute all environment setup routines."""
if context not in (Context.BUILD, Context.TEST): if context not in (Context.BUILD, Context.TEST):
@@ -735,6 +916,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
context == Context.TEST and pkg.test_requires_compiler context == Context.TEST and pkg.test_requires_compiler
) )
if need_compiler: if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods) set_wrapper_variables(pkg, env_mods)
# Platform specific setup goes before package specific setup. This is for setting # Platform specific setup goes before package specific setup. This is for setting
@@ -746,26 +928,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
env_mods.extend(setup_context.get_env_modifications()) env_mods.extend(setup_context.get_env_modifications())
tty.debug("setup_package: collected all modifications from dependencies") tty.debug("setup_package: collected all modifications from dependencies")
tty.debug("setup_package: adding compiler wrappers paths")
env_by_name = env_mods.group_by_name()
for x in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
assert isinstance(
x, PrependPath
), "unexpected setting used for SPACK_COMPILER_WRAPPER_PATH"
env_mods.prepend_path("PATH", x.value)
# Check whether we want to force RPATH or RUNPATH
enable_var_name, disable_var_name = "SPACK_ENABLE_NEW_DTAGS", "SPACK_DISABLE_NEW_DTAGS"
if enable_var_name in env_by_name and disable_var_name in env_by_name:
enable_new_dtags = _extract_dtags_arg(env_by_name, var_name=enable_var_name)
disable_new_dtags = _extract_dtags_arg(env_by_name, var_name=disable_var_name)
if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
env_mods.set("SPACK_DTAGS_TO_STRIP", enable_new_dtags)
env_mods.set("SPACK_DTAGS_TO_ADD", disable_new_dtags)
else:
env_mods.set("SPACK_DTAGS_TO_STRIP", disable_new_dtags)
env_mods.set("SPACK_DTAGS_TO_ADD", enable_new_dtags)
if context == Context.TEST: if context == Context.TEST:
env_mods.prepend_path("PATH", ".") env_mods.prepend_path("PATH", ".")
elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"): elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
@@ -779,7 +941,12 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
# Load modules on an already clean environment, just before applying Spack's # Load modules on an already clean environment, just before applying Spack's
# own environment modifications. This ensures Spack controls CC/CXX/... variables. # own environment modifications. This ensures Spack controls CC/CXX/... variables.
load_external_modules(setup_context) if need_compiler:
tty.debug("setup_package: loading compiler modules")
for mod in pkg.compiler.modules:
load_module(mod)
load_external_modules(pkg)
# Make sure nothing's strange about the Spack environment. # Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn) validate(env_mods, tty.warn)
@@ -790,14 +957,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
return env_base return env_base
def _extract_dtags_arg(env_by_name: Dict[str, ModificationList], *, var_name: str) -> str:
try:
enable_new_dtags = env_by_name[var_name][0].value # type: ignore[union-attr]
except (KeyError, IndexError, AttributeError):
enable_new_dtags = ""
return enable_new_dtags
class EnvironmentVisitor: class EnvironmentVisitor:
def __init__(self, *roots: spack.spec.Spec, context: Context): def __init__(self, *roots: spack.spec.Spec, context: Context):
# For the roots (well, marked specs) we follow different edges # For the roots (well, marked specs) we follow different edges
@@ -1076,21 +1235,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
env.prepend_path("PATH", bin_dir) env.prepend_path("PATH", bin_dir)
def load_external_modules(context: SetupContext) -> None:
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
context: A populated SetupContext object
"""
for spec, _ in context.external:
external_modules = spec.external_modules or []
for external_module in external_modules:
load_module(external_module)
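For readers tracking the signature change (a package argument on one side, a SetupContext on the other), a toy rendition of the traversal follows, with plain dicts standing in for specs and load_module() stubbed out; none of this is Spack's real API:

    def load_external_modules(external):
        # 'external' mimics SetupContext.external: (spec, flag) pairs
        for spec, _ in external:
            for module in spec.get("external_modules") or []:
                print(f"module load {module}")  # stand-in for load_module()

    load_external_modules([({"external_modules": ["cmake/3.27"]}, 0),
                           ({"external_modules": None}, 0)])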
def _setup_pkg_and_run( def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext", serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable, function: Callable,

View File

@@ -11,7 +11,6 @@
import spack.build_environment import spack.build_environment
import spack.builder import spack.builder
import spack.compilers.libraries
import spack.error import spack.error
import spack.package_base import spack.package_base
import spack.phase_callbacks import spack.phase_callbacks
@@ -399,44 +398,33 @@ def _do_patch_libtool(self) -> None:
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper()) markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
# Replace empty linker flag prefixes: # Replace empty linker flag prefixes:
if self.spec.satisfies("%nag"): if self.pkg.compiler.name == "nag":
# Nag is mixed with gcc and g++, which are recognized correctly. # Nag is mixed with gcc and g++, which are recognized correctly.
# Therefore, we change only Fortran values: # Therefore, we change only Fortran values:
nag_pkg = self.spec["fortran"].package
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
x.filter( x.filter(
regex='^wl=""$', regex='^wl=""$',
repl=f'wl="{nag_pkg.linker_arg}"', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
start_at=f"# ### BEGIN {marker}", start_at="# ### BEGIN {0}".format(marker),
stop_at=f"# ### END {marker}", stop_at="# ### END {0}".format(marker),
) )
else: else:
compiler_spec = spack.compilers.libraries.compiler_spec(self.spec) x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
if compiler_spec:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
# Replace empty PIC flag values: # Replace empty PIC flag values:
for compiler, marker in markers.items(): for cc, marker in markers.items():
if compiler == "cc":
language = "c"
elif compiler == "cxx":
language = "cxx"
else:
language = "fortran"
if language not in self.spec:
continue
x.filter( x.filter(
regex='^pic_flag=""$', regex='^pic_flag=""$',
repl=f'pic_flag="{self.spec[language].package.pic_flag}"', repl='pic_flag="{0}"'.format(
start_at=f"# ### BEGIN {marker}", getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
stop_at=f"# ### END {marker}", ),
start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker),
) )
# Other compiler-specific patches: # Other compiler-specific patches:
if self.spec.satisfies("%fj"): if self.pkg.compiler.name == "fj":
x.filter(regex="-nostdlib", repl="", string=True) x.filter(regex="-nostdlib", repl="", string=True)
rehead = r"/\S*/" rehead = r"/\S*/"
for o in [ for o in [
@@ -449,7 +437,7 @@ def _do_patch_libtool(self) -> None:
r"crtendS\.o", r"crtendS\.o",
]: ]:
x.filter(regex=(rehead + o), repl="") x.filter(regex=(rehead + o), repl="")
elif self.spec.satisfies("%nag"): elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
start_at = "# ### BEGIN {0}".format(marker) start_at = "# ### BEGIN {0}".format(marker)

View File

@@ -70,8 +70,12 @@ class CachedCMakeBuilder(CMakeBuilder):
@property @property
def cache_name(self): def cache_name(self):
compiler_str = f"{self.spec['c'].name}-{self.spec['c'].version}" return "{0}-{1}-{2}@{3}.cmake".format(
return f"{self.pkg.name}-{self.spec.architecture.platform}-{compiler_str}.cmake" self.pkg.name,
self.pkg.spec.architecture,
self.pkg.spec.compiler.name,
self.pkg.spec.compiler.version,
)
@property @property
def cache_path(self): def cache_path(self):
@@ -114,9 +118,7 @@ def initconfig_compiler_entries(self):
# Fortran compiler is optional # Fortran compiler is optional
if "FC" in os.environ: if "FC" in os.environ:
spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"]) spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
system_fc_entry = cmake_cache_path( system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
"CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
)
else: else:
spack_fc_entry = "# No Fortran compiler defined in spec" spack_fc_entry = "# No Fortran compiler defined in spec"
system_fc_entry = "# No Fortran compiler defined in spec" system_fc_entry = "# No Fortran compiler defined in spec"
@@ -132,8 +134,8 @@ def initconfig_compiler_entries(self):
" " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]), " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
" " + spack_fc_entry, " " + spack_fc_entry,
"else()\n", "else()\n",
" " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc), " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx), " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
" " + system_fc_entry, " " + system_fc_entry,
"endif()\n", "endif()\n",
] ]
@@ -276,10 +278,6 @@ def initconfig_hardware_entries(self):
entries.append("# ROCm") entries.append("# ROCm")
entries.append("#------------------{0}\n".format("-" * 30)) entries.append("#------------------{0}\n".format("-" * 30))
if spec.satisfies("^blt@0.7:"):
rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
else:
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix))) entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
llvm_bin = spec["llvm-amdgpu"].prefix.bin llvm_bin = spec["llvm-amdgpu"].prefix.bin
@@ -289,11 +287,8 @@ def initconfig_hardware_entries(self):
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm": if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/") llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append( entries.append(
cmake_cache_filepath( cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
) )
)
archs = self.spec.variants["amdgpu_target"].value archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none": if archs[0] != "none":
arch_str = ";".join(archs) arch_str = ";".join(archs)

View File

@@ -6,13 +6,12 @@
import pathlib import pathlib
import re import re
import sys import sys
from typing import Dict, List, Optional, Sequence, Tuple, Union from typing import Dict, List, Sequence, Tuple, Union
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized from llnl.util.lang import classproperty
import spack import spack.compiler
import spack.compilers.error
import spack.package_base import spack.package_base
import spack.util.executable import spack.util.executable
@@ -44,9 +43,6 @@ class CompilerPackage(spack.package_base.PackageBase):
#: Static definition of languages supported by this class #: Static definition of languages supported by this class
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"] compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
#: Relative path to compiler wrappers
compiler_wrapper_link_paths: Dict[str, str] = {}
def __init__(self, spec: "spack.spec.Spec"): def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec) super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages" msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -81,14 +77,14 @@ def executables(cls) -> Sequence[str]:
] ]
@classmethod @classmethod
def determine_version(cls, exe: Path) -> str: def determine_version(cls, exe: Path):
version_argument = cls.compiler_version_argument version_argument = cls.compiler_version_argument
if isinstance(version_argument, str): if isinstance(version_argument, str):
version_argument = (version_argument,) version_argument = (version_argument,)
for va in version_argument: for va in version_argument:
try: try:
output = compiler_output(exe, version_argument=va) output = spack.compiler.get_compiler_version_output(exe, va)
match = re.search(cls.compiler_version_regex, output) match = re.search(cls.compiler_version_regex, output)
if match: if match:
return ".".join(match.groups()) return ".".join(match.groups())
@@ -99,11 +95,10 @@ def determine_version(cls, exe: Path) -> str:
f"[{__file__}] Cannot detect a valid version for the executable " f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}" f"{str(exe)}, for package '{cls.name}': {e}"
) )
return ""
@classmethod @classmethod
def compiler_bindir(cls, prefix: Path) -> Path: def compiler_bindir(cls, prefix: Path) -> Path:
"""Overridable method for the location of the compiler bindir within the prefix""" """Overridable method for the location of the compiler bindir within the preifx"""
return os.path.join(prefix, "bin") return os.path.join(prefix, "bin")
@classmethod @classmethod
@@ -147,109 +142,3 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple: def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
# path determination is separated so it can be reused in subclasses # path determination is separated so it can be reused in subclasses
return "", {"compilers": cls.determine_compiler_paths(exes=exes)} return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
#: Returns the argument needed to set the RPATH, or None if it does not exist
rpath_arg: Optional[str] = "-Wl,-rpath,"
#: Flag that needs to be used to pass an argument to the linker
linker_arg: str = "-Wl,"
#: Flag used to produce Position Independent Code
pic_flag: str = "-fPIC"
#: Flag used to get verbose output
verbose_flags: str = "-v"
#: Flag to activate OpenMP support
openmp_flag: str = "-fopenmp"
implicit_rpath_libs: List[str] = []
def standard_flag(self, *, language: str, standard: str) -> str:
"""Returns the flag used to enforce a given standard for a language"""
if language not in self.supported_languages:
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' language"
)
try:
return self._standard_flag(language=language, standard=standard)
except (KeyError, RuntimeError) as e:
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' standard {standard}"
) from e
def _standard_flag(self, *, language: str, standard: str) -> str:
raise NotImplementedError("Must be implemented by derived classes")
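To make the override contract concrete, here is a hypothetical subclass implementing _standard_flag with a lookup table; the flag values are modeled on GCC-style options and are not taken from any real Spack package:

    class GccLikeStandardFlags:
        def _standard_flag(self, *, language: str, standard: str) -> str:
            flags = {
                ("c", "99"): "-std=c99",
                ("cxx", "17"): "-std=c++17",
                ("fortran", "2008"): "-std=f2008",
            }
            # A KeyError here is caught by standard_flag() and re-raised
            # as UnsupportedCompilerFlag.
            return flags[(language, standard)]

    print(GccLikeStandardFlags()._standard_flag(language="cxx", standard="17"))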
def archspec_name(self) -> str:
"""Name that archspec uses to refer to this compiler"""
return self.spec.name
@property
def cc(self) -> Optional[str]:
assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("c", None)
return self._cc_path()
def _cc_path(self) -> Optional[str]:
"""Returns the path to the C compiler, if the package was installed by Spack"""
return None
@property
def cxx(self) -> Optional[str]:
assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("cxx", None)
return self._cxx_path()
def _cxx_path(self) -> Optional[str]:
"""Returns the path to the C++ compiler, if the package was installed by Spack"""
return None
@property
def fortran(self):
assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("fortran", None)
return self._fortran_path()
def _fortran_path(self) -> Optional[str]:
"""Returns the path to the Fortran compiler, if the package was installed by Spack"""
return None
@memoized
def _compiler_output(
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
"""Returns the output from the compiler invoked with the given version argument.
Args:
compiler_path: path of the compiler to be invoked
version_argument: the argument used to extract version information
"""
compiler = spack.util.executable.Executable(compiler_path)
if not version_argument:
return compiler(
output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
)
return compiler(
version_argument,
output=str,
error=str,
ignore_errors=ignore_errors,
timeout=120,
fail_on_error=True,
)
def compiler_output(
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
"""Wrapper for _get_compiler_version_output()."""
# This ensures that we memoize compiler output by *absolute path*,
# not just executable name. If we don't do this, and the path changes
# (e.g., during testing), we can get incorrect results.
if not os.path.isabs(compiler_path):
compiler_path = spack.util.executable.which_string(str(compiler_path), required=True)
return _compiler_output(
compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
)
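The absolute-path normalization before memoization can be reproduced with nothing but the standard library. A sketch using functools.lru_cache and shutil.which in place of Spack's memoized decorator and which_string:

    import functools
    import os
    import shutil

    @functools.lru_cache(maxsize=None)
    def _version_output(abs_path, version_argument):
        return f"ran {abs_path} {version_argument}"  # stand-in for invoking the compiler

    def version_output(path, version_argument="--version"):
        # Resolve to an absolute path first so the cache never keys on a
        # bare name that could resolve to a different binary later.
        if not os.path.isabs(path):
            path = shutil.which(path) or path
        return _version_output(path, version_argument)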

View File

@@ -76,7 +76,7 @@ def toolchain_version(self):
Override this method to select a specific version of the toolchain or change Override this method to select a specific version of the toolchain or change
selection heuristics. selection heuristics.
Default is whatever version of msvc has been selected by concretization""" Default is whatever version of msvc has been selected by concretization"""
return "v" + self.spec["msvc"].package.platform_toolset_ver return "v" + self.pkg.compiler.platform_toolset_ver
@property @property
def std_msbuild_args(self): def std_msbuild_args(self):

View File

@@ -278,6 +278,10 @@ def update_external_dependencies(self, extendee_spec=None):
if not python.architecture.target: if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name python.architecture.target = archspec.cpu.host().family.name
# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler
python.external_path = self.spec.external_path python.external_path = self.spec.external_path
python._mark_concrete() python._mark_concrete()
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=()) self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())

View File

@@ -6,7 +6,6 @@
import codecs import codecs
import json import json
import os import os
import pathlib
import re import re
import shutil import shutil
import stat import stat
@@ -14,7 +13,7 @@
import tempfile import tempfile
import zipfile import zipfile
from collections import namedtuple from collections import namedtuple
from typing import Callable, Dict, List, Set, Union from typing import Callable, Dict, List, Set
from urllib.request import Request from urllib.request import Request
import llnl.path import llnl.path
@@ -24,6 +23,8 @@
import spack import spack
import spack.binary_distribution as bindist import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg import spack.config as cfg
import spack.environment as ev import spack.environment as ev
import spack.error import spack.error
@@ -32,7 +33,6 @@
import spack.paths import spack.paths
import spack.repo import spack.repo
import spack.spec import spack.spec
import spack.store
import spack.util.git import spack.util.git
import spack.util.gpg as gpg_util import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
@@ -41,7 +41,6 @@
from spack import traverse from spack import traverse
from spack.error import SpackError from spack.error import SpackError
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.version import GitVersion, StandardVersion
from .common import ( from .common import (
IS_WINDOWS, IS_WINDOWS,
@@ -80,45 +79,6 @@ def get_change_revisions():
return None, None return None, None
def get_added_versions(
checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
path: str,
from_ref: str = "HEAD~1",
to_ref: str = "HEAD",
) -> List[Union[StandardVersion, GitVersion]]:
"""Get a list of the versions added between `from_ref` and `to_ref`.
Args:
checksums_version_dict (Dict): all package versions keyed by known checksums.
path (str): path to the package.py
from_ref (str): oldest git ref, defaults to `HEAD~1`
to_ref (str): newer git ref, defaults to `HEAD`
Returns: list of versions added between refs
"""
git_exe = spack.util.git.git(required=True)
# Gather git diff
diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
# Store added and removed versions
# Removed versions are tracked here to determine when versions are moved in a file
# and show up as both added and removed in a git diff.
added_checksums = set()
removed_checksums = set()
# Scrape diff for modified versions and prune added versions if they show up
# as also removed (which means they've actually just moved in the file and
# we shouldn't need to rechecksum them)
for checksum in checksums_version_dict.keys():
for line in diff_lines:
if checksum in line:
if line.startswith("+"):
added_checksums.add(checksum)
if line.startswith("-"):
removed_checksums.add(checksum)
return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
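The added-minus-removed set arithmetic is the heart of get_added_versions. Here it is on canned diff lines (the sample data below is invented for illustration):

    diff_lines = [
        "+    version('1.2.0', sha256='aaa111')",
        "+    version('1.1.0', sha256='bbb222')",
        "-    version('1.1.0', sha256='bbb222')",  # moved within the file, not new
    ]
    known = {"aaa111": "1.2.0", "bbb222": "1.1.0"}
    added = {c for c in known for line in diff_lines if c in line and line.startswith("+")}
    removed = {c for c in known for line in diff_lines if c in line and line.startswith("-")}
    print([known[c] for c in added - removed])  # ['1.2.0']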
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
"""Given an environment manifest path and two revisions to compare, return """Given an environment manifest path and two revisions to compare, return
whether or not the stack was changed. Returns True if the environment whether or not the stack was changed. Returns True if the environment
@@ -264,7 +224,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str: def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
reason_msg = ", ".join(reasons) reason_msg = ", ".join(reasons)
spec_fmt = "{name}{@version}{/hash:7}{%compiler}" spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
if not prune: if not prune:
status = colorize("@*g{[x]} ") status = colorize("@*g{[x]} ")
@@ -420,6 +380,7 @@ def generate_pipeline(env: ev.Environment, args) -> None:
args: (spack.main.SpackArgumentParser): Parsed arguments from the command args: (spack.main.SpackArgumentParser): Parsed arguments from the command
line. line.
""" """
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction(): with env.write_transaction():
env.concretize() env.concretize()
env.write() env.write()
@@ -620,25 +581,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
tty.debug(f"job spec: {job_spec}") tty.debug(f"job spec: {job_spec}")
try: try:
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec)) pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
except spack.error.SpackError as e: job_pkg = pkg_cls(job_spec)
tty.error(f"Cannot copy logs: {str(e)}") tty.debug(f"job package: {job_pkg}")
except AssertionError:
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
tty.error(msg)
return return
# Get the package's archived files stage_dir = job_pkg.stage.path
archive_files = [] tty.debug(f"stage dir: {stage_dir}")
archive_root = package_metadata_root / "archived-files" for file in [
if archive_root.is_dir(): job_pkg.log_path,
archive_files = [f for f in archive_root.rglob("*") if f.is_file()] job_pkg.env_mods_path,
else: *spack.builder.create(job_pkg).archive_files,
msg = "Cannot copy package archived files: archived-files must be a directory" ]:
tty.warn(msg) copy_files_to_artifacts(file, job_log_dir)
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
build_env_mods = package_metadata_root / "spack-build-env.txt"
for f in [build_log_zipped, build_env_mods, *archive_files]:
copy_files_to_artifacts(str(f), job_log_dir)
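The new collection step reduces to "two fixed logs plus everything under archived-files". A pathlib sketch assuming the .spack metadata layout named in the diff:

    import pathlib

    def collect_artifacts(metadata_root):
        """Two fixed logs plus anything under archived-files/."""
        root = pathlib.Path(metadata_root)
        files = [root / "spack-build-out.txt.gz", root / "spack-build-env.txt"]
        archive = root / "archived-files"
        if archive.is_dir():
            files += [f for f in archive.rglob("*") if f.is_file()]
        return files

    print(collect_artifacts("/tmp/does-not-exist"))  # still returns the two fixed paths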
def copy_test_logs_to_artifacts(test_stage, job_test_dir): def copy_test_logs_to_artifacts(test_stage, job_test_dir):

View File

@@ -209,8 +209,10 @@ def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
Returns: (str) given spec's CDash build name.""" Returns: (str) given spec's CDash build name."""
if spec: if spec:
spec_str = spec.format("{name}{@version}{%compiler} hash={hash} arch={architecture}") build_name = (
build_name = f"{spec_str} ({self.build_group})" f"{spec.name}@{spec.version}%{spec.compiler} "
f"hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
)
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}") tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name return build_name
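Both the old f-string and the new spec.format() call produce the same shape of build name; roughly the following (all values below are made up, and real values come from a concrete Spec):

    name, version, compiler = "zlib", "1.3.1", "gcc@13.2.0"
    hash7, arch, group = "abcdefg", "linux-ubuntu22.04-x86_64", "Experimental"
    print(f"{name}@{version}%{compiler} hash={hash7} arch={arch} ({group})")
    # zlib@1.3.1%gcc@13.2.0 hash=abcdefg arch=linux-ubuntu22.04-x86_64 (Experimental)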

View File

@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1: if len(matching_specs) <= 1:
return return
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}" format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"] args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [ args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -375,13 +375,8 @@ def iter_groups(specs, indent, all_headers):
index = index_by(specs, ("architecture", "compiler")) index = index_by(specs, ("architecture", "compiler"))
ispace = indent * " " ispace = indent * " "
def _key(item):
if item is None:
return ""
return str(item)
# Traverse the index and print out each package # Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index, key=_key)): for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0: if i > 0:
print() print()
@@ -453,6 +448,7 @@ def get_arg(name, default=None):
hashes = get_arg("long", False) hashes = get_arg("long", False)
namespaces = get_arg("namespaces", False) namespaces = get_arg("namespaces", False)
flags = get_arg("show_flags", False) flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False) variants = get_arg("variants", False)
groups = get_arg("groups", True) groups = get_arg("groups", True)
all_headers = get_arg("all_headers", False) all_headers = get_arg("all_headers", False)
@@ -474,10 +470,13 @@ def get_arg(name, default=None):
if format_string is None: if format_string is None:
nfmt = "{fullname}" if namespaces else "{name}" nfmt = "{fullname}" if namespaces else "{name}"
ffmt = "" ffmt = ""
if flags: if full_compiler or flags:
ffmt += "{%compiler.name}"
if full_compiler:
ffmt += "{@compiler.version}"
ffmt += " {compiler_flags}" ffmt += " {compiler_flags}"
vfmt = "{variants}" if variants else "" vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + vfmt + ffmt format_string = nfmt + "{@version}" + ffmt + vfmt
def fmt(s, depth=0): def fmt(s, depth=0):
"""Formatter function for all output specs""" """Formatter function for all output specs"""

View File

@@ -4,15 +4,12 @@
import json import json
import os import os
import re
import shutil import shutil
import sys
from typing import Dict
from urllib.parse import urlparse, urlunparse from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr import llnl.util.tty.color as clr
from llnl.util import tty
import spack.binary_distribution as bindist import spack.binary_distribution as bindist
import spack.ci as spack_ci import spack.ci as spack_ci
@@ -21,22 +18,12 @@
import spack.cmd.common.arguments import spack.cmd.common.arguments
import spack.config as cfg import spack.config as cfg
import spack.environment as ev import spack.environment as ev
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht import spack.hash_types as ht
import spack.mirrors.mirror import spack.mirrors.mirror
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util import spack.util.gpg as gpg_util
import spack.util.timer as timer import spack.util.timer as timer
import spack.util.url as url_util import spack.util.url as url_util
import spack.util.web as web_util import spack.util.web as web_util
import spack.version
description = "manage continuous integration pipelines" description = "manage continuous integration pipelines"
section = "build" section = "build"
@@ -45,7 +32,6 @@
SPACK_COMMAND = "spack" SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1 INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100 FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
def deindent(desc): def deindent(desc):
@@ -205,16 +191,6 @@ def setup_parser(subparser):
reproduce.set_defaults(func=ci_reproduce) reproduce.set_defaults(func=ci_reproduce)
# Verify checksums inside of ci workflows
verify_versions = subparsers.add_parser(
"verify-versions",
description=deindent(ci_verify_versions.__doc__),
help=spack.cmd.first_line(ci_verify_versions.__doc__),
)
verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
verify_versions.set_defaults(func=ci_verify_versions)
def ci_generate(args): def ci_generate(args):
"""generate jobs file from a CI-aware spack file """generate jobs file from a CI-aware spack file
@@ -451,7 +427,7 @@ def ci_rebuild(args):
# Arguments when installing the root from sources # Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"] deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--only=package"] root_install_args = install_args + ["--keep-stage", "--only=package"]
if cdash_handler: if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting. # Add additional arguments to `spack install` for CDash reporting.
@@ -488,7 +464,8 @@ def ci_rebuild(args):
job_spec.to_dict(hash=ht.dag_hash), job_spec.to_dict(hash=ht.dag_hash),
) )
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now # We generated the "spack install ..." command to "--keep-stage", copy
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir) spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# If the installation succeeded and we're running stand-alone tests for # If the installation succeeded and we're running stand-alone tests for
@@ -683,159 +660,6 @@ def _gitlab_artifacts_url(url: str) -> str:
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query="")) return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
def validate_standard_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the checksum of a package version based on a tarball.
Args:
pkg (spack.package_base.PackageBase): Spack package for which to validate a version checksum
versions (spack.version.VersionList): list of package versions to validate
Returns: bool: result of the validation. True means every version validated; False means at least one failed.
"""
url_dict: Dict[spack.version.StandardVersion, str] = {}
for version in versions:
url = pkg.find_valid_url_for_version(version)
url_dict[version] = url
version_hashes = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, fetch_options=pkg.fetch_options
)
valid_checksums = True
for version, sha in version_hashes.items():
if sha != pkg.versions[version]["sha256"]:
tty.error(
f"Invalid checksum found {pkg.name}@{version}\n"
f" [package.py] {pkg.versions[version]['sha256']}\n"
f" [Downloaded] {sha}"
)
valid_checksums = False
continue
tty.info(f"Validated {pkg.name}@{version} --> {sha}")
return valid_checksums
def validate_git_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the commit and tag of a package version based on a git repository.
Args:
pkg (spack.package_base.PackageBase): Spack package for which to validate a version
versions (spack.version.VersionList): list of package versions to validate
Returns: bool: result of the validation. True means every version validated; False means at least one failed.
"""
valid_commit = True
for version in versions:
fetcher = spack.fetch_strategy.for_package_version(pkg, version)
with spack.stage.Stage(fetcher) as stage:
known_commit = pkg.versions[version]["commit"]
try:
stage.fetch()
except spack.error.FetchError:
tty.error(
f"Invalid commit for {pkg.name}@{version}\n"
f" {known_commit} could not be checked out in the git repository."
)
valid_commit = False
continue
# Test if the specified tag matches the commit in the package.py
# We retrieve the commit associated with a tag and compare it to the
# commit that is located in the package.py file.
if "tag" in pkg.versions[version]:
tag = pkg.versions[version]["tag"]
try:
with fs.working_dir(stage.source_path):
found_commit = fetcher.git(
"rev-list", "-n", "1", tag, output=str, error=str
).strip()
except spack.util.executable.ProcessError:
tty.error(
f"Invalid tag for {pkg.name}@{version}\n"
f" {tag} could not be found in the git repository."
)
valid_commit = False
continue
if found_commit != known_commit:
tty.error(
f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
f" [package.py] {known_commit}\n"
f" [Downloaded] {found_commit}"
)
valid_commit = False
continue
# If we have downloaded the repository, found the commit, and compared
# the tag (if specified) we can conclude that the version is pointing
# at what we would expect.
tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
return valid_commit
def ci_verify_versions(args):
"""validate version checksum & commits between git refs
This command takes from_ref and to_ref arguments, then parses the git diff
between the two to determine which packages have been modified, and
verifies the new checksums inside of them.
"""
with fs.working_dir(spack.paths.prefix):
# We use HEAD^1 explicitly on the merge commit created by
# GitHub Actions. However, HEAD~1 is a safer default for the helper function.
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
failed_version = False
for pkg_name, path in pkgs:
spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
# Skip checking manual download packages and trust the maintainers
if pkg.manual_download:
tty.warn(f"Skipping manual download package: {pkg_name}")
continue
# Store versions checksums / commits for future loop
checksums_version_dict = {}
commits_version_dict = {}
for version in pkg.versions:
# If the package version defines a sha256 we'll use that as the high entropy
# string to detect which versions have been added between from_ref and to_ref
if "sha256" in pkg.versions[version]:
checksums_version_dict[pkg.versions[version]["sha256"]] = version
# If a package version instead defines a commit we'll use that as a
# high entropy string to detect new versions.
elif "commit" in pkg.versions[version]:
commits_version_dict[pkg.versions[version]["commit"]] = version
# TODO: enforce that every version has a commit or a sha256 defined if it is not
# an infinite version (there are a lot of packages where this doesn't work yet).
with fs.working_dir(spack.paths.prefix):
added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
added_commits = spack_ci.get_added_versions(
commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
if added_checksums:
failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
if added_commits:
failed_version = not validate_git_versions(pkg, added_commits) or failed_version
if failed_version:
sys.exit(1)
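The package-name scrape driving this command uses the BUILTIN regex defined near the top of the file, and it can be exercised standalone (the sample paths below are invented):

    import re

    BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
    files = ["var/spack/repos/builtin/packages/zlib/package.py", "lib/spack/spack/ci.py"]
    pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
    print(pkgs)  # [('zlib', 'var/spack/repos/builtin/packages/zlib/package.py')]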
def ci(parser, args): def ci(parser, args):
if args.func: if args.func:
return args.func(args) return args.func(args)

View File

@@ -4,14 +4,13 @@
import argparse import argparse
import sys import sys
import warnings
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import index_by from llnl.util.lang import index_by
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize from llnl.util.tty.color import colorize
import spack.compilers.config import spack.compilers
import spack.config import spack.config
import spack.spec import spack.spec
from spack.cmd.common import arguments from spack.cmd.common import arguments
@@ -34,20 +33,20 @@ def setup_parser(subparser):
mixed_toolchain_group.add_argument( mixed_toolchain_group.add_argument(
"--mixed-toolchain", "--mixed-toolchain",
action="store_true", action="store_true",
default=False, default=sys.platform == "darwin",
help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)", help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
) )
mixed_toolchain_group.add_argument( mixed_toolchain_group.add_argument(
"--no-mixed-toolchain", "--no-mixed-toolchain",
action="store_false", action="store_false",
dest="mixed_toolchain", dest="mixed_toolchain",
help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)", help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
) )
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER) find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument( find_parser.add_argument(
"--scope", "--scope",
action=arguments.ConfigScope, action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("packages"), default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify", help="configuration scope to modify",
) )
arguments.add_common_arguments(find_parser, ["jobs"]) arguments.add_common_arguments(find_parser, ["jobs"])
@@ -80,97 +79,77 @@ def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and """Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration. add them to Spack's configuration.
""" """
if args.mixed_toolchain:
warnings.warn(
"The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
"has no effect. The option will be removed in Spack v1.1"
)
paths = args.add_paths or None paths = args.add_paths or None
new_compilers = spack.compilers.config.find_compilers( new_compilers = spack.compilers.find_compilers(
path_hints=paths, scope=args.scope, max_workers=args.jobs path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
) )
if new_compilers: if new_compilers:
n = len(new_compilers) n = len(new_compilers)
s = "s" if n > 1 else "" s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "packages") filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}") tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers) compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4) colify(reversed(compiler_strs), indent=4)
else: else:
tty.msg("Found no new compilers") tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:") tty.msg("Compilers are defined in the following files:")
colify(spack.compilers.config.compiler_config_files(), indent=4) colify(spack.compilers.compiler_config_files(), indent=4)
def compiler_remove(args): def compiler_remove(args):
remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG) compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope) candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
if not candidates:
tty.die(f"No compiler matches '{args.compiler_spec}'")
compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates)) if not candidate_compilers:
tty.die("No compilers match spec %s" % compiler_spec)
if not args.all and len(candidates) > 1: if not args.all and len(candidate_compilers) > 1:
tty.error(f"multiple compilers match the spec '{args.compiler_spec}':") tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
print() colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
colify(compiler_strs, indent=4) tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
print()
print(
"Either use a stricter spec to select only one, or use `spack compiler remove -a`"
" to remove all of them."
)
sys.exit(1) sys.exit(1)
remover.flush() for current_compiler in candidate_compilers:
tty.msg("The following compilers have been removed:") spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
print() tty.msg(f"{current_compiler.spec.display_str} has been removed")
colify(compiler_strs, indent=4)
print()
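The mark-then-flush flow of CompilerRemover, reduced to a toy stand-in (a hypothetical class, not Spack's implementation): matches are collected and shown first, and nothing is written until flush().

    class Remover:
        def __init__(self, items):
            self.items, self.marked = list(items), []

        def mark_compilers(self, match):
            self.marked = [i for i in self.items if i.startswith(match)]
            return self.marked

        def flush(self):
            self.items = [i for i in self.items if i not in self.marked]

    r = Remover(["gcc@12.3.0", "gcc@13.2.0", "clang@17.0.1"])
    print(r.mark_compilers("gcc"))  # ['gcc@12.3.0', 'gcc@13.2.0']
    r.flush()
    print(r.items)                  # ['clang@17.0.1']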
def compiler_info(args): def compiler_info(args):
"""Print info about all compilers matching a spec.""" """Print info about all compilers matching a spec."""
query = spack.spec.Spec(args.compiler_spec) cspec = spack.spec.CompilerSpec(args.compiler_spec)
all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False) compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
compilers = [x for x in all_compilers if x.satisfies(query)]
if not compilers: if not compilers:
tty.die(f"No compilers match spec {query.cformat()}") tty.die("No compilers match spec %s" % cspec)
else: else:
for c in compilers: for c in compilers:
print(f"{c.cformat()}:") print(c.spec.display_str + ":")
print(f" prefix: {c.external_path}") print("\tpaths:")
extra_attributes = getattr(c, "extra_attributes", {}) for cpath in ["cc", "cxx", "f77", "fc"]:
if "compilers" in extra_attributes: print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
print(" compilers:") if c.flags:
for language, exe in extra_attributes.get("compilers", {}).items(): print("\tflags:")
print(f" {language}: {exe}") for flag, flag_value in c.flags.items():
if "flags" in extra_attributes: print("\t\t%s = %s" % (flag, flag_value))
print(" flags:") if len(c.environment) != 0:
for flag, flag_value in extra_attributes["flags"].items(): if len(c.environment.get("set", {})) != 0:
print(f" {flag} = {flag_value}")
if "environment" in extra_attributes:
environment = extra_attributes["environment"]
if len(environment.get("set", {})) != 0:
print("\tenvironment:") print("\tenvironment:")
print("\t set:") print("\t set:")
for key, value in environment["set"].items(): for key, value in c.environment["set"].items():
print(f"\t {key} = {value}") print("\t %s = %s" % (key, value))
if "extra_rpaths" in extra_attributes: if c.extra_rpaths:
print(" extra rpaths:") print("\tExtra rpaths:")
for extra_rpath in extra_attributes["extra_rpaths"]: for extra_rpath in c.extra_rpaths:
print(f" {extra_rpath}") print("\t\t%s" % extra_rpath)
if getattr(c, "external_modules", []): print("\tmodules = %s" % c.modules)
print(" modules: ") print("\toperating system = %s" % c.operating_system)
for module in c.external_modules:
print(f" {module}")
print()
def compiler_list(args): def compiler_list(args):
compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False) compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)
# If there are no compilers in any scope, and we're outputting to a tty, give a # If there are no compilers in any scope, and we're outputting to a tty, give a
# hint to the user. # hint to the user.
@@ -183,7 +162,7 @@ def compiler_list(args):
tty.msg(msg) tty.msg(msg)
return return
index = index_by(compilers, spack.compilers.config.name_os_target) index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))
tty.msg("Available compilers") tty.msg("Available compilers")
@@ -202,10 +181,10 @@ def compiler_list(args):
name, os, target = key name, os, target = key
os_str = os os_str = os
if target: if target:
os_str += f"-{target}" os_str += "-%s" % target
cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}" cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
tty.hline(colorize(cname), char="-") tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.format("{name}@{version}") for c in compilers))) colify(reversed(sorted(c.spec.display_str for c in compilers)))
def compiler(parser, args): def compiler(parser, args):

View File

@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
if spack.config.get(key_path, scope=scope): if spack.config.get(key_path, scope=scope):
ideal_scope_to_modify = scope ideal_scope_to_modify = scope
break break
# If we find our key in a specific scope, that's the one we want
# to modify. Otherwise we use the default write scope.
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
update_path = f"{key_path}:[{str(spec)}]" update_path = f"{key_path}:[{str(spec)}]"
spack.config.add(update_path, scope=write_scope) spack.config.add(update_path, scope=ideal_scope_to_modify)
else: else:
raise ValueError("'config change' can currently only change 'require' sections") raise ValueError("'config change' can currently only change 'require' sections")
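The scope fallback added above is a one-liner worth spelling out; the scope names here are examples, with "user" standing in for spack.config.default_modify_scope():

    ideal_scope_to_modify = None  # stays None when the key was not found in any scope
    default_scope = "user"
    write_scope = ideal_scope_to_modify or default_scope
    print(write_scope)  # 'user'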
@@ -521,6 +518,8 @@ def config_prefer_upstream(args):
for spec in pref_specs: for spec in pref_specs:
# Collect all the upstream compilers and versions for this package. # Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": []}) pkg = pkgs.get(spec.name, {"version": []})
all = pkgs.get("all", {"compiler": []})
pkgs["all"] = all
pkgs[spec.name] = pkg pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version. # We have no existing variant if this is our first added version.
@@ -530,6 +529,10 @@ def config_prefer_upstream(args):
if version not in pkg["version"]: if version not in pkg["version"]:
pkg["version"].append(version) pkg["version"].append(version)
compiler = str(spec.compiler)
if compiler not in all["compiler"]:
all["compiler"].append(compiler)
# Get and list all the variants that differ from the default. # Get and list all the variants that differ from the default.
variants = [] variants = []
for var_name, variant in spec.variants.items(): for var_name, variant in spec.variants.items():

View File

@@ -55,7 +55,7 @@ def dependencies(parser, args):
env = ev.active_environment() env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env) spec = spack.cmd.disambiguate_spec(specs[0], env)
format_string = "{name}{@version}{/hash:7}{%compiler}" format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty(): if sys.stdout.isatty():
tty.msg("Dependencies of %s" % spec.format(format_string, color=True)) tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
deps = spack.store.STORE.db.installed_relatives( deps = spack.store.STORE.db.installed_relatives(

Some files were not shown because too many files have changed in this diff.