Compare commits

1 commit

Author: Harmen Stoppels
SHA1: bf853a6978
Message: bootstrap: ensure compatible libc
Date: 2025-03-10 17:26:27 +01:00

719 changed files with 11181 additions and 12663 deletions

View File

@@ -9,7 +9,6 @@ on:
branches:
- develop
- releases/**
merge_group:
concurrency:
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
id: filter
with:
# For merge group events, compare against the target branch (main)
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
# For merge group events, use the merge group head ref
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
# Don't run if we only modified packages in the
# built-in repository or documentation
@@ -81,11 +76,10 @@ jobs:
prechecks:
needs: [ changes ]
uses: ./.github/workflows/prechecks.yml
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
with_packages: ${{ needs.changes.outputs.packages }}
import-check:
needs: [ changes ]
@@ -99,7 +93,7 @@ jobs:
- name: Success
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
else
exit 0
@@ -107,7 +101,6 @@ jobs:
coverage:
needs: [ unit-tests, prechecks ]
if: ${{ needs.changes.outputs.core }}
uses: ./.github/workflows/coverage.yml
secrets: inherit
@@ -120,10 +113,10 @@ jobs:
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
echo "Bootstrap tests failed."
exit 1
else
exit 0

View File

@@ -1,4 +1,4 @@
name: prechecks
name: style
on:
workflow_call:
@@ -6,9 +6,6 @@ on:
with_coverage:
required: true
type: string
with_packages:
required: true
type: string
concurrency:
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -33,7 +30,6 @@ jobs:
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -57,25 +53,12 @@ jobs:
- name: Run style tests
run: |
share/spack/qa/run-style-tests
audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.13'
verify-checksums:
if: ${{ inputs.with_packages == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 2
- name: Verify Added Checksums
run: |
bin/spack ci verify-versions HEAD^1 HEAD
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
bootstrap-dev-rhel8:
runs-on: ubuntu-latest

View File

@@ -19,7 +19,7 @@ config:
install_tree:
root: $spack/opt/spack
projections:
all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
# install_tree can include an optional padded length (int or boolean)
# default is False (do not pad)
# if padded_length is True, Spack will pad as close to the system max path
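As a sketch of the setting that comment describes, a ``config.yaml`` enabling fixed-width padding might look like this (the length 128 is an arbitrary, illustrative value):

.. code-block:: yaml

   config:
     install_tree:
       root: $spack/opt/spack
       padded_length: 128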

View File

@@ -15,11 +15,12 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- apple-clang
- clang
- gcc
providers:
c: [apple-clang, llvm, gcc]
cxx: [apple-clang, llvm, gcc]
elf: [libelf]
fortran: [gcc]
fuse: [macfuse]
gl: [apple-gl]
glu: [apple-glu]
@@ -49,12 +50,3 @@ packages:
# although the version number used here isn't critical
- spec: apple-libuuid@1353.100.2
prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
c:
prefer:
- apple-clang
cxx:
prefer:
- apple-clang
fortran:
prefer:
- gcc

View File

@@ -15,18 +15,19 @@
# -------------------------------------------------------------------------
packages:
all:
compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
providers:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
c: [gcc, llvm, intel-oneapi-compilers]
cxx: [gcc, llvm, intel-oneapi-compilers]
c: [gcc]
cxx: [gcc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran: [gcc, llvm, intel-oneapi-compilers]
fortran: [gcc]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]

View File

@@ -15,8 +15,8 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- msvc
providers:
c : [msvc]
cxx: [msvc]
mpi: [msmpi]
gl: [wgl]

View File

@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
When working deep in the graph it is often desirable to have multiple specs marked
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
to ensure that all the dependents of the initial spec you provide are also marked
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
so the graph can be traversed from the supplied spec all the way to the root specs.
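A minimal sketch of the resulting environment manifest, assuming two specs have been marked for development (package names, versions, and paths here are hypothetical):

.. code-block:: yaml

   spack:
     specs:
     - myapp@1.0
     develop:
       myapp:
         spec: myapp@1.0
         path: /home/user/src/myapp
       mylib:
         spec: mylib@2.3
         path: /home/user/src/mylib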
For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone

View File

@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
.. note::
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
The following set of criteria (from lowest to highest precedence) explain
common cases where concretization output may seem surprising at first.
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
concretizer:
reuse: dependencies # other options are 'true' and 'false'
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
are higher priority than reuse, and can be used to strongly prefer a specific version
or variant, without erroring out if it's not possible. Strong preferences are specified
as follows:
.. code-block:: yaml
packages:
foo:
prefer:
- "@1.1: ~mpi"
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
and constraints from the command line as well as ``package.py`` files override all
of the above. Requirements are specified as follows:
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
foo:
require:
- "@1.2: +mpi"
conflicts:
- "@1.4"
Requirements and constraints restrict the set of possible solutions, while reuse
behavior and preferences influence what an optimal solution looks like.
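To make that distinction concrete, a hypothetical ``packages.yaml`` combining a strong preference with a hard requirement might look as follows (package names and constraints are illustrative only):

.. code-block:: yaml

   packages:
     mpich:
       # strong preference: honored when possible, dropped without error otherwise
       prefer:
       - "@4.1: +fortran"
     openmpi:
       # requirement: concretization fails if this cannot be satisfied
       require:
       - "@4.0:"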

View File

@@ -486,8 +486,6 @@ present. For instance with a configuration like:
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
.. _package-strong-preferences:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Conflicts and strong preferences
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
- spack --version
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi

New vendored symbolic links (target after the arrow):

lib/spack/env/aocc/clang -> ../cc
lib/spack/env/aocc/clang++ -> ../cpp
lib/spack/env/aocc/flang -> ../fc
lib/spack/env/arm/armclang -> ../cc
lib/spack/env/arm/armclang++ -> ../cc
lib/spack/env/arm/armflang -> ../cc
lib/spack/env/c++ -> cc
lib/spack/env/c89 -> cc
lib/spack/env/c99 -> cc
lib/spack/env/case-insensitive/CC -> ../cc

View File

@@ -36,9 +36,15 @@ readonly lsep=''
# the script runs. They are set by routines in spack.build_environment
# as part of the package installation process.
readonly params="\
SPACK_COMPILER_WRAPPER_PATH
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
SPACK_MANAGED_DIRS"
@@ -339,9 +345,6 @@ case "$command" in
;;
ld|ld.gold|ld.lld)
mode=ld
if [ -z "$SPACK_CC_RPATH_ARG" ]; then
comp="CXX"
fi
;;
*)
die "Unknown compiler: $command"
@@ -396,12 +399,10 @@ fi
#
dtags_to_add="${SPACK_DTAGS_TO_ADD}"
dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="ERROR: LINKER ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
eval "linker_arg=\${SPACK_${comp}_LINKER_ARG:?${linker_arg}}"
linker_arg="${SPACK_LINKER_ARG}"
# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
rpath="ERROR: RPATH ARG WAS NOT SET"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
# Dump the mode and exit if the command is dump-mode.
@@ -410,6 +411,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
exit
fi
# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
# *have* to set up Fortran executables, so we need to tell the user when a build is
# about to attempt to use them unsuccessfully.
if [ -z "$command" ]; then
die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
fi
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
@@ -418,7 +426,7 @@ new_dirs=""
IFS=':'
for dir in $PATH; do
addpath=true
for spack_env_dir in $SPACK_COMPILER_WRAPPER_PATH; do
for spack_env_dir in $SPACK_ENV_PATH; do
case "${dir%%/}" in
"$spack_env_dir"|'.'|'')
addpath=false
@@ -779,17 +787,15 @@ case "$mode" in
C)
extend spack_flags_list SPACK_ALWAYS_CFLAGS
extend spack_flags_list SPACK_CFLAGS
preextend flags_list SPACK_TARGET_ARGS_CC
;;
CXX)
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS
preextend flags_list SPACK_TARGET_ARGS_CXX
;;
F)
preextend flags_list SPACK_TARGET_ARGS_FORTRAN
;;
esac
# prepend target args
preextend flags_list SPACK_TARGET_ARGS
;;
esac

More new vendored symbolic links (target after the arrow):

lib/spack/env/cce/case-insensitive/CC -> ../../cc
(unnamed) -> ../../cc
lib/spack/env/cce/cc -> ../cc
lib/spack/env/cce/craycc -> ../cc
lib/spack/env/cce/crayftn -> ../cc
lib/spack/env/cce/ftn -> ../cc
lib/spack/env/clang/clang -> ../cc
lib/spack/env/clang/clang++ -> ../cc
lib/spack/env/clang/flang -> ../cc
lib/spack/env/clang/gfortran -> ../cc
lib/spack/env/cpp -> cc
lib/spack/env/f77 -> cc
lib/spack/env/f90 -> cc
lib/spack/env/f95 -> cc
lib/spack/env/fc -> cc
lib/spack/env/fj/case-insensitive/FCC -> ../../cc
lib/spack/env/fj/fcc -> ../cc
lib/spack/env/fj/frt -> ../cc
lib/spack/env/ftn -> cc
lib/spack/env/gcc/g++ -> ../cc
lib/spack/env/gcc/gcc -> ../cc
lib/spack/env/gcc/gfortran -> ../cc
lib/spack/env/intel/icc -> ../cc
lib/spack/env/intel/icpc -> ../cc
lib/spack/env/intel/ifort -> ../cc
lib/spack/env/ld -> cc
lib/spack/env/ld.gold -> cc
lib/spack/env/ld.lld -> cc
lib/spack/env/nag/nagfor -> ../cc
lib/spack/env/nvhpc/nvc -> ../cc
lib/spack/env/nvhpc/nvc++ -> ../cc
lib/spack/env/nvhpc/nvfortran -> ../cc
lib/spack/env/oneapi/dpcpp -> ../cc
lib/spack/env/oneapi/icpx -> ../cc
lib/spack/env/oneapi/icx -> ../cc
lib/spack/env/oneapi/ifx -> ../cc
lib/spack/env/pgi/pgc++ -> ../cc
lib/spack/env/pgi/pgcc -> ../cc
lib/spack/env/pgi/pgfortran -> ../cc
lib/spack/env/rocmcc/amdclang -> ../cc
lib/spack/env/rocmcc/amdclang++ -> ../cpp
lib/spack/env/rocmcc/amdflang -> ../fc
lib/spack/env/xl/xlc -> ../cc
lib/spack/env/xl/xlc++ -> ../cc
lib/spack/env/xl/xlf -> ../cc
lib/spack/env/xl/xlf90 -> ../cc
lib/spack/env/xl_r/xlc++_r -> ../cc
lib/spack/env/xl_r/xlc_r -> ../cc
lib/spack/env/xl_r/xlf90_r -> ../cc
lib/spack/env/xl_r/xlf_r -> ../cc

View File

@@ -73,7 +73,7 @@ def index_by(objects, *funcs):
if isinstance(f, str):
f = lambda x: getattr(x, funcs[0])
elif isinstance(f, tuple):
f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
result = {}
for o in objects:
@@ -1016,8 +1016,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
def grouped_message(self, with_tracebacks: bool = True) -> str:
"""Print out an error message coalescing all the forwarded errors."""
each_exception_message = [
"\n\t{0} raised {1}: {2}\n{3}".format(
context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
"{0} raised {1}: {2}{3}".format(
context,
exc.__class__.__name__,
exc,
"\n{0}".format("".join(tb)) if with_tracebacks else "",
)
for context, exc, tb in self.exceptions
]

View File

@@ -1,20 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Alias names to convert legacy compilers to builtin packages and vice-versa"""
BUILTIN_TO_LEGACY_COMPILER = {
"llvm": "clang",
"intel-oneapi-compilers": "oneapi",
"llvm-amdgpu": "rocmcc",
"intel-oneapi-compiler-classic": "intel",
"acfl": "arm",
}
LEGACY_COMPILER_TO_BUILTIN = {
"clang": "llvm",
"oneapi": "intel-oneapi-compilers",
"rocmcc": "llvm-amdgpu",
"intel": "intel-oneapi-compiler-classic",
"arm": "acfl",
}

View File

@@ -110,13 +110,6 @@ def __init__(self, root):
self._write_transaction_impl = llnl.util.lang.nullcontext
self._read_transaction_impl = llnl.util.lang.nullcontext
def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
if not self.is_readable():
raise spack_db.DatabaseNotReadableError(
f"cannot read buildcache v{self.db_version} at {self.root}"
)
return self._handle_current_version_read(check, db)
class FetchCacheError(Exception):
"""Error thrown when fetching the cache failed, usually a composite error list."""
@@ -249,7 +242,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
self._index_file_cache.init_entry(cache_key)
cache_path = self._index_file_cache.cache_path(cache_key)
with self._index_file_cache.read_transaction(cache_key):
db._read_from_file(pathlib.Path(cache_path))
db._read_from_file(cache_path)
except spack_db.InvalidDatabaseVersionError as e:
tty.warn(
f"you need a newer Spack version to read the buildcache index for the "

View File

@@ -24,6 +24,7 @@
import spack.store
import spack.util.environment
import spack.util.executable
import spack.util.libc
from .config import spec_for_current_python
@@ -233,10 +234,21 @@ def _root_spec(spec_str: str) -> str:
"""
# Add a compiler and platform requirement to the root spec.
platform = str(spack.platforms.host())
spec_str += f" platform={platform}"
target = archspec.cpu.host().family
spec_str += f" target={target}"
if platform == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
spec_str += " %msvc"
elif platform == "linux":
spec_str += " %gcc"
libc = spack.util.libc.libc_from_current_python_process()
if libc:
spec_str += f" ^[virtuals=libc] {libc.name}@:{libc.version}"
elif platform == "freebsd":
spec_str += " %clang"
tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
return spec_str

View File

@@ -15,13 +15,11 @@
import archspec.cpu
import spack.compilers.config
import spack.compilers.libraries
import spack.config
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse
import spack.version
from .config import spec_for_current_python
@@ -40,7 +38,7 @@ def __init__(self, configuration):
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self):
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
@@ -48,30 +46,17 @@ def _valid_compiler_or_raise(self):
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "llvm"
compiler_name = "clang"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = [
x
for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
if x.name == compiler_name
]
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.version, reverse=True)
best = candidates[0]
# Get compilers for bootstrapping from the 'builtin' repository
best.namespace = "builtin"
# If the compiler does not support C++ 14, fail with a legible error message
try:
_ = best.package.standard_flag(language="cxx", standard="14")
except RuntimeError as e:
raise RuntimeError(
"cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
) from e
candidates.sort(key=lambda x: x.spec.version, reverse=True)
return candidates[0]
def _externals_from_yaml(
@@ -90,6 +75,9 @@ def _externals_from_yaml(
if not s.satisfies(requirements[pkg_name]):
continue
if not s.intersects(f"%{self.host_compiler.spec}"):
continue
if not s.intersects(f"arch={self.host_architecture}"):
continue
@@ -122,14 +110,11 @@ def concretize(self) -> "spack.spec.Spec":
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture
if node.name == "gcc-runtime":
node.versions = self.host_compiler.versions
# Can't use re2c@3.1 with Python 3.6
if self.host_python.satisfies("@3.6"):
s["re2c"].versions.versions = [spack.version.from_string("=2.2")]
node.versions = self.host_compiler.spec.versions
for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
@@ -141,9 +126,6 @@ def concretize(self) -> "spack.spec.Spec":
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake
if edge.spec.name == self.host_compiler.name:
edge.spec = self.host_compiler
if "libc" in edge.virtuals:
edge.spec = self.host_libc
@@ -159,12 +141,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
return self._external_spec(result)
def libc_external_spec(self) -> "spack.spec.Spec":
detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
result = detector.default_libc()
result = self.host_compiler.default_libc
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []

View File

@@ -10,7 +10,7 @@
from llnl.util import tty
import spack.compilers.config
import spack.compilers
import spack.config
import spack.environment
import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
if not spack.compilers.config.compilers_for_arch(arch):
spack.compilers.config.find_compilers()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
@contextlib.contextmanager

7 file diffs suppressed because one or more lines are too long

View File

@@ -36,6 +36,7 @@
import multiprocessing
import os
import re
import stat
import sys
import traceback
import types
@@ -70,7 +71,7 @@
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers.libraries
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error
@@ -84,6 +85,7 @@
import spack.store
import spack.subprocess_context
import spack.util.executable
import spack.util.libc
from spack import traverse
from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -91,8 +93,6 @@
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications,
ModificationList,
PrependPath,
env_flag,
filter_system_paths,
get_path,
@@ -113,7 +113,7 @@
# set_wrapper_variables and used to pass parameters to
# Spack's compiler wrappers.
#
SPACK_COMPILER_WRAPPER_PATH = "SPACK_COMPILER_WRAPPER_PATH"
SPACK_ENV_PATH = "SPACK_ENV_PATH"
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
@@ -390,10 +390,62 @@ def _add_werror_handling(keep_werror, env):
env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
def set_wrapper_environment_variables_for_flags(pkg, env):
def set_compiler_environment_variables(pkg, env):
assert pkg.spec.concrete
compiler = pkg.compiler
spec = pkg.spec
# Make sure the executables for this compiler exist
compiler.verify_executables()
# Set compiler variables used by CMake and autotools
assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
# Populate an object with the list of environment modifications
# and return it
# TODO : add additional kwargs for better diagnostics, like requestor,
# ttyout, ttyerr, etc.
link_dir = spack.paths.build_env_path
# Set SPACK compiler variables so that our wrapper knows what to
# call. If there is no compiler configured then use a default
# wrapper which will emit an error if it is used.
if compiler.cc:
env.set("SPACK_CC", compiler.cc)
env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
else:
env.set("CC", os.path.join(link_dir, "cc"))
if compiler.cxx:
env.set("SPACK_CXX", compiler.cxx)
env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
else:
env.set("CC", os.path.join(link_dir, "c++"))
if compiler.f77:
env.set("SPACK_F77", compiler.f77)
env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
else:
env.set("F77", os.path.join(link_dir, "f77"))
if compiler.fc:
env.set("SPACK_FC", compiler.fc)
env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
else:
env.set("FC", os.path.join(link_dir, "fc"))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
env.set("SPACK_LINKER_ARG", compiler.linker_arg)
# Check whether we want to force RPATH or RUNPATH
if spack.config.get("config:shared_linking:type") == "rpath":
env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
else:
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
if pkg.keep_werror is not None:
keep_werror = pkg.keep_werror
else:
@@ -401,6 +453,10 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
_add_werror_handling(keep_werror, env)
# Set the target parameters that the compiler will add
isa_arg = optimization_flags(compiler, spec.target)
env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate.
# env_flags are easy to accidentally override.
inject_flags = {}
@@ -433,23 +489,75 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
# implicit variables
env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
pkg.flags_to_build_system_args(build_system_flags)
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
compiler.setup_custom_environment(pkg, env)
return env
def optimization_flags(compiler, target):
if spack.compilers.is_mixed_toolchain(compiler):
msg = (
"microarchitecture specific optimizations are not "
"supported yet on mixed compiler toolchains [check"
f" {compiler.name}@{compiler.version} for further details]"
)
tty.debug(msg)
return ""
# Try to check if the current compiler comes with a version number or
# has an unexpected suffix. If so, treat it as a compiler with a
# custom spec.
version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
compiler_version = compiler.version
version_number, suffix = archspec.cpu.version_components(compiler.version)
if not version_number or suffix:
try:
compiler_version = compiler.real_version
except spack.util.executable.ProcessError as e:
# log this and just return compiler.version instead
tty.debug(str(e))
try:
result = target.optimization_flags(compiler.name, version_number)
result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
result = ""
return result
class FilterDefaultDynamicLinkerSearchPaths:
"""Remove rpaths to directories that are default search paths of the dynamic linker."""
def __init__(self, dynamic_linker: Optional[str]) -> None:
# Identify directories by (inode, device) tuple, which handles symlinks too.
self.default_path_identifiers: Set[Tuple[int, int]] = set()
if not dynamic_linker:
return
for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
try:
s = os.stat(path)
if stat.S_ISDIR(s.st_mode):
self.default_path_identifiers.add((s.st_ino, s.st_dev))
except OSError:
continue
def is_dynamic_loader_default_path(self, p: str) -> bool:
try:
s = os.stat(p)
return (s.st_ino, s.st_dev) in self.default_path_identifiers
except OSError:
return False
def __call__(self, dirs: List[str]) -> List[str]:
if not self.default_path_identifiers:
return dirs
return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
@@ -458,8 +566,39 @@ def set_wrapper_variables(pkg, env):
this function computes these options in a manner that is intended to match the DAG traversal
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
is using topo order."""
# Set compiler flags injected from the spec
set_wrapper_environment_variables_for_flags(pkg, env)
# Set environment variables if specified for
# the given compiler
compiler = pkg.compiler
env.extend(spack.schema.environment.parse(compiler.environment))
if compiler.extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name when
# we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
)
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
for item in env_paths:
env.prepend_path("PATH", item)
env.set_path(SPACK_ENV_PATH, env_paths)
# Working directory for the spack command itself, for debug logs.
if spack.config.get("config:debug"):
@@ -525,15 +664,22 @@ def set_wrapper_variables(pkg, env):
lib_path = os.path.join(pkg.prefix, libdir)
rpath_dirs.insert(0, lib_path)
filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
pkg.compiler.default_dynamic_linker
)
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
# branch above). link_dirs should be filtered with entries from _parse_link_paths.
link_dirs = list(dedupe(filter_system_paths(link_dirs)))
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
if default_dynamic_linker_filter:
rpath_dirs = default_dynamic_linker_filter(rpath_dirs)
# TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
# just this filter.
implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
if implicit_rpaths:
env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
# Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -585,6 +731,26 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
# Don't use which for this; we want to find it in the current dir.
module.configure = Executable("./configure")
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path
pkg_compiler = None
try:
pkg_compiler = pkg.compiler
except spack.compilers.NoCompilerForSpecError as e:
tty.debug(f"cannot set 'spack_cc': {str(e)}")
if pkg_compiler is not None:
module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
else:
module.spack_cc = None
module.spack_cxx = None
module.spack_f77 = None
module.spack_fc = None
# Useful directories within the prefix are encapsulated in
# a Prefix object.
module.prefix = pkg.prefix
@@ -715,6 +881,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def load_external_modules(pkg):
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
for external_module in external_modules:
load_module(external_module)
def setup_package(pkg, dirty, context: Context = Context.BUILD):
"""Execute all environment setup routines."""
if context not in (Context.BUILD, Context.TEST):
@@ -735,6 +916,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
context == Context.TEST and pkg.test_requires_compiler
)
if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
# Platform specific setup goes before package specific setup. This is for setting
@@ -746,26 +928,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
env_mods.extend(setup_context.get_env_modifications())
tty.debug("setup_package: collected all modifications from dependencies")
tty.debug("setup_package: adding compiler wrappers paths")
env_by_name = env_mods.group_by_name()
for x in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
assert isinstance(
x, PrependPath
), "unexpected setting used for SPACK_COMPILER_WRAPPER_PATH"
env_mods.prepend_path("PATH", x.value)
# Check whether we want to force RPATH or RUNPATH
enable_var_name, disable_var_name = "SPACK_ENABLE_NEW_DTAGS", "SPACK_DISABLE_NEW_DTAGS"
if enable_var_name in env_by_name and disable_var_name in env_by_name:
enable_new_dtags = _extract_dtags_arg(env_by_name, var_name=enable_var_name)
disable_new_dtags = _extract_dtags_arg(env_by_name, var_name=disable_var_name)
if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
env_mods.set("SPACK_DTAGS_TO_STRIP", enable_new_dtags)
env_mods.set("SPACK_DTAGS_TO_ADD", disable_new_dtags)
else:
env_mods.set("SPACK_DTAGS_TO_STRIP", disable_new_dtags)
env_mods.set("SPACK_DTAGS_TO_ADD", enable_new_dtags)
if context == Context.TEST:
env_mods.prepend_path("PATH", ".")
elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
@@ -779,7 +941,12 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
# Load modules on an already clean environment, just before applying Spack's
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
load_external_modules(setup_context)
if need_compiler:
tty.debug("setup_package: loading compiler modules")
for mod in pkg.compiler.modules:
load_module(mod)
load_external_modules(pkg)
# Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn)
@@ -790,14 +957,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
return env_base
def _extract_dtags_arg(env_by_name: Dict[str, ModificationList], *, var_name: str) -> str:
try:
enable_new_dtags = env_by_name[var_name][0].value # type: ignore[union-attr]
except (KeyError, IndexError, AttributeError):
enable_new_dtags = ""
return enable_new_dtags
class EnvironmentVisitor:
def __init__(self, *roots: spack.spec.Spec, context: Context):
# For the roots (well, marked specs) we follow different edges
@@ -1076,21 +1235,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
env.prepend_path("PATH", bin_dir)
def load_external_modules(context: SetupContext) -> None:
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
context: A populated SetupContext object
"""
for spec, _ in context.external:
external_modules = spec.external_modules or []
for external_module in external_modules:
load_module(external_module)
def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable,

View File

@@ -11,7 +11,6 @@
import spack.build_environment
import spack.builder
import spack.compilers.libraries
import spack.error
import spack.package_base
import spack.phase_callbacks
@@ -399,44 +398,33 @@ def _do_patch_libtool(self) -> None:
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
# Replace empty linker flag prefixes:
if self.spec.satisfies("%nag"):
if self.pkg.compiler.name == "nag":
# Nag is mixed with gcc and g++, which are recognized correctly.
# Therefore, we change only Fortran values:
nag_pkg = self.spec["fortran"].package
for tag in ["fc", "f77"]:
marker = markers[tag]
x.filter(
regex='^wl=""$',
repl=f'wl="{nag_pkg.linker_arg}"',
start_at=f"# ### BEGIN {marker}",
stop_at=f"# ### END {marker}",
repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker),
)
else:
compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
if compiler_spec:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
# Replace empty PIC flag values:
for compiler, marker in markers.items():
if compiler == "cc":
language = "c"
elif compiler == "cxx":
language = "cxx"
else:
language = "fortran"
if language not in self.spec:
continue
for cc, marker in markers.items():
x.filter(
regex='^pic_flag=""$',
repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
start_at=f"# ### BEGIN {marker}",
stop_at=f"# ### END {marker}",
repl='pic_flag="{0}"'.format(
getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
),
start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker),
)
# Other compiler-specific patches:
if self.spec.satisfies("%fj"):
if self.pkg.compiler.name == "fj":
x.filter(regex="-nostdlib", repl="", string=True)
rehead = r"/\S*/"
for o in [
@@ -449,7 +437,7 @@ def _do_patch_libtool(self) -> None:
r"crtendS\.o",
]:
x.filter(regex=(rehead + o), repl="")
elif self.spec.satisfies("%nag"):
elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]:
marker = markers[tag]
start_at = "# ### BEGIN {0}".format(marker)

View File

@@ -70,8 +70,12 @@ class CachedCMakeBuilder(CMakeBuilder):
@property
def cache_name(self):
compiler_str = f"{self.spec['c'].name}-{self.spec['c'].version}"
return f"{self.pkg.name}-{self.spec.architecture.platform}-{compiler_str}.cmake"
return "{0}-{1}-{2}@{3}.cmake".format(
self.pkg.name,
self.pkg.spec.architecture,
self.pkg.spec.compiler.name,
self.pkg.spec.compiler.version,
)
@property
def cache_path(self):
@@ -114,9 +118,7 @@ def initconfig_compiler_entries(self):
# Fortran compiler is optional
if "FC" in os.environ:
spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
system_fc_entry = cmake_cache_path(
"CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
)
system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
else:
spack_fc_entry = "# No Fortran compiler defined in spec"
system_fc_entry = "# No Fortran compiler defined in spec"
@@ -132,8 +134,8 @@ def initconfig_compiler_entries(self):
" " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
" " + spack_fc_entry,
"else()\n",
" " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
" " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
" " + system_fc_entry,
"endif()\n",
]
@@ -276,24 +278,17 @@ def initconfig_hardware_entries(self):
entries.append("# ROCm")
entries.append("#------------------{0}\n".format("-" * 30))
if spec.satisfies("^blt@0.7:"):
rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
else:
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
# others point to /<path>/rocm-<ver>/llvm
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append(
cmake_cache_filepath(
"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
)
)
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
# others point to /<path>/rocm-<ver>/llvm
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append(
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)

View File

@@ -6,13 +6,12 @@
import pathlib
import re
import sys
from typing import Dict, List, Optional, Sequence, Tuple, Union
from typing import Dict, List, Sequence, Tuple, Union
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.lang import classproperty
import spack
import spack.compilers.error
import spack.compiler
import spack.package_base
import spack.util.executable
@@ -44,9 +43,6 @@ class CompilerPackage(spack.package_base.PackageBase):
#: Static definition of languages supported by this class
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
#: Relative path to compiler wrappers
compiler_wrapper_link_paths: Dict[str, str] = {}
def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -81,14 +77,14 @@ def executables(cls) -> Sequence[str]:
]
@classmethod
def determine_version(cls, exe: Path) -> str:
def determine_version(cls, exe: Path):
version_argument = cls.compiler_version_argument
if isinstance(version_argument, str):
version_argument = (version_argument,)
for va in version_argument:
try:
output = compiler_output(exe, version_argument=va)
output = spack.compiler.get_compiler_version_output(exe, va)
match = re.search(cls.compiler_version_regex, output)
if match:
return ".".join(match.groups())
@@ -99,11 +95,10 @@ def determine_version(cls, exe: Path) -> str:
f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}"
)
return ""
@classmethod
def compiler_bindir(cls, prefix: Path) -> Path:
"""Overridable method for the location of the compiler bindir within the prefix"""
"""Overridable method for the location of the compiler bindir within the preifx"""
return os.path.join(prefix, "bin")
@classmethod
@@ -147,109 +142,3 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
# path determination is separated so it can be reused in subclasses
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
#: Returns the argument needed to set the RPATH, or None if it does not exist
rpath_arg: Optional[str] = "-Wl,-rpath,"
#: Flag that needs to be used to pass an argument to the linker
linker_arg: str = "-Wl,"
#: Flag used to produce Position Independent Code
pic_flag: str = "-fPIC"
#: Flag used to get verbose output
verbose_flags: str = "-v"
#: Flag to activate OpenMP support
openmp_flag: str = "-fopenmp"
implicit_rpath_libs: List[str] = []
def standard_flag(self, *, language: str, standard: str) -> str:
"""Returns the flag used to enforce a given standard for a language"""
if language not in self.supported_languages:
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' language"
)
try:
return self._standard_flag(language=language, standard=standard)
except (KeyError, RuntimeError) as e:
raise spack.compilers.error.UnsupportedCompilerFlag(
f"{self.spec} does not provide the '{language}' standard {standard}"
) from e
def _standard_flag(self, *, language: str, standard: str) -> str:
raise NotImplementedError("Must be implemented by derived classes")
def archspec_name(self) -> str:
"""Name that archspec uses to refer to this compiler"""
return self.spec.name
@property
def cc(self) -> Optional[str]:
assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("c", None)
return self._cc_path()
def _cc_path(self) -> Optional[str]:
"""Returns the path to the C compiler, if the package was installed by Spack"""
return None
@property
def cxx(self) -> Optional[str]:
assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("cxx", None)
return self._cxx_path()
def _cxx_path(self) -> Optional[str]:
"""Returns the path to the C++ compiler, if the package was installed by Spack"""
return None
@property
def fortran(self):
assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
if self.spec.external:
return self.spec.extra_attributes["compilers"].get("fortran", None)
return self._fortran_path()
def _fortran_path(self) -> Optional[str]:
"""Returns the path to the Fortran compiler, if the package was installed by Spack"""
return None
@memoized
def _compiler_output(
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
"""Returns the output from the compiler invoked with the given version argument.
Args:
compiler_path: path of the compiler to be invoked
version_argument: the argument used to extract version information
"""
compiler = spack.util.executable.Executable(compiler_path)
if not version_argument:
return compiler(
output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
)
return compiler(
version_argument,
output=str,
error=str,
ignore_errors=ignore_errors,
timeout=120,
fail_on_error=True,
)
def compiler_output(
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
"""Wrapper for _get_compiler_version_output()."""
# This ensures that we memoize compiler output by *absolute path*,
# not just executable name. If we don't do this, and the path changes
# (e.g., during testing), we can get incorrect results.
if not os.path.isabs(compiler_path):
compiler_path = spack.util.executable.which_string(str(compiler_path), required=True)
return _compiler_output(
compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
)

View File

@@ -76,7 +76,7 @@ def toolchain_version(self):
Override this method to select a specific version of the toolchain or change
selection heuristics.
Default is whatever version of msvc has been selected by concretization"""
return "v" + self.spec["msvc"].package.platform_toolset_ver
return "v" + self.pkg.compiler.platform_toolset_ver
@property
def std_msbuild_args(self):

View File

@@ -278,6 +278,10 @@ def update_external_dependencies(self, extendee_spec=None):
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name
# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())

View File

@@ -6,7 +6,6 @@
import codecs
import json
import os
import pathlib
import re
import shutil
import stat
@@ -14,7 +13,7 @@
import tempfile
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set, Union
from typing import Callable, Dict, List, Set
from urllib.request import Request
import llnl.path
@@ -24,6 +23,8 @@
import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg
import spack.environment as ev
import spack.error
@@ -32,7 +33,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -41,7 +41,6 @@
from spack import traverse
from spack.error import SpackError
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.version import GitVersion, StandardVersion
from .common import (
IS_WINDOWS,
@@ -80,45 +79,6 @@ def get_change_revisions():
return None, None
def get_added_versions(
checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
path: str,
from_ref: str = "HEAD~1",
to_ref: str = "HEAD",
) -> List[Union[StandardVersion, GitVersion]]:
"""Get a list of the versions added between `from_ref` and `to_ref`.
Args:
checksums_version_dict (Dict): all package versions keyed by known checksums.
path (str): path to the package.py
from_ref (str): oldest git ref, defaults to `HEAD~1`
to_ref (str): newer git ref, defaults to `HEAD`
Returns: list of versions added between refs
"""
git_exe = spack.util.git.git(required=True)
# Gather git diff
diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
# Store added and removed versions
# Removed versions are tracked here to determine when versions are moved in a file
# and show up as both added and removed in a git diff.
added_checksums = set()
removed_checksums = set()
# Scrape diff for modified versions and prune added versions if they show up
# as also removed (which means they've actually just moved in the file and
# we shouldn't need to rechecksum them)
for checksum in checksums_version_dict.keys():
for line in diff_lines:
if checksum in line:
if line.startswith("+"):
added_checksums.add(checksum)
if line.startswith("-"):
removed_checksums.add(checksum)
return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
"""Given an environment manifest path and two revisions to compare, return
whether or not the stack was changed. Returns True if the environment
@@ -264,7 +224,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
reason_msg = ", ".join(reasons)
spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
if not prune:
status = colorize("@*g{[x]} ")
@@ -420,9 +380,10 @@ def generate_pipeline(env: ev.Environment, args) -> None:
args: (spack.main.SpackArgumentParser): Parsed arguments from the command
line.
"""
with env.write_transaction():
env.concretize()
env.write()
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():
env.concretize()
env.write()
options = collect_pipeline_options(env, args)
@@ -620,25 +581,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
tty.debug(f"job spec: {job_spec}")
try:
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
except spack.error.SpackError as e:
tty.error(f"Cannot copy logs: {str(e)}")
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
tty.debug(f"job package: {job_pkg}")
except AssertionError:
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
tty.error(msg)
return
# Get the package's archived files
archive_files = []
archive_root = package_metadata_root / "archived-files"
if archive_root.is_dir():
archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
else:
msg = "Cannot copy package archived files: archived-files must be a directory"
tty.warn(msg)
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
build_env_mods = package_metadata_root / "spack-build-env.txt"
for f in [build_log_zipped, build_env_mods, *archive_files]:
copy_files_to_artifacts(str(f), job_log_dir)
stage_dir = job_pkg.stage.path
tty.debug(f"stage dir: {stage_dir}")
for file in [
job_pkg.log_path,
job_pkg.env_mods_path,
*spack.builder.create(job_pkg).archive_files,
]:
copy_files_to_artifacts(file, job_log_dir)
def copy_test_logs_to_artifacts(test_stage, job_test_dir):

View File

@@ -209,8 +209,10 @@ def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
Returns: (str) given spec's CDash build name."""
if spec:
spec_str = spec.format("{name}{@version}{%compiler} hash={hash} arch={architecture}")
build_name = f"{spec_str} ({self.build_group})"
build_name = (
f"{spec.name}@{spec.version}%{spec.compiler} "
f"hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
)
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name

View File

@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -375,13 +375,8 @@ def iter_groups(specs, indent, all_headers):
index = index_by(specs, ("architecture", "compiler"))
ispace = indent * " "
def _key(item):
if item is None:
return ""
return str(item)
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index, key=_key)):
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print()
@@ -453,6 +448,7 @@ def get_arg(name, default=None):
hashes = get_arg("long", False)
namespaces = get_arg("namespaces", False)
flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False)
groups = get_arg("groups", True)
all_headers = get_arg("all_headers", False)
@@ -474,10 +470,13 @@ def get_arg(name, default=None):
if format_string is None:
nfmt = "{fullname}" if namespaces else "{name}"
ffmt = ""
if flags:
if full_compiler or flags:
ffmt += "{%compiler.name}"
if full_compiler:
ffmt += "{@compiler.version}"
ffmt += " {compiler_flags}"
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + vfmt + ffmt
format_string = nfmt + "{@version}" + ffmt + vfmt
def fmt(s, depth=0):
"""Formatter function for all output specs"""

View File

@@ -4,15 +4,12 @@
import json
import os
import re
import shutil
import sys
from typing import Dict
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util import tty
import spack.binary_distribution as bindist
import spack.ci as spack_ci
@@ -21,22 +18,12 @@
import spack.cmd.common.arguments
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
import spack.version
description = "manage continuous integration pipelines"
section = "build"
@@ -45,7 +32,6 @@
SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
def deindent(desc):
@@ -205,16 +191,6 @@ def setup_parser(subparser):
reproduce.set_defaults(func=ci_reproduce)
# Verify checksums inside of ci workflows
verify_versions = subparsers.add_parser(
"verify-versions",
description=deindent(ci_verify_versions.__doc__),
help=spack.cmd.first_line(ci_verify_versions.__doc__),
)
verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
verify_versions.set_defaults(func=ci_verify_versions)
def ci_generate(args):
"""generate jobs file from a CI-aware spack file
@@ -451,7 +427,7 @@ def ci_rebuild(args):
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--only=package"]
root_install_args = install_args + ["--keep-stage", "--only=package"]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
@@ -488,7 +464,8 @@ def ci_rebuild(args):
job_spec.to_dict(hash=ht.dag_hash),
)
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
# We generated the "spack install ..." command to "--keep-stage", copy
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# If the installation succeeded and we're running stand-alone tests for
@@ -683,159 +660,6 @@ def _gitlab_artifacts_url(url: str) -> str:
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
def validate_standard_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the checksum of a package version based on a tarball.
Args:
pkg spack.package_base.PackageBase: Spack package for which to validate a version checksum
versions spack.version.VersionList: list of package versions to validate
Returns: bool: result of the validation. True is valid and false is failed.
"""
url_dict: Dict[spack.version.StandardVersion, str] = {}
for version in versions:
url = pkg.find_valid_url_for_version(version)
url_dict[version] = url
version_hashes = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, fetch_options=pkg.fetch_options
)
valid_checksums = True
for version, sha in version_hashes.items():
if sha != pkg.versions[version]["sha256"]:
tty.error(
f"Invalid checksum found {pkg.name}@{version}\n"
f" [package.py] {pkg.versions[version]['sha256']}\n"
f" [Downloaded] {sha}"
)
valid_checksums = False
continue
tty.info(f"Validated {pkg.name}@{version} --> {sha}")
return valid_checksums
def validate_git_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the commit and tag of a package version based on a git repository.
Args:
pkg spack.package_base.PackageBase: Spack package for which to validate a version
versions spack.version.VersionList: list of package versions to validate
Returns: bool: result of the validation. True is valid and false is failed.
"""
valid_commit = True
for version in versions:
fetcher = spack.fetch_strategy.for_package_version(pkg, version)
with spack.stage.Stage(fetcher) as stage:
known_commit = pkg.versions[version]["commit"]
try:
stage.fetch()
except spack.error.FetchError:
tty.error(
f"Invalid commit for {pkg.name}@{version}\n"
f" {known_commit} could not be checked out in the git repository."
)
valid_commit = False
continue
# Test if the specified tag matches the commit in the package.py
# We retrieve the commit associated with a tag and compare it to the
# commit that is located in the package.py file.
if "tag" in pkg.versions[version]:
tag = pkg.versions[version]["tag"]
try:
with fs.working_dir(stage.source_path):
found_commit = fetcher.git(
"rev-list", "-n", "1", tag, output=str, error=str
).strip()
except spack.util.executable.ProcessError:
tty.error(
f"Invalid tag for {pkg.name}@{version}\n"
f" {tag} could not be found in the git repository."
)
valid_commit = False
continue
if found_commit != known_commit:
tty.error(
f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
f" [package.py] {known_commit}\n"
f" [Downloaded] {found_commit}"
)
valid_commit = False
continue
# If we have downloaded the repository, found the commit, and compared
# the tag (if specified) we can conclude that the version is pointing
# at what we would expect.
tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
return valid_commit
def ci_verify_versions(args):
"""validate version checksum & commits between git refs
This command takes a from_ref and to_ref arguments and
then parses the git diff between the two to determine which packages
have been modified verifies the new checksums inside of them.
"""
with fs.working_dir(spack.paths.prefix):
# We use HEAD^1 explicitly on the merge commit created by
# GitHub Actions. However HEAD~1 is a safer default for the helper function.
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
failed_version = False
for pkg_name, path in pkgs:
spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
# Skip checking manual download packages and trust the maintainers
if pkg.manual_download:
tty.warn(f"Skipping manual download package: {pkg_name}")
continue
# Store versions checksums / commits for future loop
checksums_version_dict = {}
commits_version_dict = {}
for version in pkg.versions:
# If the package version defines a sha256 we'll use that as the high entropy
# string to detect which versions have been added between from_ref and to_ref
if "sha256" in pkg.versions[version]:
checksums_version_dict[pkg.versions[version]["sha256"]] = version
# If a package version instead defines a commit we'll use that as a
# high entropy string to detect new versions.
elif "commit" in pkg.versions[version]:
commits_version_dict[pkg.versions[version]["commit"]] = version
# TODO: enforce every version have a commit or a sha256 defined if not
# an infinite version (there are a lot of package's where this doesn't work yet.)
with fs.working_dir(spack.paths.prefix):
added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
added_commits = spack_ci.get_added_versions(
commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
if added_checksums:
failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
if added_commits:
failed_version = not validate_git_versions(pkg, added_commits) or failed_version
if failed_version:
sys.exit(1)
def ci(parser, args):
if args.func:
return args.func(args)

View File

@@ -4,14 +4,13 @@
import argparse
import sys
import warnings
import llnl.util.tty as tty
from llnl.util.lang import index_by
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
import spack.compilers.config
import spack.compilers
import spack.config
import spack.spec
from spack.cmd.common import arguments
@@ -34,20 +33,20 @@ def setup_parser(subparser):
mixed_toolchain_group.add_argument(
"--mixed-toolchain",
action="store_true",
default=False,
help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)",
default=sys.platform == "darwin",
help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
)
mixed_toolchain_group.add_argument(
"--no-mixed-toolchain",
action="store_false",
dest="mixed_toolchain",
help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
)
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("packages"),
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])
@@ -80,97 +79,77 @@ def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
if args.mixed_toolchain:
warnings.warn(
"The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
"has no effect. The option will be removed in Spack v1.1"
)
paths = args.add_paths or None
new_compilers = spack.compilers.config.find_compilers(
path_hints=paths, scope=args.scope, max_workers=args.jobs
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
)
if new_compilers:
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "packages")
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers)
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")
colify(spack.compilers.config.compiler_config_files(), indent=4)
colify(spack.compilers.compiler_config_files(), indent=4)
def compiler_remove(args):
remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG)
candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope)
if not candidates:
tty.die(f"No compiler matches '{args.compiler_spec}'")
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates))
if not candidate_compilers:
tty.die("No compilers match spec %s" % compiler_spec)
if not args.all and len(candidates) > 1:
tty.error(f"multiple compilers match the spec '{args.compiler_spec}':")
print()
colify(compiler_strs, indent=4)
print()
print(
"Either use a stricter spec to select only one, or use `spack compiler remove -a`"
" to remove all of them."
)
if not args.all and len(candidate_compilers) > 1:
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
remover.flush()
tty.msg("The following compilers have been removed:")
print()
colify(compiler_strs, indent=4)
print()
for current_compiler in candidate_compilers:
spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
tty.msg(f"{current_compiler.spec.display_str} has been removed")
def compiler_info(args):
"""Print info about all compilers matching a spec."""
query = spack.spec.Spec(args.compiler_spec)
all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
compilers = [x for x in all_compilers if x.satisfies(query)]
cspec = spack.spec.CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.die(f"No compilers match spec {query.cformat()}")
tty.die("No compilers match spec %s" % cspec)
else:
for c in compilers:
print(f"{c.cformat()}:")
print(f" prefix: {c.external_path}")
extra_attributes = getattr(c, "extra_attributes", {})
if "compilers" in extra_attributes:
print(" compilers:")
for language, exe in extra_attributes.get("compilers", {}).items():
print(f" {language}: {exe}")
if "flags" in extra_attributes:
print(" flags:")
for flag, flag_value in extra_attributes["flags"].items():
print(f" {flag} = {flag_value}")
if "environment" in extra_attributes:
environment = extra_attributes["environment"]
if len(environment.get("set", {})) != 0:
print(c.spec.display_str + ":")
print("\tpaths:")
for cpath in ["cc", "cxx", "f77", "fc"]:
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
if c.flags:
print("\tflags:")
for flag, flag_value in c.flags.items():
print("\t\t%s = %s" % (flag, flag_value))
if len(c.environment) != 0:
if len(c.environment.get("set", {})) != 0:
print("\tenvironment:")
print("\t set:")
for key, value in environment["set"].items():
print(f"\t {key} = {value}")
if "extra_rpaths" in extra_attributes:
print(" extra rpaths:")
for extra_rpath in extra_attributes["extra_rpaths"]:
print(f" {extra_rpath}")
if getattr(c, "external_modules", []):
print(" modules: ")
for module in c.external_modules:
print(f" {module}")
print()
for key, value in c.environment["set"].items():
print("\t %s = %s" % (key, value))
if c.extra_rpaths:
print("\tExtra rpaths:")
for extra_rpath in c.extra_rpaths:
print("\t\t%s" % extra_rpath)
print("\tmodules = %s" % c.modules)
print("\toperating system = %s" % c.operating_system)
def compiler_list(args):
compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)
# If there are no compilers in any scope, and we're outputting to a tty, give a
# hint to the user.
@@ -183,7 +162,7 @@ def compiler_list(args):
tty.msg(msg)
return
index = index_by(compilers, spack.compilers.config.name_os_target)
index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))
tty.msg("Available compilers")
@@ -202,10 +181,10 @@ def compiler_list(args):
name, os, target = key
os_str = os
if target:
os_str += f"-{target}"
cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}"
os_str += "-%s" % target
cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.format("{name}@{version}") for c in compilers)))
colify(reversed(sorted(c.spec.display_str for c in compilers)))
def compiler(parser, args):

View File

@@ -521,6 +521,8 @@ def config_prefer_upstream(args):
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": []})
all = pkgs.get("all", {"compiler": []})
pkgs["all"] = all
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
@@ -530,6 +532,10 @@ def config_prefer_upstream(args):
if version not in pkg["version"]:
pkg["version"].append(version)
compiler = str(spec.compiler)
if compiler not in all["compiler"]:
all["compiler"].append(compiler)
# Get and list all the variants that differ from the default.
variants = []
for var_name, variant in spec.variants.items():

View File

@@ -55,7 +55,7 @@ def dependencies(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
format_string = "{name}{@version}{/hash:7}{%compiler}"
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
deps = spack.store.STORE.db.installed_relatives(

View File

@@ -93,7 +93,7 @@ def dependents(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
format_string = "{name}{@version}{/hash:7}{%compiler}"
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependents of %s" % spec.cformat(format_string))
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)

View File

@@ -3,13 +3,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
from typing import Optional
import llnl.util.tty as tty
import spack.cmd
import spack.config
import spack.environment
import spack.fetch_strategy
import spack.repo
import spack.spec
@@ -33,33 +31,37 @@ def setup_parser(subparser):
"--no-clone",
action="store_false",
dest="clone",
default=None,
help="do not clone, the package already exists at the source path",
)
clone_group.add_argument(
"--clone",
action="store_true",
dest="clone",
default=True,
help=(
"(default) clone the package unless the path already exists, "
"use --force to overwrite"
),
default=None,
help="clone the package even if the path already exists",
)
subparser.add_argument(
"-f", "--force", help="remove any files or directories that block cloning source code"
)
subparser.add_argument(
"-r",
"--recursive",
action="store_true",
help="traverse nodes of the graph to mark everything up to the root as a develop spec",
)
arguments.add_common_arguments(subparser, ["spec"])
def _update_config(spec, path):
find_fn = lambda section: spec.name in section
entry = {"spec": str(spec)}
if path != spec.name:
entry["path"] = path
def change_fn(section):
section[spec.name] = entry
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
@@ -81,43 +83,44 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
package.stage.steal_source(abspath)
def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
version = spec.versions.concrete_range_as_version
if not version:
# first check environment for a matching concrete spec
matching_specs = env.all_matching_specs(spec)
if matching_specs:
version = matching_specs[0].version
test_spec = spack.spec.Spec(f"{spec}@{version}")
for m_spec in matching_specs:
if not m_spec.satisfies(test_spec):
raise SpackError(
f"{spec.name}: has multiple concrete instances in the graph that can't be"
" satisified by a single develop spec. To use `spack develop` ensure one"
" of the following:"
f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
"this means they all share the same version)"
f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
" indicate what should be developed"
)
else:
# look up the maximum version so infintiy versions are preferred for develop
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
def develop(parser, args):
# Note: we could put develop specs in any scope, but I assume
# users would only ever want to do this for either (a) an active
# env or (b) a specified config file (e.g. that is included by
# an environment)
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
# an active env is not required if a scope is specified
env = spack.cmd.require_active_env(cmd_name="develop")
if not args.spec:
if args.clone is False:
raise SpackError("No spec provided to spack develop command")
# download all dev specs
for name, entry in env.dev_specs.items():
path = entry.get("path", name)
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
"""
Handle checking, cloning or overwriting source code
"""
assert spec.versions
if os.path.exists(abspath):
msg = "Skipping developer download of %s" % entry["spec"]
msg += " because its path already exists."
tty.msg(msg)
continue
if clone:
_clone(spec, src_path, force)
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
# are currently supported.
spec = spack.spec.parse_with_version_concrete(entry["spec"])
_retrieve_develop_source(spec, abspath)
if not clone and not os.path.exists(src_path):
raise SpackError(f"Provided path {src_path} does not exist")
if not env.dev_specs:
tty.warn("No develop specs to download")
return
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
raise SpackError("spack develop requires at most one named spec")
spec = specs[0]
version = spec.versions.concrete_range_as_version
if not version:
@@ -126,114 +129,40 @@ def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, for
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
# If user does not specify --path, we choose to create a directory in the
# active environment's directory, named after the spec
path = args.path or spec.name
if not os.path.isabs(path):
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
else:
abspath = path
def _update_config(spec, path):
find_fn = lambda section: spec.name in section
# clone default: only if the path doesn't exist
clone = args.clone
if clone is None:
clone = not os.path.exists(abspath)
entry = {"spec": str(spec)}
if path and path != spec.name:
entry["path"] = path
if not clone and not os.path.exists(abspath):
raise SpackError("Provided path %s does not exist" % abspath)
def change_fn(section):
section[spec.name] = entry
if clone:
if os.path.exists(abspath):
if args.force:
shutil.rmtree(abspath)
else:
msg = "Path %s already exists and cannot be cloned to." % abspath
msg += " Use `spack develop -f` to overwrite."
raise SpackError(msg)
spack.config.change_or_add("develop", find_fn, change_fn)
def update_env(
env: spack.environment.Environment,
spec: spack.spec.Spec,
specified_path: Optional[str] = None,
build_dir: Optional[str] = None,
):
"""
Update the spack.yaml file with additions or changes from a develop call
"""
tty.debug(f"Updating develop config for {env.name} transactionally")
if not specified_path:
dev_entry = env.dev_specs.get(spec.name)
if dev_entry:
specified_path = dev_entry.get("path", None)
_retrieve_develop_source(spec, abspath)
tty.debug("Updating develop config for {0} transactionally".format(env.name))
with env.write_transaction():
if build_dir is not None:
if args.build_directory is not None:
spack.config.add(
f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
"packages:{}:package_attributes:build_directory:{}".format(
spec.name, args.build_directory
),
env.scope_name,
)
# add develop spec and update path
_update_config(spec, specified_path)
def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
if os.path.exists(abspath):
if force:
shutil.rmtree(abspath)
else:
msg = f"Skipping developer download of {spec.name}"
msg += f" because its path {abspath} already exists."
tty.msg(msg)
return
# cloning can take a while and it's nice to get a message for the longer clones
tty.msg(f"Cloning source code for {spec}")
_retrieve_develop_source(spec, abspath)
def _abs_code_path(
env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
):
src_path = path if path else spec.name
return spack.util.path.canonicalize_path(src_path, default_wd=env.path)
def _dev_spec_generator(args, env):
"""
Generator function to loop over all the develop specs based on how the command is called
If no specs are supplied then loop over the develop specs listed in the environment.
"""
if not args.spec:
if args.clone is False:
raise SpackError("No spec provided to spack develop command")
for name, entry in env.dev_specs.items():
path = entry.get("path", name)
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
# are currently supported.
spec = spack.spec.parse_with_version_concrete(entry["spec"])
yield spec, abspath
else:
specs = spack.cmd.parse_specs(args.spec)
if (args.path or args.build_directory) and len(specs) > 1:
raise SpackError(
"spack develop requires at most one named spec when using the --path or"
" --build-directory arguments"
)
for spec in specs:
if args.recursive:
concrete_specs = env.all_matching_specs(spec)
if not concrete_specs:
tty.warn(
f"{spec.name} has no matching concrete specs in the environment and "
"will be skipped. `spack develop --recursive` requires a concretized"
" environment"
)
else:
for s in concrete_specs:
for node_spec in s.traverse(direction="parents", root=True):
tty.debug(f"Recursive develop for {node_spec.name}")
yield node_spec, _abs_code_path(env, node_spec, args.path)
else:
yield spec, _abs_code_path(env, spec, args.path)
def develop(parser, args):
env = spack.cmd.require_active_env(cmd_name="develop")
for spec, abspath in _dev_spec_generator(args, env):
assure_concrete_spec(env, spec)
setup_src_code(spec, abspath, clone=args.clone, force=args.force)
update_env(env, spec, args.path, args.build_directory)
_update_config(spec, path)

View File

@@ -98,7 +98,7 @@ def setup_parser(subparser):
"--show-full-compiler",
action="store_true",
dest="show_full_compiler",
help="(DEPRECATED) show full compiler specs. Currently it's a no-op",
help="show full compiler specs",
)
implicit_explicit = subparser.add_mutually_exclusive_group()
implicit_explicit.add_argument(
@@ -278,6 +278,7 @@ def root_decorator(spec, string):
# these enforce details in the root specs to show what the user asked for
namespaces=True,
show_flags=True,
show_full_compiler=True,
decorator=root_decorator,
variants=True,
)
@@ -300,6 +301,7 @@ def root_decorator(spec, string):
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespace=True,
show_flags=True,
show_full_compiler=True,
variants=True,
)
print()

Some files were not shown because too many files have changed in this diff Show More