Overhaul of the spack.compilers package

Now the package contains modules that help with using,
or detecting, compiler packages.

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
This commit is contained in:
Massimiliano Culpo 2024-09-25 19:40:03 +02:00
parent 5614e23f0b
commit fa40e6d021
No known key found for this signature in database
GPG Key ID: 3E52BB992233066C
33 changed files with 1192 additions and 757 deletions

8
lib/spack/env/cc vendored
View File

@ -41,10 +41,6 @@ SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG SPACK_LINKER_ARG
SPACK_SHORT_SPEC SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS SPACK_SYSTEM_DIRS
@ -223,6 +219,7 @@ for param in $params; do
if eval "test -z \"\${${param}:-}\""; then if eval "test -z \"\${${param}:-}\""; then
die "Spack compiler must be run from Spack! Input '$param' is missing." die "Spack compiler must be run from Spack! Input '$param' is missing."
fi fi
# FIXME (compiler as nodes) add checks on whether `SPACK_XX_RPATH` is set if `SPACK_XX` is set
done done
# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over. # eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
@ -346,6 +343,9 @@ case "$command" in
;; ;;
ld|ld.gold|ld.lld) ld|ld.gold|ld.lld)
mode=ld mode=ld
if [ -z "$SPACK_CC_RPATH_ARG" ]; then
comp="CXX"
fi
;; ;;
*) *)
die "Unknown compiler: $command" die "Unknown compiler: $command"

View File

@ -16,8 +16,7 @@
import archspec.cpu import archspec.cpu
import spack.compiler import spack.compilers.config
import spack.compilers
import spack.platforms import spack.platforms
import spack.spec import spack.spec
import spack.traverse import spack.traverse
@ -39,7 +38,7 @@ def __init__(self, configuration):
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration) self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler": def _valid_compiler_or_raise(self):
if str(self.host_platform) == "linux": if str(self.host_platform) == "linux":
compiler_name = "gcc" compiler_name = "gcc"
elif str(self.host_platform) == "darwin": elif str(self.host_platform) == "darwin":
@ -50,7 +49,7 @@ def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
compiler_name = "clang" compiler_name = "clang"
else: else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}") raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = spack.compilers.compilers_for_spec( candidates = spack.compilers.config.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture compiler_name, arch_spec=self.host_architecture
) )
if not candidates: if not candidates:

View File

@ -11,7 +11,7 @@
from llnl.util import tty from llnl.util import tty
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.environment import spack.environment
import spack.modules import spack.modules
@ -143,8 +143,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None: def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch() arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch): if not spack.compilers.config.compilers_for_arch(arch):
spack.compilers.find_compilers() spack.compilers.config.find_compilers()
@contextlib.contextmanager @contextlib.contextmanager

View File

@ -59,7 +59,7 @@
import spack.build_systems.meson import spack.build_systems.meson
import spack.build_systems.python import spack.build_systems.python
import spack.builder import spack.builder
import spack.compilers import spack.compilers.libraries
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
@ -73,7 +73,6 @@
import spack.store import spack.store
import spack.subprocess_context import spack.subprocess_context
import spack.util.executable import spack.util.executable
import spack.util.libc
from spack import traverse from spack import traverse
from spack.context import Context from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError from spack.error import InstallError, NoHeadersError, NoLibrariesError
@ -436,17 +435,15 @@ def set_wrapper_variables(pkg, env):
lib_path = os.path.join(pkg.prefix, libdir) lib_path = os.path.join(pkg.prefix, libdir)
rpath_dirs.insert(0, lib_path) rpath_dirs.insert(0, lib_path)
# FIXME (compiler as nodes): recover this filter
# filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
# pkg.compiler.default_dynamic_linker
# )
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
# branch above). link_dirs should be filtered with entries from _parse_link_paths. # branch above). link_dirs should be filtered with entries from _parse_link_paths.
link_dirs = list(dedupe(filter_system_paths(link_dirs))) link_dirs = list(dedupe(filter_system_paths(link_dirs)))
include_dirs = list(dedupe(filter_system_paths(include_dirs))) include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs))) rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
# rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
if default_dynamic_linker_filter:
rpath_dirs = default_dynamic_linker_filter(rpath_dirs)
# Spack managed directories include the stage, store and upstream stores. We extend this with # Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS). # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).

View File

@ -13,6 +13,7 @@
import spack.build_environment import spack.build_environment
import spack.builder import spack.builder
import spack.compilers.libraries
import spack.error import spack.error
import spack.package_base import spack.package_base
import spack.phase_callbacks import spack.phase_callbacks
@ -396,33 +397,44 @@ def _do_patch_libtool(self) -> None:
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper()) markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
# Replace empty linker flag prefixes: # Replace empty linker flag prefixes:
if self.pkg.compiler.name == "nag": if self.spec.satisfies("%nag"):
# Nag is mixed with gcc and g++, which are recognized correctly. # Nag is mixed with gcc and g++, which are recognized correctly.
# Therefore, we change only Fortran values: # Therefore, we change only Fortran values:
nag_pkg = self.spec["fortran"].package
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
x.filter( x.filter(
regex='^wl=""$', regex='^wl=""$',
repl='wl="{0}"'.format(self.pkg.compiler.linker_arg), repl=f'wl="{nag_pkg.linker_arg}"',
start_at="# ### BEGIN {0}".format(marker), start_at=f"# ### BEGIN {marker}",
stop_at="# ### END {0}".format(marker), stop_at=f"# ### END {marker}",
) )
else: else:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg)) compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
if compiler_spec:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
# Replace empty PIC flag values: # Replace empty PIC flag values:
for cc, marker in markers.items(): for compiler, marker in markers.items():
if compiler == "cc":
language = "c"
elif compiler == "cxx":
language = "cxx"
else:
language = "fortran"
if language not in self.spec:
continue
x.filter( x.filter(
regex='^pic_flag=""$', regex='^pic_flag=""$',
repl='pic_flag="{0}"'.format( repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
getattr(self.pkg.compiler, "{0}_pic_flag".format(cc)) start_at=f"# ### BEGIN {marker}",
), stop_at=f"# ### END {marker}",
start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker),
) )
# Other compiler-specific patches: # Other compiler-specific patches:
if self.pkg.compiler.name == "fj": if self.spec.satisfies("%fj"):
x.filter(regex="-nostdlib", repl="", string=True) x.filter(regex="-nostdlib", repl="", string=True)
rehead = r"/\S*/" rehead = r"/\S*/"
for o in [ for o in [
@ -435,7 +447,7 @@ def _do_patch_libtool(self) -> None:
r"crtendS\.o", r"crtendS\.o",
]: ]:
x.filter(regex=(rehead + o), repl="") x.filter(regex=(rehead + o), repl="")
elif self.pkg.compiler.name == "nag": elif self.spec.satisfies("%nag"):
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
start_at = "# ### BEGIN {0}".format(marker) start_at = "# ### BEGIN {0}".format(marker)

View File

@ -68,12 +68,7 @@ class CachedCMakeBuilder(CMakeBuilder):
@property @property
def cache_name(self): def cache_name(self):
return "{0}-{1}-{2}@{3}.cmake".format( return f"{self.pkg.name}-{self.spec.architecture.platform}-{self.spec.dag_hash()}.cmake"
self.pkg.name,
self.pkg.spec.architecture,
self.pkg.spec.compiler.name,
self.pkg.spec.compiler.version,
)
@property @property
def cache_path(self): def cache_path(self):
@ -116,7 +111,9 @@ def initconfig_compiler_entries(self):
# Fortran compiler is optional # Fortran compiler is optional
if "FC" in os.environ: if "FC" in os.environ:
spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"]) spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc) system_fc_entry = cmake_cache_path(
"CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
)
else: else:
spack_fc_entry = "# No Fortran compiler defined in spec" spack_fc_entry = "# No Fortran compiler defined in spec"
system_fc_entry = "# No Fortran compiler defined in spec" system_fc_entry = "# No Fortran compiler defined in spec"
@ -132,8 +129,8 @@ def initconfig_compiler_entries(self):
" " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]), " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
" " + spack_fc_entry, " " + spack_fc_entry,
"else()\n", "else()\n",
" " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc), " " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx), " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
" " + system_fc_entry, " " + system_fc_entry,
"endif()\n", "endif()\n",
] ]

View File

@ -15,6 +15,7 @@
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized from llnl.util.lang import classproperty, memoized
import spack.compilers.libraries
import spack.package_base import spack.package_base
import spack.paths import spack.paths
import spack.util.executable import spack.util.executable
@ -102,6 +103,7 @@ def determine_version(cls, exe: Path) -> str:
f"[{__file__}] Cannot detect a valid version for the executable " f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}" f"{str(exe)}, for package '{cls.name}': {e}"
) )
return ""
@classmethod @classmethod
def compiler_bindir(cls, prefix: Path) -> Path: def compiler_bindir(cls, prefix: Path) -> Path:
@ -200,6 +202,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
("fortran", "fortran", "F77", "SPACK_F77"), ("fortran", "fortran", "F77", "SPACK_F77"),
("fortran", "fortran", "FC", "SPACK_FC"), ("fortran", "fortran", "FC", "SPACK_FC"),
]: ]:
if language not in dependent_spec or dependent_spec[language].name != self.spec.name:
continue
if not hasattr(self, attr_name): if not hasattr(self, attr_name):
continue continue
@ -211,13 +216,15 @@ def setup_dependent_build_environment(self, env, dependent_spec):
wrapper_path = link_dir / self.link_paths.get(language) wrapper_path = link_dir / self.link_paths.get(language)
env.set(wrapper_var_name, str(wrapper_path)) env.set(wrapper_var_name, str(wrapper_path))
env.set(f"SPACK_{wrapper_var_name}_RPATH_ARG", self.rpath_arg)
env.set("SPACK_CC_RPATH_ARG", self.rpath_arg)
env.set("SPACK_CXX_RPATH_ARG", self.rpath_arg)
env.set("SPACK_F77_RPATH_ARG", self.rpath_arg)
env.set("SPACK_FC_RPATH_ARG", self.rpath_arg)
env.set("SPACK_LINKER_ARG", self.linker_arg) env.set("SPACK_LINKER_ARG", self.linker_arg)
detector = spack.compilers.libraries.CompilerPropertyDetector(self.spec)
paths = detector.implicit_rpaths()
if paths:
env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(paths))
# Check whether we want to force RPATH or RUNPATH # Check whether we want to force RPATH or RUNPATH
if spack.config.CONFIG.get("config:shared_linking:type") == "rpath": if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
env.set("SPACK_DTAGS_TO_STRIP", self.enable_new_dtags) env.set("SPACK_DTAGS_TO_STRIP", self.enable_new_dtags)
@ -240,14 +247,10 @@ def setup_dependent_build_environment(self, env, dependent_spec):
env.set("SPACK_COMPILER_SPEC", spec.format("{name}{@version}{variants}{/hash:7}")) env.set("SPACK_COMPILER_SPEC", spec.format("{name}{@version}{variants}{/hash:7}"))
if spec.extra_attributes: if spec.extra_attributes:
environment = spec.extra_attributes.get("environment")
if environment:
env.extend(spack.schema.environment.parse(environment))
extra_rpaths = spec.extra_attributes.get("extra_rpaths") extra_rpaths = spec.extra_attributes.get("extra_rpaths")
if extra_rpaths: if extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths) extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths) env.append_path("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in # Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default # the path. We add the compiler wrapper path, which includes default

View File

@ -75,7 +75,7 @@ def toolchain_version(self):
Override this method to select a specific version of the toolchain or change Override this method to select a specific version of the toolchain or change
selection heuristics. selection heuristics.
Default is whatever version of msvc has been selected by concretization""" Default is whatever version of msvc has been selected by concretization"""
return "v" + self.pkg.compiler.platform_toolset_ver return "v" + self.spec["msvc"].package.platform_toolset_ver
@property @property
def std_msbuild_args(self): def std_msbuild_args(self):

View File

@ -140,7 +140,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh $ source {prefix}/{component}/{version}/env/vars.sh
""" """
# Only if environment modifications are desired (default is +envmods) # Only if environment modifications are desired (default is +envmods)
if "~envmods" not in self.spec: if "+envmods" in self.spec:
env.extend( env.extend(
EnvironmentModifications.from_sourcing_file( EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args self.component_prefix.env.join("vars.sh"), *self.env_script_args

View File

@ -12,7 +12,7 @@
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize from llnl.util.tty.color import colorize
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.spec import spack.spec
from spack.cmd.common import arguments from spack.cmd.common import arguments
@ -88,7 +88,7 @@ def compiler_find(args):
) )
paths = args.add_paths or None paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers( new_compilers = spack.compilers.config.find_compilers(
path_hints=paths, scope=args.scope, max_workers=args.jobs path_hints=paths, scope=args.scope, max_workers=args.jobs
) )
if new_compilers: if new_compilers:
@ -101,11 +101,11 @@ def compiler_find(args):
else: else:
tty.msg("Found no new compilers") tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:") tty.msg("Compilers are defined in the following files:")
colify(spack.compilers.compiler_config_files(), indent=4) colify(spack.compilers.config.compiler_config_files(), indent=4)
def compiler_remove(args): def compiler_remove(args):
remover = spack.compilers.CompilerRemover(spack.config.CONFIG) remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG)
candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope) candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope)
if not candidates: if not candidates:
tty.die(f"No compiler matches '{args.compiler_spec}'") tty.die(f"No compiler matches '{args.compiler_spec}'")
@ -133,7 +133,7 @@ def compiler_remove(args):
def compiler_info(args): def compiler_info(args):
"""Print info about all compilers matching a spec.""" """Print info about all compilers matching a spec."""
query = spack.spec.Spec(args.compiler_spec) query = spack.spec.Spec(args.compiler_spec)
all_compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False) all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
compilers = [x for x in all_compilers if x.satisfies(query)] compilers = [x for x in all_compilers if x.satisfies(query)]
@ -171,7 +171,7 @@ def compiler_info(args):
def compiler_list(args): def compiler_list(args):
compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False) compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
# If there are no compilers in any scope, and we're outputting to a tty, give a # If there are no compilers in any scope, and we're outputting to a tty, give a
# hint to the user. # hint to the user.
@ -184,7 +184,7 @@ def compiler_list(args):
tty.msg(msg) tty.msg(msg)
return return
index = index_by(compilers, spack.compilers.name_os_target) index = index_by(compilers, spack.compilers.config.name_os_target)
tty.msg("Available compilers") tty.msg("Available compilers")

View File

@ -2,424 +2,3 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains functions related to finding compilers on the system,
and configuring Spack to use multiple compilers.
"""
import os
import re
import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty

import spack.config
import spack.detection
import spack.error
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
from spack.operating_systems import windows_os
from spack.util.environment import get_path
#: Mapping from compiler package names to the legacy compiler names used in old configs
package_name_to_compiler_name = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}

#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def compiler_config_files():
    """Return the configuration files, in every writable scope, that define compilers.

    A scope contributes its ``packages`` file when it declares compiler externals, and its
    ``compilers`` file when it has a legacy ``compilers`` section.
    """
    configuration = spack.config.CONFIG
    result = []
    for scope in configuration.writable_scopes:
        scope_name = scope.name
        if CompilerFactory.from_packages_yaml(configuration, scope=scope_name):
            result.append(configuration.get_config_filename(scope_name, "packages"))
        if configuration.get("compilers", scope=scope_name):
            result.append(configuration.get_config_filename(scope_name, "compilers"))
    return result
def add_compiler_to_config(compiler, scope=None) -> None:
    """Add a Compiler object to the configuration, at the required scope.

    Args:
        compiler: compiler object to add
        scope: configuration scope to modify; the merged configuration if None

    Raises:
        NotImplementedError: always, until this is reimplemented for compilers-as-nodes.
    """
    # FIXME (compiler as nodes): still needed to read Cray manifest
    # Fixed typo in the message: "node implemented" -> "not implemented"
    raise NotImplementedError("'add_compiler_to_config' not implemented yet.")
def find_compilers(
    path_hints: Optional[List[str]] = None,
    *,
    scope: Optional[str] = None,
    max_workers: Optional[int] = None,
) -> List["spack.spec.Spec"]:
    """Searches for compiler in the paths given as argument. If any new compiler is found, the
    configuration is updated, and the list of new compiler objects is returned.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: configuration scope to modify
        max_workers: number of processes used to search for compilers
    """
    if path_hints is None:
        path_hints = get_path("PATH")
    default_paths = fs.search_paths_for_executables(*path_hints)
    if sys.platform == "win32":
        # Windows compilers usually live outside PATH; add the platform's search dirs
        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)

    # Packages tagged "compiler" are the detection candidates
    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
    detected_packages = spack.detection.by_path(
        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
    )

    # NOTE(review): requires a module-level `import spack.detection`, which was missing here
    new_compilers = spack.detection.update_configuration(
        detected_packages, buildable=True, scope=scope
    )
    return new_compilers
def select_new_compilers(compilers, scope=None):
    """Given a list of compilers, remove those that are already defined in
    the configuration.
    """
    # FIXME (compiler as nodes): still needed to read Cray manifest
    # NOTE(review): this delegates to `compilers_for_spec`, which currently raises
    # NotImplementedError — so this function cannot succeed until that is reimplemented.
    compilers_not_in_config = []
    for c in compilers:
        arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
        same_specs = compilers_for_spec(
            c.spec, arch_spec=arch_spec, scope=scope, init_config=False
        )
        if not same_specs:
            # No equivalent compiler in configuration: keep it as "new"
            compilers_not_in_config.append(c)
    return compilers_not_in_config
def supported_compilers() -> List[str]:
    """Returns all the currently supported compiler packages"""
    compiler_packages = spack.repo.PATH.packages_with_tags(COMPILER_TAG)
    return sorted(compiler_packages)
def all_compilers(
    scope: Optional[str] = None, init_config: bool = True
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the current global configuration.

    Args:
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
        init_config: if True, search for compilers if none is found in configuration.
    """
    found = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
    if found or not init_config:
        return found

    # Nothing configured yet: run detection once, then re-read the configuration
    find_compilers(scope=scope)
    return all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
def all_compilers_from(
    configuration: "spack.config.ConfigurationType", scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the current global configuration.

    Args:
        configuration: configuration to be queried
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
    """
    compilers = CompilerFactory.from_packages_yaml(configuration, scope=scope)

    # Legacy compilers.yaml entries are only merged in while the deprecation opt-in is off
    if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") == "1":
        return compilers

    legacy_compilers = CompilerFactory.from_compilers_yaml(configuration, scope=scope)
    if legacy_compilers:
        # FIXME (compiler as nodes): write how to update the file. Maybe an ad-hoc command
        warnings.warn(
            "Some compilers are still defined in 'compilers.yaml', which has been deprecated "
            "in v0.23. Those configuration files will be ignored from Spack v0.25.\n"
        )
        for legacy in legacy_compilers:
            # Keep a legacy entry only when no packages.yaml compiler already covers it
            if not any(c.satisfies(f"{legacy.name}@{legacy.versions}") for c in compilers):
                compilers.append(legacy)

    return compilers
class CompilerRemover:
    """Removes compiler from configuration."""

    def __init__(self, configuration: "spack.config.ConfigurationType") -> None:
        self.configuration = configuration
        # (scope, modified yaml) pairs staged by mark_compilers(), written back by flush()
        self.marked_packages_yaml: List[Tuple[str, Any]] = []
        self.marked_compilers_yaml: List[Tuple[str, Any]] = []

    def mark_compilers(
        self, *, match: str, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Marks compilers to be removed in configuration, and returns a corresponding list
        of specs.

        Args:
            match: constraint that the compiler must match to be removed.
            scope: scope where to remove the compiler. If None, all writeable scopes are checked.
        """
        self.marked_packages_yaml = []
        self.marked_compilers_yaml = []
        candidate_scopes = [scope]
        if scope is None:
            candidate_scopes = [x.name for x in self.configuration.writable_scopes]

        all_removals = self._mark_in_packages_yaml(match, candidate_scopes)
        all_removals.extend(self._mark_in_compilers_yaml(match, candidate_scopes))

        return all_removals

    def _mark_in_packages_yaml(self, match, candidate_scopes):
        # Scan "packages" sections for external entries of compiler packages matching `match`
        compiler_package_names = supported_compilers()
        all_removals = []
        for current_scope in candidate_scopes:
            packages_yaml = self.configuration.get("packages", scope=current_scope)
            if not packages_yaml:
                continue

            removed_from_scope = []
            for name, entry in packages_yaml.items():
                if name not in compiler_package_names:
                    continue

                externals_config = entry.get("externals", None)
                if not externals_config:
                    continue

                def _partition_match(external_yaml):
                    # Keep the entries that do NOT satisfy the removal constraint
                    s = CompilerFactory.from_external_yaml(external_yaml)
                    return not s.satisfies(match)

                to_keep, to_remove = llnl.util.lang.stable_partition(
                    externals_config, _partition_match
                )
                if not to_remove:
                    continue

                removed_from_scope.extend(to_remove)
                entry["externals"] = to_keep

            if not removed_from_scope:
                continue

            # Stage the whole modified section; nothing is written until flush()
            self.marked_packages_yaml.append((current_scope, packages_yaml))
            all_removals.extend(
                [CompilerFactory.from_external_yaml(x) for x in removed_from_scope]
            )
        return all_removals

    def _mark_in_compilers_yaml(self, match, candidate_scopes):
        # Legacy compilers.yaml is ignored entirely when the deprecation opt-in is set
        if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") == "1":
            return []

        all_removals = []
        for current_scope in candidate_scopes:
            compilers_yaml = self.configuration.get("compilers", scope=current_scope)
            if not compilers_yaml:
                continue

            def _partition_match(entry):
                # A legacy entry may expand to several specs; drop it if ANY of them matches
                external_specs = CompilerFactory.from_legacy_yaml(entry["compiler"])
                return not any(x.satisfies(match) for x in external_specs)

            to_keep, to_remove = llnl.util.lang.stable_partition(compilers_yaml, _partition_match)
            if not to_remove:
                continue

            compilers_yaml[:] = to_keep
            self.marked_compilers_yaml.append((current_scope, compilers_yaml))
            for entry in to_remove:
                all_removals.extend(CompilerFactory.from_legacy_yaml(entry["compiler"]))
        return all_removals

    def flush(self):
        """Removes from configuration the specs that have been marked by the previous call
        of ``remove_compilers``.
        """
        for scope, packages_yaml in self.marked_packages_yaml:
            self.configuration.set("packages", packages_yaml, scope=scope)
        for scope, compilers_yaml in self.marked_compilers_yaml:
            self.configuration.set("compilers", compilers_yaml, scope=scope)
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.

    Returns an empty list if none are found.

    Raises:
        NotImplementedError: always, until this is reimplemented for compilers-as-nodes.
    """
    # FIXME (compiler as nodes): to be removed, or reimplemented
    raise NotImplementedError("still to be implemented")
def compilers_for_arch(arch_spec, scope=None):
    """Return the configured compilers whose OS and target match ``arch_spec``.

    Args:
        arch_spec: architecture to match against
        scope: configuration scope to query; the merged configuration if None
    """
    # FIXME (compiler as nodes): this needs a better implementation
    candidates = all_compilers_from(spack.config.CONFIG, scope=scope)
    wanted_os = str(arch_spec.os)
    wanted_target = str(arch_spec.target)

    result = []
    for candidate in candidates:
        _, candidate_os, candidate_target = name_os_target(candidate)
        if candidate_os != wanted_os:
            continue
        if str(archspec.cpu.TARGETS.get(candidate_target)) != wanted_target:
            continue
        result.append(candidate)
    return result
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class.

    Raises:
        NotImplementedError: always, until this is reimplemented for compilers-as-nodes.
    """
    # FIXME (compiler as nodes): to be removed, or reimplemented
    raise NotImplementedError("still to be implemented")
# Key under which an external spec stores auxiliary data in packages.yaml
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
# Top-level section name of the legacy compilers.yaml format
_COMPILERS_KEY = "compilers"
# Language keys used inside "extra_attributes" to describe a compiler
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
def name_os_target(spec: "spack.spec.Spec") -> Tuple[str, str, str]:
    """Return a ``(name, os, target)`` triple for ``spec``, filling in host defaults.

    Args:
        spec: compiler spec being described

    Returns:
        The spec name, the operating system string, and the target string. When the spec has
        no architecture, or its architecture lacks an OS/target, the host platform's defaults
        are used instead.
    """
    if not spec.architecture:
        host_platform = spack.platforms.host()
        operating_system = host_platform.operating_system("default_os")
        target = host_platform.target("default_target")
    else:
        target = spec.architecture.target
        if not target:
            target = spack.platforms.host().target("default_target")
        # Removed a redundant `target = target` self-assignment here

        operating_system = spec.os
        if not operating_system:
            operating_system = spack.platforms.host().operating_system("default_os")

    return spec.name, str(operating_system), str(target)
class CompilerFactory:
    """Class aggregating all ways of constructing a list of compiler specs from config entries."""

    # Caches keyed on the string form of a YAML entry, to avoid re-parsing / re-detecting
    # the same configuration entry across repeated queries
    _PACKAGES_YAML_CACHE: Dict[str, Optional["spack.spec.Spec"]] = {}
    _COMPILERS_YAML_CACHE: Dict[str, List["spack.spec.Spec"]] = {}

    @staticmethod
    def from_packages_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "packages" section of the configuration"""
        compilers = []
        compiler_package_names = supported_compilers()
        packages_yaml = configuration.get("packages", scope=scope)
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            compiler_specs = []
            for current_external in externals_config:
                key = str(current_external)
                if key not in CompilerFactory._PACKAGES_YAML_CACHE:
                    CompilerFactory._PACKAGES_YAML_CACHE[key] = CompilerFactory.from_external_yaml(
                        current_external
                    )

                compiler = CompilerFactory._PACKAGES_YAML_CACHE[key]
                # from_external_yaml returns None for entries missing "extra_attributes"
                if compiler:
                    compiler_specs.append(compiler)

            compilers.extend(compiler_specs)
        return compilers

    @staticmethod
    def from_external_yaml(config: Dict[str, Any]) -> Optional["spack.spec.Spec"]:
        """Returns a compiler spec from an external definition from packages.yaml."""
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None

        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_path=config.get("prefix"),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        if result.architecture:
            result.architecture.complete_with_defaults()
        result._finalize_concretization()
        return result

    @staticmethod
    def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List["spack.spec.Spec"]:
        """Returns a list of external specs, corresponding to a compiler entry
        from compilers.yaml.
        """
        # Local import, presumably to avoid an import cycle — TODO confirm
        from spack.detection.path import ExecutablesFinder

        # FIXME (compiler as nodes): should we look at targets too?
        result = []
        candidate_paths = [x for x in compiler_dict["paths"].values() if x is not None]
        finder = ExecutablesFinder()

        for pkg_name in spack.repo.PATH.packages_with_tags("compiler"):
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            # Only pass along the candidate paths whose basename matches this package's patterns
            pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
            filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
            detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
            result.extend(detected)

        for item in result:
            if item.architecture:
                item.architecture.complete_with_defaults()
            item._finalize_concretization()
        return result

    @staticmethod
    def from_compilers_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "compilers" section of the configuration"""
        result = []
        for item in configuration.get("compilers", scope=scope):
            key = str(item)
            if key not in CompilerFactory._COMPILERS_YAML_CACHE:
                CompilerFactory._COMPILERS_YAML_CACHE[key] = CompilerFactory.from_legacy_yaml(
                    item["compiler"]
                )

            result.extend(CompilerFactory._COMPILERS_YAML_CACHE[key])
        return result
class UnknownCompilerError(spack.error.SpackError):
    """Raised when a compiler name is requested that Spack does not support."""

    def __init__(self, compiler_name):
        super().__init__(f"Spack doesn't support the requested compiler: {compiler_name}")

View File

@ -0,0 +1,432 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains functions related to finding compilers on the system,
and configuring Spack to use multiple compilers.
"""
import os
import re
import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple
import archspec.cpu
import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
import spack.config
import spack.detection
import spack.error
import spack.platforms
import spack.repo
import spack.spec
from spack.operating_systems import windows_os
from spack.util.environment import get_path
#: Mapping from a compiler package name to the corresponding legacy compiler
#: name used in compilers.yaml entries (e.g. the "llvm" package is "clang")
package_name_to_compiler_name = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}

#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def compiler_config_files():
    """Returns the configuration files, in writable scopes, that define compilers.

    Both "packages" files containing compiler externals and legacy "compilers"
    files are taken into account.
    """
    cfg = spack.config.CONFIG
    result = []
    for scope in cfg.writable_scopes:
        scope_name = scope.name
        if CompilerFactory.from_packages_yaml(cfg, scope=scope_name):
            result.append(cfg.get_config_filename(scope_name, "packages"))
        if cfg.get("compilers", scope=scope_name):
            result.append(cfg.get_config_filename(scope_name, "compilers"))
    return result
def add_compiler_to_config(new_compilers, *, scope=None) -> None:
    """Add a Compiler object to the configuration, at the required scope."""
    # FIXME (compiler as nodes): still needed to read Cray manifest
    grouped: Dict[str, List["spack.spec.Spec"]] = {}
    for compiler in new_compilers:
        grouped.setdefault(compiler.name, []).append(compiler)
    spack.detection.update_configuration(grouped, buildable=True, scope=scope)
def find_compilers(
    path_hints: Optional[List[str]] = None,
    *,
    scope: Optional[str] = None,
    max_workers: Optional[int] = None,
) -> List["spack.spec.Spec"]:
    """Searches for compiler in the paths given as argument. If any new compiler is found, the
    configuration is updated, and the list of new compiler objects is returned.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: configuration scope to modify
        max_workers: number of processes used to search for compilers
    """
    if path_hints is None:
        path_hints = get_path("PATH")
    default_paths = fs.search_paths_for_executables(*path_hints)
    if sys.platform == "win32":
        # On Windows also look in the platform-specific compiler install locations
        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
    # full=True returns fully qualified package names (with repo namespace)
    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

    detected_packages = spack.detection.by_path(
        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
    )

    # update_configuration persists the detections and returns only the new ones
    new_compilers = spack.detection.update_configuration(
        detected_packages, buildable=True, scope=scope
    )
    return new_compilers
def select_new_compilers(
    candidates: List["spack.spec.Spec"], *, scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Given a list of compilers, remove those that are already defined in
    the configuration.
    """
    known = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
    return [candidate for candidate in candidates if candidate not in known]
def supported_compilers() -> List[str]:
    """Returns all the currently supported compiler packages"""
    # Any package tagged as a compiler in the known repositories qualifies
    names = spack.repo.PATH.packages_with_tags(COMPILER_TAG)
    return sorted(names)
def all_compilers(
    scope: Optional[str] = None, init_config: bool = True
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the current global configuration.

    Args:
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
        init_config: if True, search for compilers if none is found in configuration.
    """
    found = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
    if found or not init_config:
        return found
    # Nothing configured yet: run detection (which updates the config), then re-read it
    find_compilers(scope=scope)
    return all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
def all_compilers_from(
    configuration: "spack.config.ConfigurationType", scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the current global configuration.

    Args:
        configuration: configuration to be queried
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
    """
    compilers = CompilerFactory.from_packages_yaml(configuration, scope=scope)

    # Legacy compilers.yaml entries are still honored unless explicitly disabled
    # through the environment variable below
    if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") != "1":
        legacy_compilers = CompilerFactory.from_compilers_yaml(configuration, scope=scope)
        if legacy_compilers:
            # FIXME (compiler as nodes): write how to update the file. Maybe an ad-hoc command
            warnings.warn(
                "Some compilers are still defined in 'compilers.yaml', which has been deprecated "
                "in v0.23. Those configuration files will be ignored from Spack v0.25.\n"
            )
            # Only keep legacy entries whose name@version is not already covered
            # by a packages.yaml compiler
            for legacy in legacy_compilers:
                if not any(c.satisfies(f"{legacy.name}@{legacy.versions}") for c in compilers):
                    compilers.append(legacy)

    return compilers
class CompilerRemover:
    """Removes compiler from configuration.

    Usage: call :meth:`mark_compilers` to select the entries to drop (this only
    mutates in-memory YAML), then :meth:`flush` to write the result back.
    """

    def __init__(self, configuration: "spack.config.ConfigurationType") -> None:
        self.configuration = configuration
        # (scope name, modified packages.yaml payload) pairs pending a flush
        self.marked_packages_yaml: List[Tuple[str, Any]] = []
        # (scope name, modified compilers.yaml payload) pairs pending a flush
        self.marked_compilers_yaml: List[Tuple[str, Any]] = []

    def mark_compilers(
        self, *, match: str, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Marks compilers to be removed in configuration, and returns a corresponding list
        of specs.

        Args:
            match: constraint that the compiler must match to be removed.
            scope: scope where to remove the compiler. If None, all writeable scopes are checked.
        """
        self.marked_packages_yaml = []
        self.marked_compilers_yaml = []
        candidate_scopes = [scope]
        if scope is None:
            candidate_scopes = [x.name for x in self.configuration.writable_scopes]

        all_removals = self._mark_in_packages_yaml(match, candidate_scopes)
        all_removals.extend(self._mark_in_compilers_yaml(match, candidate_scopes))

        return all_removals

    def _mark_in_packages_yaml(self, match, candidate_scopes):
        # Remove matching compiler externals from packages.yaml in each scope;
        # returns the removed entries as specs.
        compiler_package_names = supported_compilers()
        all_removals = []
        for current_scope in candidate_scopes:
            packages_yaml = self.configuration.get("packages", scope=current_scope)
            if not packages_yaml:
                continue

            removed_from_scope = []
            for name, entry in packages_yaml.items():
                if name not in compiler_package_names:
                    continue

                externals_config = entry.get("externals", None)
                if not externals_config:
                    continue

                # True for entries to KEEP (those that do not satisfy the match)
                def _partition_match(external_yaml):
                    s = CompilerFactory.from_external_yaml(external_yaml)
                    return not s.satisfies(match)

                to_keep, to_remove = llnl.util.lang.stable_partition(
                    externals_config, _partition_match
                )
                if not to_remove:
                    continue

                removed_from_scope.extend(to_remove)
                entry["externals"] = to_keep

            if not removed_from_scope:
                continue

            self.marked_packages_yaml.append((current_scope, packages_yaml))
            all_removals.extend(
                [CompilerFactory.from_external_yaml(x) for x in removed_from_scope]
            )
        return all_removals

    def _mark_in_compilers_yaml(self, match, candidate_scopes):
        # Same as _mark_in_packages_yaml, but for legacy compilers.yaml entries;
        # skipped entirely when the legacy format is deprecated via env var.
        if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") == "1":
            return []

        all_removals = []
        for current_scope in candidate_scopes:
            compilers_yaml = self.configuration.get("compilers", scope=current_scope)
            if not compilers_yaml:
                continue

            # True for entries to KEEP: none of the specs from the legacy entry match
            def _partition_match(entry):
                external_specs = CompilerFactory.from_legacy_yaml(entry["compiler"])
                return not any(x.satisfies(match) for x in external_specs)

            to_keep, to_remove = llnl.util.lang.stable_partition(
                compilers_yaml, _partition_match
            )
            if not to_remove:
                continue

            # In-place slice assignment keeps the same YAML list object
            compilers_yaml[:] = to_keep
            self.marked_compilers_yaml.append((current_scope, compilers_yaml))
            for entry in to_remove:
                all_removals.extend(CompilerFactory.from_legacy_yaml(entry["compiler"]))
        return all_removals

    def flush(self):
        """Removes from configuration the specs that have been marked by the previous call
        of ``remove_compilers``.
        """
        for scope, packages_yaml in self.marked_packages_yaml:
            self.configuration.set("packages", packages_yaml, scope=scope)
        for scope, compilers_yaml in self.marked_compilers_yaml:
            self.configuration.set("compilers", compilers_yaml, scope=scope)
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.

    Returns an empty list if none are found.
    """
    # FIXME (compiler as nodes): to be removed, or reimplemented
    msg = "still to be implemented"
    raise NotImplementedError(msg)
def compilers_for_arch(arch_spec, scope=None):
    """Returns the compilers in configuration matching the given architecture.

    FIXME (compiler as nodes): this needs a better implementation
    """
    wanted_os = str(arch_spec.os)
    wanted_target = str(arch_spec.target)

    def _matches(candidate):
        # Compare os names literally, and targets after archspec normalization
        _, candidate_os, candidate_target = name_os_target(candidate)
        if candidate_os != wanted_os:
            return False
        return str(archspec.cpu.TARGETS.get(candidate_target)) == wanted_target

    compilers = all_compilers_from(spack.config.CONFIG, scope=scope)
    return [candidate for candidate in compilers if _matches(candidate)]
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    # FIXME (compiler as nodes): to be removed, or reimplemented
    msg = "still to be implemented"
    raise NotImplementedError(msg)
#: Key that marks an external spec entry as usable as a compiler
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
#: Configuration keys used when reading compiler entries
#: NOTE(review): these three constants are not referenced in this chunk — confirm usage
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
def name_os_target(spec: "spack.spec.Spec") -> Tuple[str, str, str]:
    """Returns the (name, os, target) triplet for a compiler spec.

    Pieces missing from the spec (architecture, operating system or target)
    are filled in with the host platform's defaults.

    Args:
        spec: compiler spec to inspect

    Returns:
        Tuple of (spec name, operating system string, target string)
    """
    if not spec.architecture:
        # No architecture on the spec at all: use host defaults for both
        host_platform = spack.platforms.host()
        operating_system = host_platform.operating_system("default_os")
        target = host_platform.target("default_target")
    else:
        target = spec.architecture.target
        if not target:
            target = spack.platforms.host().target("default_target")

        operating_system = spec.os
        if not operating_system:
            host_platform = spack.platforms.host()
            operating_system = host_platform.operating_system("default_os")

    return spec.name, str(operating_system), str(target)
class CompilerFactory:
    """Class aggregating all ways of constructing a list of compiler specs from config entries."""

    #: Cache of packages.yaml external entry -> compiler spec (or None if unusable)
    _PACKAGES_YAML_CACHE: Dict[str, Optional["spack.spec.Spec"]] = {}
    #: Cache of legacy compilers.yaml entry -> detected compiler specs
    _COMPILERS_YAML_CACHE: Dict[str, List["spack.spec.Spec"]] = {}
    #: Lazily initialized generic target family (archspec), shared by all specs
    _GENERIC_TARGET = None

    @staticmethod
    def from_packages_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "packages" section of the configuration"""
        compilers = []
        compiler_package_names = supported_compilers()
        packages_yaml = configuration.get("packages", scope=scope)
        for name, entry in packages_yaml.items():
            # Only packages tagged as compilers are considered
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            compiler_specs = []
            for current_external in externals_config:
                # Conversion is cached keyed on the entry's string representation
                key = str(current_external)
                if key not in CompilerFactory._PACKAGES_YAML_CACHE:
                    CompilerFactory._PACKAGES_YAML_CACHE[key] = CompilerFactory.from_external_yaml(
                        current_external
                    )

                compiler = CompilerFactory._PACKAGES_YAML_CACHE[key]
                # from_external_yaml may return None for entries without extra_attributes
                if compiler:
                    compiler_specs.append(compiler)

            compilers.extend(compiler_specs)
        return compilers

    @staticmethod
    def from_external_yaml(config: Dict[str, Any]) -> Optional["spack.spec.Spec"]:
        """Returns a compiler spec from an external definition from packages.yaml.

        Returns None when the entry lacks ``extra_attributes`` and thus cannot
        be used as a compiler.
        """
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"

        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_path=config.get("prefix"),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        CompilerFactory._finalize_external_concretization(result)
        return result

    @staticmethod
    def _finalize_external_concretization(abstract_spec):
        # Complete the architecture (defaulting to the generic host family target)
        # and mark the spec concrete, in place.
        if CompilerFactory._GENERIC_TARGET is None:
            CompilerFactory._GENERIC_TARGET = archspec.cpu.host().family

        if abstract_spec.architecture:
            abstract_spec.architecture.complete_with_defaults()
        else:
            abstract_spec.constrain(spack.spec.Spec.default_arch())
        # Always pin the target to the generic family, regardless of the above
        abstract_spec.architecture.target = CompilerFactory._GENERIC_TARGET
        abstract_spec._finalize_concretization()

    @staticmethod
    def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List["spack.spec.Spec"]:
        """Returns a list of external specs, corresponding to a compiler entry
        from compilers.yaml.
        """
        # Imported lazily to avoid a circular import at module load time
        from spack.detection.path import ExecutablesFinder

        # FIXME (compiler as nodes): should we look at targets too?
        result = []
        # Discard languages with no configured executable (null entries in YAML)
        candidate_paths = [x for x in compiler_dict["paths"].values() if x is not None]
        finder = ExecutablesFinder()

        for pkg_name in spack.repo.PATH.packages_with_tags("compiler"):
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            # Only pass the paths whose basename matches the package's patterns
            pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
            filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
            detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
            result.extend(detected)

        for item in result:
            CompilerFactory._finalize_external_concretization(item)
        return result

    @staticmethod
    def from_compilers_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "compilers" section of the configuration"""
        result: List["spack.spec.Spec"] = []
        for item in configuration.get("compilers", scope=scope):
            # Conversion of a legacy entry is expensive; memoize it
            key = str(item)
            if key not in CompilerFactory._COMPILERS_YAML_CACHE:
                CompilerFactory._COMPILERS_YAML_CACHE[key] = CompilerFactory.from_legacy_yaml(
                    item["compiler"]
                )
            result.extend(CompilerFactory._COMPILERS_YAML_CACHE[key])
        return result
class UnknownCompilerError(spack.error.SpackError):
    """Raised when a compiler name is requested that Spack does not support."""

    def __init__(self, compiler_name):
        super().__init__(f"Spack doesn't support the requested compiler: {compiler_name}")

View File

@ -0,0 +1,23 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from ..error import SpackError
class CompilerAccessError(SpackError):
    """Raised when a compiler's executables are missing or not executable."""

    def __init__(self, compiler, paths):
        msg = (
            f"Compiler '{compiler.spec}' has executables that are missing"
            f" or are not executable: {paths}"
        )
        super().__init__(msg)
class UnsupportedCompilerFlag(SpackError):
    """Raised when a compiler does not implement a requested flag property."""

    def __init__(self, compiler, feature, flag_name, ver_string=None):
        shown_version = ver_string if ver_string else compiler.version
        msg = (
            f"{compiler.name} ({shown_version}) does not support"
            f" {feature} (as compiler.{flag_name}). If you think it should, please edit the "
            f"compiler.{compiler.name} subclass to implement the {flag_name} property and submit "
            f"a pull request or issue."
        )
        super().__init__(msg)

View File

@ -0,0 +1,26 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import List, Tuple
def tokenize_flags(flags_values: str, propagate: bool = False) -> List[Tuple[str, bool]]:
    """Given a compiler flag specification as a string, this returns a list
    where the entries are the flags. For compiler options which set values
    using the syntax "-flag value", this function groups flags and their
    values together. Any token not preceded by a "-" is considered the
    value of a prior flag."""
    tokens = flags_values.split()
    if not tokens:
        return []

    # Group tokens: a token without a leading "-" is glued to the previous group
    groups: List[str] = []
    for token in tokens:
        if groups and not token.startswith("-"):
            groups[-1] = groups[-1] + " " + token
        else:
            groups.append(token)

    return [(flag, propagate) for flag in groups]

View File

@ -0,0 +1,426 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import hashlib
import json
import os
import re
import shutil
import stat
import sys
import tempfile
import typing
from typing import Dict, List, Optional, Set, Tuple
import llnl.path
import llnl.util.lang
from llnl.util import tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
import spack.caches
import spack.util.libc
from spack.util.environment import filter_system_paths
from spack.util.file_cache import FileCache
if typing.TYPE_CHECKING:
import spack.spec
#: regex for parsing linker lines (matches ld, <triple>-ld, link, collect2)
_LINKER_LINE = re.compile(r"^( *|.*[/\\])" r"(link|ld|([^/\\]+-)?ld|collect2)" r"[^/\\]*( |$)")

#: components of linker lines to ignore (version banners, env assignments, /ldfe)
_LINKER_LINE_IGNORE = re.compile(r"(collect2 version|^[A-Za-z0-9_]+=|/ldfe )")

#: regex to match linker search paths, e.g. ``-L/usr/lib``
_LINK_DIR_ARG = re.compile(r"^-L(.:)?(?P<dir>[/\\].*)")

#: regex to match linker library path arguments, e.g. ``/LIBPATH:dir``
_LIBPATH_ARG = re.compile(r"^[-/](LIBPATH|libpath):(?P<dir>.*)")
@llnl.path.system_path_filter
def parse_non_system_link_dirs(compiler_debug_output: str) -> List[str]:
    """Parses link paths out of compiler debug output.

    Args:
        compiler_debug_output: compiler debug output as a string

    Returns:
        Implicit link paths parsed from the compiler output
    """
    candidates = _parse_link_paths(compiler_debug_output)

    # Remove directories that do not exist. Some versions of the Cray compiler
    # report nonexistent directories
    candidates = filter_non_existing_dirs(candidates)

    # 'filter_system_paths' only drops exact matches of system directories,
    # while 'in_system_subdirectory' also drops paths living below one
    candidates = filter_system_paths(candidates)
    return [p for p in candidates if not in_system_subdirectory(p)]
def filter_non_existing_dirs(dirs):
    """Return only the entries of *dirs* that exist on disk and are directories."""
    return list(filter(os.path.isdir, dirs))
def in_system_subdirectory(path):
    """True if *path* lies inside one of the canonical system library directories."""
    system_dirs = (
        "/lib/",
        "/lib64/",
        "/usr/lib/",
        "/usr/lib64/",
        "/usr/local/lib/",
        "/usr/local/lib64/",
    )
    return any(path_contains_subdirectory(path, candidate) for candidate in system_dirs)
def _parse_link_paths(string):
    """Parse implicit link paths from compiler debug output.

    This gives the compiler runtime library paths that we need to add to
    the RPATH of generated binaries and libraries. It allows us to
    ensure, e.g., that codes load the right libstdc++ for their compiler.
    """
    # State flag: inside a "Library search paths:" section, tab-indented lines
    # are directory entries
    lib_search_paths = False
    raw_link_dirs = []
    for line in string.splitlines():
        if lib_search_paths:
            if line.startswith("\t"):
                raw_link_dirs.append(line[1:])
                continue
            else:
                # First non-indented line terminates the section; the line is
                # then also considered as a possible linker line below
                lib_search_paths = False
        elif line.startswith("Library search paths:"):
            lib_search_paths = True

        # Only inspect lines that invoke a linker, skipping known noise
        if not _LINKER_LINE.match(line):
            continue
        if _LINKER_LINE_IGNORE.match(line):
            continue
        tty.debug(f"implicit link dirs: link line: {line}")

        # next_arg is set when "-L"/"-Y" appears as a separate token, so the
        # following token is the directory itself
        next_arg = False
        for arg in line.split():
            if arg in ("-L", "-Y"):
                next_arg = True
                continue

            if next_arg:
                raw_link_dirs.append(arg)
                next_arg = False
                continue

            # "-L<dir>" form
            link_dir_arg = _LINK_DIR_ARG.match(arg)
            if link_dir_arg:
                link_dir = link_dir_arg.group("dir")
                raw_link_dirs.append(link_dir)

            # "/LIBPATH:<dir>" form
            link_dir_arg = _LIBPATH_ARG.match(arg)
            if link_dir_arg:
                link_dir = link_dir_arg.group("dir")
                raw_link_dirs.append(link_dir)

    # De-duplicate while preserving first-seen order, on absolute paths
    implicit_link_dirs = list()
    visited = set()
    for link_dir in raw_link_dirs:
        normalized_path = os.path.abspath(link_dir)
        if normalized_path not in visited:
            implicit_link_dirs.append(normalized_path)
            visited.add(normalized_path)

    tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
    return implicit_link_dirs
class CompilerPropertyDetector:
    """Detects compiler properties (implicit rpaths, default libc, dynamic linker)
    by running the compiler in verbose mode and parsing its output. Results are
    obtained through the module-level COMPILER_CACHE.
    """

    def __init__(self, compiler_spec: "spack.spec.Spec"):
        assert compiler_spec.external, "only external compiler specs are allowed, so far"
        assert compiler_spec.concrete, "only concrete compiler specs are allowed, so far"
        self.spec = compiler_spec
        self.cache = COMPILER_CACHE

    @contextlib.contextmanager
    def compiler_environment(self):
        """Sets the environment to run this compiler"""
        # Imported lazily to avoid circular imports at module load time
        import spack.schema.environment
        import spack.util.module_cmd

        # Avoid modifying os.environ if possible.
        environment = self.spec.extra_attributes.get("environment", {})
        modules = self.spec.external_modules or []
        if not self.spec.external_modules and not environment:
            yield
            return

        # store environment to replace later
        backup_env = os.environ.copy()

        try:
            # load modules and set env variables
            for module in modules:
                spack.util.module_cmd.load_module(module)

            # apply other compiler environment changes
            spack.schema.environment.parse(environment).apply_modifications()

            yield
        finally:
            # Restore environment regardless of whether inner code succeeded
            os.environ.clear()
            os.environ.update(backup_env)

    def _compile_dummy_c_source(self) -> Optional[str]:
        """Compiles a trivial C (or C++) program in verbose mode and returns the
        combined stdout/stderr, or None if no compiler or verbose flags exist.
        """
        import spack.util.executable

        assert self.spec.external, "only external compiler specs are allowed, so far"
        compiler_pkg = self.spec.package
        # NOTE(review): getattr without a default raises AttributeError when the
        # package has no "cc" attribute — confirm whether getattr(..., "cc", None)
        # was intended
        if getattr(compiler_pkg, "cc"):
            cc = compiler_pkg.cc
            ext = "c"
        else:
            cc = compiler_pkg.cxx
            ext = "cc"

        if not cc or not self.spec.package.verbose_flags:
            return None

        try:
            # NOTE(review): if mkdtemp itself raises, `tmpdir` is unbound in the
            # finally clause below — confirm this is acceptable
            tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
            fout = os.path.join(tmpdir, "output")
            fin = os.path.join(tmpdir, f"main.{ext}")

            with open(fin, "w") as csource:
                csource.write(
                    "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
                )
            cc_exe = spack.util.executable.Executable(cc)

            # FIXME (compiler as nodes): this operation should be encapsulated somewhere else
            compiler_flags = self.spec.extra_attributes.get("flags", {})
            for flag_type in [
                "cflags" if cc == compiler_pkg.cc else "cxxflags",
                "cppflags",
                "ldflags",
            ]:
                current_flags = compiler_flags.get(flag_type, "").strip()
                if current_flags:
                    cc_exe.add_default_arg(*current_flags.split(" "))

            with self.compiler_environment():
                return cc_exe("-v", fin, "-o", fout, output=str, error=str)
        except spack.util.executable.ProcessError as pe:
            tty.debug(f"ProcessError: Command exited with non-zero status: {pe.long_message}")
            return None
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)

    def compiler_verbose_output(self) -> Optional[str]:
        """Returns the cached verbose compiler output for this spec, if any."""
        return self.cache.get(self.spec).c_compiler_output

    def default_dynamic_linker(self) -> Optional[str]:
        """Returns the path of the dynamic linker parsed from the verbose output,
        or None when the output is unavailable.
        """
        output = self.compiler_verbose_output()
        if not output:
            return None
        return spack.util.libc.parse_dynamic_linker(output)

    def default_libc(self) -> Optional["spack.spec.Spec"]:
        """Determine libc targeted by the compiler from link line"""
        # technically this should be testing the target platform of the compiler, but we don't have
        # that, so stick to host platform for now.
        if sys.platform in ("darwin", "win32"):
            return None

        dynamic_linker = self.default_dynamic_linker()
        if dynamic_linker is None:
            return None
        return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)

    def implicit_rpaths(self) -> List[str]:
        """Returns the non-system link directories, containing required libraries,
        that should be added as rpaths; empty when no verbose output is available.
        """
        output = self.compiler_verbose_output()
        if output is None:
            return []

        link_dirs = parse_non_system_link_dirs(output)
        all_required_libs = list(self.spec.package.required_libs) + [
            "libc",
            "libc++",
            "libstdc++",
        ]
        dynamic_linker = self.default_dynamic_linker()
        # FIXME (compiler as nodes): is this needed ?
        # if dynamic_linker is None:
        #     return []
        result = DefaultDynamicLinkerFilter(dynamic_linker)(
            paths_containing_libs(link_dirs, all_required_libs)
        )
        return list(result)
class DefaultDynamicLinkerFilter:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    #: Shared cache: dynamic linker path -> set of (inode, device) identifiers
    #: of its default search directories
    _CACHE: Dict[Optional[str], Set[Tuple[int, int]]] = {}

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        if dynamic_linker not in DefaultDynamicLinkerFilter._CACHE:
            # Identify directories by (inode, device) tuple, which handles symlinks too.
            default_path_identifiers: Set[Tuple[int, int]] = set()
            if not dynamic_linker:
                # No linker known: filter becomes a no-op (nothing is cached here)
                self.default_path_identifiers = None
                return
            for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
                try:
                    s = os.stat(path)
                    if stat.S_ISDIR(s.st_mode):
                        default_path_identifiers.add((s.st_ino, s.st_dev))
                except OSError:
                    # Nonexistent/unreadable default path: skip it
                    continue
            DefaultDynamicLinkerFilter._CACHE[dynamic_linker] = default_path_identifiers

        self.default_path_identifiers = DefaultDynamicLinkerFilter._CACHE[dynamic_linker]

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        # Compare by (inode, device) so symlinked aliases of a default dir match too
        if self.default_path_identifiers is None:
            return False
        try:
            s = os.stat(p)
            return (s.st_ino, s.st_dev) in self.default_path_identifiers
        except OSError:
            return False

    def __call__(self, dirs: List[str]) -> List[str]:
        # With no identifiers (unknown linker or empty set) the filter passes
        # everything through unchanged
        if not self.default_path_identifiers:
            return dirs
        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
def dynamic_linker_filter_for(node: "spack.spec.Spec") -> Optional[DefaultDynamicLinkerFilter]:
    """Returns a filter for default dynamic-linker paths, based on the compiler
    associated with *node*.

    Returns None when the node has no associated compiler, or when that
    compiler's default dynamic linker cannot be determined.
    """
    candidate = compiler_spec(node)
    if candidate is not None:
        linker = CompilerPropertyDetector(candidate).default_dynamic_linker()
        if linker is not None:
            return DefaultDynamicLinkerFilter(linker)
    return None
def compiler_spec(node: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
    """Returns the compiler spec associated with the node passed as argument.

    The function looks for a "c", "cxx", and "fortran" compiler in that order,
    and returns the first found. If none is found, returns None.
    """
    for language in ("c", "cxx", "fortran"):
        providers = node.dependencies(virtuals=[language])
        if providers:
            return providers[0]
    return None
class CompilerCacheEntry:
    """Deserialized cache entry for a compiler"""

    # A single stored field; __slots__ keeps instances small
    __slots__ = ["c_compiler_output"]

    def __init__(self, c_compiler_output: Optional[str]):
        self.c_compiler_output = c_compiler_output

    @classmethod
    def from_dict(cls, data: Dict[str, Optional[str]]):
        """Build an entry from a dict, validating shape and types.

        Raises:
            ValueError: if *data* is not a dict, or the output is not str/None
        """
        if isinstance(data, dict):
            output = data.get("c_compiler_output")
            if output is None or isinstance(output, str):
                return cls(output)
        raise ValueError(f"Invalid {cls.__name__} data")
class CompilerCache:
    """Base class for compiler output cache. Default implementation does not cache anything."""

    def value(self, compiler: "spack.spec.Spec") -> Dict[str, Optional[str]]:
        # Recomputed on every call: runs the compiler in verbose mode
        return {"c_compiler_output": CompilerPropertyDetector(compiler)._compile_dummy_c_source()}

    def get(self, compiler: "spack.spec.Spec") -> CompilerCacheEntry:
        # Returns the freshly computed value wrapped in a cache entry
        return CompilerCacheEntry.from_dict(self.value(compiler))
class FileCompilerCache(CompilerCache):
    """Cache for compiler output, which is used to determine implicit link paths, the default libc
    version, and the compiler version."""

    #: Path of the cache file, relative to the FileCache root
    name = os.path.join("compilers", "compilers.json")

    def __init__(self, cache: "FileCache") -> None:
        self.cache = cache
        self.cache.init_entry(self.name)
        # In-memory copy of the JSON payload: key -> serialized entry dict
        self._data: Dict[str, Dict[str, Optional[str]]] = {}

    def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
        # Returns the deserialized entry for *key*, dropping corrupt entries,
        # or None when the key is absent
        try:
            return CompilerCacheEntry.from_dict(self._data[key])
        except ValueError:
            del self._data[key]
        except KeyError:
            pass
        return None

    def get(self, compiler: "spack.spec.Spec") -> CompilerCacheEntry:
        # Cache hit
        try:
            with self.cache.read_transaction(self.name) as f:
                assert f is not None
                self._data = json.loads(f.read())
                assert isinstance(self._data, dict)
        except (json.JSONDecodeError, AssertionError):
            # Missing or corrupt cache file: start from an empty payload
            self._data = {}

        key = self._key(compiler)
        value = self._get_entry(key)
        if value is not None:
            return value

        # Cache miss
        with self.cache.write_transaction(self.name) as (old, new):
            # Re-read under the write lock, since another process may have
            # written in between
            try:
                assert old is not None
                self._data = json.loads(old.read())
                assert isinstance(self._data, dict)
            except (json.JSONDecodeError, AssertionError):
                self._data = {}

            # Use cache entry that may have been created by another process in the meantime.
            entry = self._get_entry(key)

            # Finally compute the cache entry
            if entry is None:
                self._data[key] = self.value(compiler)
                entry = CompilerCacheEntry.from_dict(self._data[key])

            new.write(json.dumps(self._data, separators=(",", ":")))

        return entry

    def _key(self, compiler: "spack.spec.Spec") -> str:
        # Key on the sha256 of the spec's canonical JSON representation
        as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8")
        return hashlib.sha256(as_bytes).hexdigest()
def _make_compiler_cache():
    # Factory for the lazily-created module singleton below
    return FileCompilerCache(spack.caches.MISC_CACHE)


#: Lazily initialized, process-wide compiler output cache
COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache)  # type: ignore

View File

@ -11,6 +11,7 @@
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.compilers import spack.compilers
import spack.compilers.config
import spack.config import spack.config
import spack.error import spack.error
import spack.repo import spack.repo
@ -146,7 +147,7 @@ def concretize_separately(
# Ensure we have compilers in compilers.yaml to avoid that # Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel # processes try to write the config file in parallel
_ = spack.compilers.all_compilers_config(spack.config.CONFIG) _ = spack.compilers.config.all_compilers_from(spack.config.CONFIG)
# Early return if there is nothing to do # Early return if there is nothing to do
if len(args) == 0: if len(args) == 0:

View File

@ -14,7 +14,7 @@
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.cmd import spack.cmd
import spack.compilers import spack.compilers.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
import spack.hash_types as hash_types import spack.hash_types as hash_types
@ -34,7 +34,7 @@
def translated_compiler_name(manifest_compiler_name): def translated_compiler_name(manifest_compiler_name):
""" """
When creating a Compiler object, Spack expects a name matching When creating a Compiler object, Spack expects a name matching
one of the classes in `spack.compilers`. Names in the Cray manifest one of the classes in `spack.compilers.config`. Names in the Cray manifest
may differ; for cases where we know the name refers to a compiler in may differ; for cases where we know the name refers to a compiler in
Spack, this function translates it automatically. Spack, this function translates it automatically.
@ -43,10 +43,10 @@ def translated_compiler_name(manifest_compiler_name):
""" """
if manifest_compiler_name in compiler_name_translation: if manifest_compiler_name in compiler_name_translation:
return compiler_name_translation[manifest_compiler_name] return compiler_name_translation[manifest_compiler_name]
elif manifest_compiler_name in spack.compilers.supported_compilers(): elif manifest_compiler_name in spack.compilers.config.supported_compilers():
return manifest_compiler_name return manifest_compiler_name
else: else:
raise spack.compilers.UnknownCompilerError( raise spack.compilers.config.UnknownCompilerError(
"Manifest parsing - unknown compiler: {0}".format(manifest_compiler_name) "Manifest parsing - unknown compiler: {0}".format(manifest_compiler_name)
) )
@ -80,7 +80,7 @@ def compiler_from_entry(entry: dict, manifest_path: str):
operating_system = arch["os"] operating_system = arch["os"]
target = arch["target"] target = arch["target"]
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) compiler_cls = spack.compilers.config.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version) spec = spack.spec.CompilerSpec(compiler_cls.name, version)
path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
@ -225,11 +225,11 @@ def read(path, apply_updates):
compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"]) compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
tty.debug(f"{path}: {str(len(compilers))} compilers read from manifest") tty.debug(f"{path}: {str(len(compilers))} compilers read from manifest")
# Filter out the compilers that already appear in the configuration # Filter out the compilers that already appear in the configuration
compilers = spack.compilers.select_new_compilers(compilers) compilers = spack.compilers.config.select_new_compilers(compilers)
if apply_updates and compilers: if apply_updates and compilers:
for compiler in compilers: for compiler in compilers:
try: try:
spack.compilers.add_compiler_to_config(compiler) spack.compilers.config.add_compiler_to_config(compiler)
except Exception: except Exception:
warnings.warn( warnings.warn(
f"Could not add compiler {str(compiler.spec)}: " f"Could not add compiler {str(compiler.spec)}: "

View File

@ -24,6 +24,7 @@
import spack import spack
import spack.caches import spack.caches
import spack.compilers.config
import spack.concretize import spack.concretize
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt

View File

@ -11,7 +11,7 @@
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.lang as lang import llnl.util.lang as lang
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.error import spack.error
import spack.repo import spack.repo
@ -70,7 +70,7 @@ def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
List of found core compilers List of found core compilers
""" """
core_compilers = [] core_compilers = []
for compiler in spack.compilers.all_compilers(): for compiler in spack.compilers.config.all_compilers():
try: try:
# A compiler is considered to be a core compiler if any of the # A compiler is considered to be a core compiler if any of the
# C, C++ or Fortran compilers reside in a system directory # C, C++ or Fortran compilers reside in a system directory
@ -200,11 +200,11 @@ def provides(self):
# virtual dependencies in spack # virtual dependencies in spack
# If it is in the list of supported compilers family -> compiler # If it is in the list of supported compilers family -> compiler
if self.spec.name in spack.compilers.supported_compilers(): if self.spec.name in spack.compilers.config.supported_compilers():
provides["compiler"] = spack.spec.CompilerSpec(self.spec.format("{name}{@versions}")) provides["compiler"] = spack.spec.CompilerSpec(self.spec.format("{name}{@versions}"))
elif self.spec.name in spack.compilers.package_name_to_compiler_name: elif self.spec.name in spack.compilers.config.package_name_to_compiler_name:
# If it is the package for a supported compiler, but of a different name # If it is the package for a supported compiler, but of a different name
cname = spack.compilers.package_name_to_compiler_name[self.spec.name] cname = spack.compilers.config.package_name_to_compiler_name[self.spec.name]
provides["compiler"] = spack.spec.CompilerSpec(cname, self.spec.versions) provides["compiler"] = spack.spec.CompilerSpec(cname, self.spec.versions)
# All the other tokens in the hierarchy must be virtual dependencies # All the other tokens in the hierarchy must be virtual dependencies

View File

@ -32,7 +32,7 @@
from llnl.util.lang import classproperty, memoized from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree from llnl.util.link_tree import LinkTree
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.dependency import spack.dependency
import spack.deptypes as dt import spack.deptypes as dt
@ -1617,7 +1617,7 @@ def do_stage(self, mirror_only=False):
self.stage.create() self.stage.create()
# Fetch/expand any associated code. # Fetch/expand any associated code.
if self.has_code: if self.has_code and not self.spec.external:
self.do_fetch(mirror_only) self.do_fetch(mirror_only)
self.stage.expand_archive() self.stage.expand_archive()
else: else:
@ -1948,7 +1948,7 @@ def _resource_stage(self, resource):
def do_test(self, dirty=False, externals=False): def do_test(self, dirty=False, externals=False):
if self.test_requires_compiler: if self.test_requires_compiler:
compilers = spack.compilers.compilers_for_spec( compilers = spack.compilers.config.compilers_for_spec(
self.spec.compiler, arch_spec=self.spec.architecture self.spec.compiler, arch_spec=self.spec.architecture
) )
if not compilers: if not compilers:

View File

@ -27,8 +27,8 @@
import spack import spack
import spack.binary_distribution import spack.binary_distribution
import spack.compiler import spack.compilers.config
import spack.compilers import spack.compilers.flags
import spack.concretize import spack.concretize
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
@ -48,6 +48,7 @@
import spack.version as vn import spack.version as vn
import spack.version.git_ref_lookup import spack.version.git_ref_lookup
from spack import traverse from spack import traverse
from spack.compilers.libraries import CompilerPropertyDetector
from .core import ( from .core import (
AspFunction, AspFunction,
@ -63,7 +64,6 @@
parse_term, parse_term,
) )
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .libc import CompilerPropertyDetector
from .requirements import RequirementKind, RequirementParser, RequirementRule from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order from .version_order import concretization_version_order
@ -71,9 +71,6 @@
TransformFunction = Callable[["spack.spec.Spec", List[AspFunction]], List[AspFunction]] TransformFunction = Callable[["spack.spec.Spec", List[AspFunction]], List[AspFunction]]
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"
#: Data class that contain configuration on what a #: Data class that contain configuration on what a
#: clingo solve should output. #: clingo solve should output.
#: #:
@ -287,7 +284,7 @@ def all_libcs() -> Set[spack.spec.Spec]:
libcs = { libcs = {
CompilerPropertyDetector(c).default_libc() CompilerPropertyDetector(c).default_libc()
for c in spack.compilers.all_compilers_from(spack.config.CONFIG) for c in spack.compilers.config.all_compilers_from(spack.config.CONFIG)
} }
libcs.discard(None) libcs.discard(None)
@ -311,7 +308,7 @@ def using_libc_compatibility() -> bool:
return spack.platforms.host().name == "linux" return spack.platforms.host().name == "linux"
def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool: def c_compiler_runs(compiler) -> bool:
return CompilerPropertyDetector(compiler).compiler_verbose_output() is not None return CompilerPropertyDetector(compiler).compiler_verbose_output() is not None
@ -602,7 +599,7 @@ def _external_config_with_implicit_externals(configuration):
if not using_libc_compatibility(): if not using_libc_compatibility():
return packages_yaml return packages_yaml
for compiler in spack.compilers.all_compilers_from(configuration): for compiler in spack.compilers.config.all_compilers_from(configuration):
libc = CompilerPropertyDetector(compiler).default_libc() libc = CompilerPropertyDetector(compiler).default_libc()
if libc: if libc:
entry = {"spec": f"{libc}", "prefix": libc.external_path} entry = {"spec": f"{libc}", "prefix": libc.external_path}
@ -746,27 +743,6 @@ def on_model(model):
raise UnsatisfiableSpecError(msg) raise UnsatisfiableSpecError(msg)
class KnownCompiler(NamedTuple):
"""Data class to collect information on compilers"""
spec: "spack.spec.Spec"
os: str
target: str
available: bool
compiler_obj: Optional["spack.compiler.Compiler"]
def _key(self):
return self.spec, self.os, self.target
def __eq__(self, other: object):
if not isinstance(other, KnownCompiler):
return NotImplemented
return self._key() == other._key()
def __hash__(self):
return hash(self._key())
class PyclingoDriver: class PyclingoDriver:
def __init__(self, cores=True): def __init__(self, cores=True):
"""Driver for the Python clingo interface. """Driver for the Python clingo interface.
@ -2300,7 +2276,9 @@ def _supported_targets(self, compiler_name, compiler_version, targets):
try: try:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("ignore") warnings.simplefilter("ignore")
target.optimization_flags(compiler_name, str(compiler_version)) target.optimization_flags(
compiler_name, compiler_version.dotted_numeric_string
)
supported.append(target) supported.append(target)
except archspec.cpu.UnsupportedMicroarchitecture: except archspec.cpu.UnsupportedMicroarchitecture:
continue continue
@ -2724,9 +2702,8 @@ def setup(
self.gen.h1("Variant Values defined in specs") self.gen.h1("Variant Values defined in specs")
self.define_variant_values() self.define_variant_values()
if WITH_RUNTIME: self.gen.h1("Runtimes")
self.gen.h1("Runtimes") self.define_runtime_constraints()
self.define_runtime_constraints()
self.gen.h1("Version Constraints") self.gen.h1("Version Constraints")
self.collect_virtual_constraints() self.collect_virtual_constraints()
@ -2776,13 +2753,16 @@ def define_runtime_constraints(self):
# Inject default flags for compilers # Inject default flags for compilers
recorder("*").default_flags(compiler) recorder("*").default_flags(compiler)
if not using_libc_compatibility():
continue
current_libc = CompilerPropertyDetector(compiler).default_libc() current_libc = CompilerPropertyDetector(compiler).default_libc()
# If this is a compiler yet to be built infer libc from the Python process # If this is a compiler yet to be built infer libc from the Python process
# FIXME (compiler as nodes): recover this use case # FIXME (compiler as nodes): recover this use case
# if not current_libc and compiler.compiler_obj.cc is None: # if not current_libc and compiler.compiler_obj.cc is None:
# current_libc = spack.util.libc.libc_from_current_python_process() # current_libc = spack.util.libc.libc_from_current_python_process()
if using_libc_compatibility() and current_libc: if current_libc:
recorder("*").depends_on( recorder("*").depends_on(
"libc", "libc",
when=f"%{compiler.name}@{compiler.versions}", when=f"%{compiler.name}@{compiler.versions}",
@ -2928,8 +2908,6 @@ class _Head:
node_os = fn.attr("node_os_set") node_os = fn.attr("node_os_set")
node_target = fn.attr("node_target_set") node_target = fn.attr("node_target_set")
variant_value = fn.attr("variant_set") variant_value = fn.attr("variant_set")
node_compiler = fn.attr("node_compiler_set")
node_compiler_version = fn.attr("node_compiler_version_set")
node_flag = fn.attr("node_flag_set") node_flag = fn.attr("node_flag_set")
propagate = fn.attr("propagate") propagate = fn.attr("propagate")
@ -2944,8 +2922,6 @@ class _Body:
node_os = fn.attr("node_os") node_os = fn.attr("node_os")
node_target = fn.attr("node_target") node_target = fn.attr("node_target")
variant_value = fn.attr("variant_value") variant_value = fn.attr("variant_value")
node_compiler = fn.attr("node_compiler")
node_compiler_version = fn.attr("node_compiler_version")
node_flag = fn.attr("node_flag") node_flag = fn.attr("node_flag")
propagate = fn.attr("propagate") propagate = fn.attr("propagate")
@ -2998,7 +2974,7 @@ def value(self) -> str:
def possible_compilers(*, configuration) -> List["spack.spec.Spec"]: def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
result = set() result = set()
for c in spack.compilers.all_compilers_from(configuration): for c in spack.compilers.config.all_compilers_from(configuration):
# FIXME (compiler as nodes): Discard early specs that are not marked for this target? # FIXME (compiler as nodes): Discard early specs that are not marked for this target?
if using_libc_compatibility() and not c_compiler_runs(c): if using_libc_compatibility() and not c_compiler_runs(c):
@ -3017,10 +2993,7 @@ def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
continue continue
if c in result: if c in result:
warnings.warn( warnings.warn(f"duplicate {c} compiler found. Edit your packages.yaml to remove it.")
f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue continue
result.add(c) result.add(c)
@ -3129,15 +3102,20 @@ def depends_on(self, dependency_str: str, *, when: str, type: str, description:
self.reset() self.reset()
@staticmethod
def node_for(name: str) -> str:
return f'node(ID{name.replace("-", "_")}, "{name}")'
def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]: def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]:
"""Computes the rule body from a "when" spec, and returns it, along with the """Computes the rule body from a "when" spec, and returns it, along with the
node variable. node variable.
""" """
node_placeholder = "XXX" node_placeholder = "XXX"
node_variable = "node(ID, Package)" node_variable = "node(ID, Package)"
when_substitutions = {} when_substitutions = {}
for s in when_spec.traverse(root=False): for s in when_spec.traverse(root=False):
when_substitutions[f'"{s.name}"'] = f'node(ID{s.name}, "{s.name}")' when_substitutions[f'"{s.name}"'] = self.node_for(s.name)
when_spec.name = node_placeholder when_spec.name = node_placeholder
body_clauses = self._setup.spec_clauses(when_spec, body=True) body_clauses = self._setup.spec_clauses(when_spec, body=True)
for clause in body_clauses: for clause in body_clauses:
@ -3192,7 +3170,7 @@ def propagate(self, constraint_str: str, *, when: str):
when_substitutions = {} when_substitutions = {}
for s in when_spec.traverse(root=False): for s in when_spec.traverse(root=False):
when_substitutions[f'"{s.name}"'] = f'node(ID{s.name}, "{s.name}")' when_substitutions[f'"{s.name}"'] = self.node_for(s.name)
body_str, node_variable = self.rule_body_from(when_spec) body_str, node_variable = self.rule_body_from(when_spec)
constraint_spec = spack.spec.Spec(constraint_str) constraint_spec = spack.spec.Spec(constraint_str)
@ -3285,7 +3263,6 @@ class SpecBuilder:
r"^compatible_libc$", r"^compatible_libc$",
r"^dependency_holds$", r"^dependency_holds$",
r"^external_conditions_hold$", r"^external_conditions_hold$",
r"^node_compiler$",
r"^package_hash$", r"^package_hash$",
r"^root$", r"^root$",
r"^track_dependencies$", r"^track_dependencies$",
@ -3366,10 +3343,6 @@ def variant_selected(self, node, name, value, variant_type, variant_id):
def version(self, node, version): def version(self, node, version):
self._specs[node].versions = vn.VersionList([vn.Version(version)]) self._specs[node].versions = vn.VersionList([vn.Version(version)])
def node_compiler_version(self, node, compiler, version):
self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])
def node_flag(self, node, node_flag): def node_flag(self, node, node_flag):
self._specs[node].compiler_flags.add_flag( self._specs[node].compiler_flags.add_flag(
node_flag.flag_type, node_flag.flag, False, node_flag.flag_group, node_flag.source node_flag.flag_type, node_flag.flag, False, node_flag.flag_group, node_flag.source
@ -3481,7 +3454,7 @@ def _order_index(flag_group):
for grp in prioritized_groups: for grp in prioritized_groups:
grp_flags = tuple( grp_flags = tuple(
x for (x, y) in spack.compiler.tokenize_flags(grp.flag_group) x for (x, y) in spack.compilers.flags.tokenize_flags(grp.flag_group)
) )
if grp_flags == from_compiler: if grp_flags == from_compiler:
continue continue
@ -3586,9 +3559,8 @@ def sort_fn(function_tuple) -> Tuple[int, int]:
return (-1, 0) return (-1, 0)
def build_specs(self, function_tuples): def build_specs(self, function_tuples):
# Functions don't seem to be in particular order in output. Sort # Functions don't seem to be in particular order in output. Sort them here so that
# them here so that directives that build objects (like node and # directives that build objects, like node, are called in the right order.
# node_compiler) are called in the right order.
self.function_tuples = sorted(set(function_tuples), key=self.sort_fn) self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)
self._specs = {} self._specs = {}
for name, args in self.function_tuples: for name, args in self.function_tuples:
@ -3780,9 +3752,6 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool: def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
if not WITH_RUNTIME:
return True
if "gcc" in spec and "gcc-runtime" not in spec: if "gcc" in spec and "gcc-runtime" not in spec:
return False return False

View File

@ -1,116 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import shutil
import tempfile
import typing
from typing import Optional
import llnl.util.tty as tty
import spack.util.libc
if typing.TYPE_CHECKING:
import spack.spec
class CompilerPropertyDetector:
_CACHE = {}
def __init__(self, compiler_spec: "spack.spec.Spec"):
assert compiler_spec.external, "only external compiler specs are allowed, so far"
assert compiler_spec.concrete, "only concrete compiler specs are allowed, so far"
self.spec = compiler_spec
@contextlib.contextmanager
def compiler_environment(self):
"""Sets the environment to run this compiler"""
import spack.schema.environment
import spack.util.module_cmd
# Avoid modifying os.environ if possible.
environment = self.spec.extra_attributes.get("environment", {})
modules = self.spec.external_modules or []
if not self.spec.external_modules and not environment:
yield
return
# store environment to replace later
backup_env = os.environ.copy()
try:
# load modules and set env variables
for module in modules:
spack.util.module_cmd.load_module(module)
# apply other compiler environment changes
spack.schema.environment.parse(environment).apply_modifications()
yield
finally:
# Restore environment regardless of whether inner code succeeded
os.environ.clear()
os.environ.update(backup_env)
def _compile_dummy_c_source(self) -> Optional[str]:
import spack.util.executable
assert self.spec.external, "only external compiler specs are allowed, so far"
compiler_pkg = self.spec.package
cc = compiler_pkg.cc if compiler_pkg.cc else compiler_pkg.cxx
if not cc: # or not self.spec.verbose_flag:
return None
try:
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
fout = os.path.join(tmpdir, "output")
fin = os.path.join(tmpdir, "main.c")
with open(fin, "w") as csource:
csource.write(
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
)
cc_exe = spack.util.executable.Executable(cc)
# FIXME (compiler as nodes): this operation should be encapsulated somewhere else
compiler_flags = self.spec.extra_attributes.get("flags", {})
for flag_type in [
"cflags" if cc == compiler_pkg.cc else "cxxflags",
"cppflags",
"ldflags",
]:
current_flags = compiler_flags.get(flag_type, "").strip()
if current_flags:
cc_exe.add_default_arg(*current_flags.split(" "))
with self.compiler_environment():
return cc_exe("-v", fin, "-o", fout, output=str, error=str)
except spack.util.executable.ProcessError as pe:
tty.debug(f"ProcessError: Command exited with non-zero status: {pe.long_message}")
return None
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
def compiler_verbose_output(self):
key = self.spec.dag_hash()
if key not in self._CACHE:
self._CACHE[key] = self._compile_dummy_c_source()
return self._CACHE[key]
def default_libc(self) -> Optional["spack.spec.Spec"]:
"""Determine libc targeted by the compiler from link line"""
output = self.compiler_verbose_output()
if not output:
return None
dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
if not dynamic_linker:
return None
return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)

View File

@ -71,8 +71,7 @@
import llnl.util.tty.color as clr import llnl.util.tty.color as clr
import spack import spack
import spack.compiler import spack.compilers.flags
import spack.compilers
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
@ -1637,7 +1636,7 @@ def _add_flag(self, name, value, propagate):
self.namespace = value self.namespace = value
elif name in valid_flags: elif name in valid_flags:
assert self.compiler_flags is not None assert self.compiler_flags is not None
flags_and_propagation = spack.compiler.tokenize_flags(value, propagate) flags_and_propagation = spack.compilers.flags.tokenize_flags(value, propagate)
flag_group = " ".join(x for (x, y) in flags_and_propagation) flag_group = " ".join(x for (x, y) in flags_and_propagation)
for flag, propagation in flags_and_propagation: for flag, propagation in flags_and_propagation:
self.compiler_flags.add_flag(name, flag, propagation, flag_group) self.compiler_flags.add_flag(name, flag, propagation, flag_group)

View File

@ -27,7 +27,7 @@
import spack.binary_distribution as bindist import spack.binary_distribution as bindist
import spack.caches import spack.caches
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.fetch_strategy import spack.fetch_strategy
import spack.hooks.sbang as sbang import spack.hooks.sbang as sbang
@ -84,7 +84,7 @@ def config_directory(tmp_path_factory):
for name in [f"site/{platform.system().lower()}", "site", "user"] for name in [f"site/{platform.system().lower()}", "site", "user"]
] ]
with spack.config.use_configuration(*cfg_scopes): with spack.config.use_configuration(*cfg_scopes):
_ = spack.compilers.find_compilers(scope="site") _ = spack.compilers.config.find_compilers(scope="site")
yield defaults_dir yield defaults_dir

View File

@ -8,7 +8,7 @@
import spack.bootstrap import spack.bootstrap
import spack.bootstrap.config import spack.bootstrap.config
import spack.bootstrap.core import spack.bootstrap.core
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.environment import spack.environment
import spack.store import spack.store
@ -129,10 +129,10 @@ def test_bootstrap_disables_modulefile_generation(mutable_config):
@pytest.mark.regression("25992") @pytest.mark.regression("25992")
@pytest.mark.requires_executables("gcc") @pytest.mark.requires_executables("gcc")
def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml): def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml):
assert not spack.compilers.all_compilers(init_config=False) assert not spack.compilers.config.all_compilers(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration(): with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.compilers.all_compilers(init_config=False) assert spack.compilers.config.all_compilers(init_config=False)
assert not spack.compilers.all_compilers(init_config=False) assert not spack.compilers.config.all_compilers(init_config=False)
@pytest.mark.regression("25992") @pytest.mark.regression("25992")
@ -140,10 +140,10 @@ def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml):
def test_bootstrap_search_for_compilers_with_environment_active( def test_bootstrap_search_for_compilers_with_environment_active(
no_packages_yaml, active_mock_environment no_packages_yaml, active_mock_environment
): ):
assert not spack.compilers.all_compilers(init_config=False) assert not spack.compilers.config.all_compilers(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration(): with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.compilers.all_compilers(init_config=False) assert spack.compilers.config.all_compilers(init_config=False)
assert not spack.compilers.all_compilers(init_config=False) assert not spack.compilers.config.all_compilers(init_config=False)
@pytest.mark.regression("26189") @pytest.mark.regression("26189")

View File

@ -8,7 +8,7 @@
import pytest import pytest
import spack.cmd.compiler import spack.cmd.compiler
import spack.compilers import spack.compilers.config
import spack.config import spack.config
import spack.main import spack.main
import spack.spec import spack.spec
@ -84,11 +84,13 @@ def test_compiler_find_without_paths(no_packages_yaml, working_env, mock_executa
@pytest.mark.regression("37996") @pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages): def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration.""" """Tests that we can remove a compiler from configuration."""
assert any(compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.all_compilers()) assert any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
)
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None) args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args) spack.cmd.compiler.compiler_remove(args)
assert not any( assert not any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.all_compilers() compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
) )
@ -98,11 +100,13 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
site_config = spack.config.get("packages", scope="site") site_config = spack.config.get("packages", scope="site")
spack.config.set("packages", site_config, scope="user") spack.config.set("packages", site_config, scope="user")
assert any(compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.all_compilers()) assert any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
)
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None) args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args) spack.cmd.compiler.compiler_remove(args)
assert not any( assert not any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.all_compilers() compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
) )
@ -123,7 +127,7 @@ def test_compiler_add(mutable_config, mock_executable):
bin_dir = gcc_path.parent bin_dir = gcc_path.parent
root_dir = bin_dir.parent root_dir = bin_dir.parent
compilers_before_find = set(spack.compilers.all_compilers()) compilers_before_find = set(spack.compilers.config.all_compilers())
args = spack.util.pattern.Bunch( args = spack.util.pattern.Bunch(
all=None, all=None,
compiler_spec=None, compiler_spec=None,
@ -133,7 +137,7 @@ def test_compiler_add(mutable_config, mock_executable):
jobs=1, jobs=1,
) )
spack.cmd.compiler.compiler_find(args) spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compilers()) compilers_after_find = set(spack.compilers.config.all_compilers())
compilers_added_by_find = compilers_after_find - compilers_before_find compilers_added_by_find = compilers_after_find - compilers_before_find
assert len(compilers_added_by_find) == 1 assert len(compilers_added_by_find) == 1
@ -155,7 +159,7 @@ def test_compiler_find_prefer_no_suffix(no_packages_yaml, working_env, compilers
assert "llvm@11.0.0" in output assert "llvm@11.0.0" in output
assert "gcc@8.4.0" in output assert "gcc@8.4.0" in output
compilers = spack.compilers.all_compilers_from(no_packages_yaml, scope="site") compilers = spack.compilers.config.all_compilers_from(no_packages_yaml, scope="site")
clang = [x for x in compilers if x.satisfies("llvm@11")] clang = [x for x in compilers if x.satisfies("llvm@11")]
assert len(clang) == 1 assert len(clang) == 1
@ -175,7 +179,7 @@ def test_compiler_find_path_order(no_packages_yaml, working_env, compilers_dir):
compiler("find", "--scope=site") compiler("find", "--scope=site")
compilers = spack.compilers.all_compilers(scope="site") compilers = spack.compilers.config.all_compilers(scope="site")
gcc = [x for x in compilers if x.satisfies("gcc@8.4")] gcc = [x for x in compilers if x.satisfies("gcc@8.4")]
# Ensure we found both duplicates # Ensure we found both duplicates

View File

@ -0,0 +1,121 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import pytest
import llnl.util.filesystem as fs
import spack.compilers.config
import spack.compilers.libraries
import spack.util.executable
import spack.util.module_cmd
without_flag_output = "ld -L/path/to/first/lib -L/path/to/second/lib64"
with_flag_output = "ld -L/path/to/first/with/flag/lib -L/path/to/second/lib64"
def call_compiler(exe, *args, **kwargs):
# This method can replace Executable.__call__ to emulate a compiler that
# changes libraries depending on a flag.
if "--correct-flag" in exe.exe:
return with_flag_output
return without_flag_output
@pytest.fixture()
def mock_gcc(config):
compilers = spack.compilers.config.all_compilers_from(configuration=config)
compilers.sort(key=lambda x: (x.name == "gcc", x.version))
# Deepcopy is used to avoid more boilerplate when changing the "extra_attributes"
return copy.deepcopy(compilers[-1])
class TestCompilerPropertyDetector:
@pytest.mark.parametrize(
"language,flagname",
[
("cxx", "cxxflags"),
("cxx", "cppflags"),
("cxx", "ldflags"),
("c", "cflags"),
("c", "cppflags"),
],
)
@pytest.mark.not_on_windows("Not supported on Windows")
def test_compile_dummy_c_source(self, mock_gcc, monkeypatch, language, flagname):
monkeypatch.setattr(spack.util.executable.Executable, "__call__", call_compiler)
for key in list(mock_gcc.extra_attributes["compilers"]):
if key == language:
continue
mock_gcc.extra_attributes["compilers"].pop(key)
detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
# Test without flags
assert detector._compile_dummy_c_source() == without_flag_output
# Set flags and test
if flagname:
mock_gcc.extra_attributes.setdefault("flags", {})
monkeypatch.setitem(mock_gcc.extra_attributes["flags"], flagname, "--correct-flag")
assert detector._compile_dummy_c_source() == with_flag_output
def test_compile_dummy_c_source_no_path(self, mock_gcc):
mock_gcc.extra_attributes["compilers"] = {}
detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
assert detector._compile_dummy_c_source() is None
def test_compile_dummy_c_source_no_verbose_flags(self, mock_gcc, monkeypatch):
monkeypatch.setattr(mock_gcc.package, "verbose_flags", "")
detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
assert detector._compile_dummy_c_source() is None
def test_compile_dummy_c_source_load_env(self, mock_gcc, monkeypatch, tmp_path):
gcc = tmp_path / "gcc"
gcc.write_text(
f"""#!/bin/sh
if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then
printf '{without_flag_output}'
fi
"""
)
fs.set_executable(str(gcc))
# Set module load to turn compiler on
def module(*args):
if args[0] == "show":
return ""
elif args[0] == "load":
monkeypatch.setenv("MODULE_LOADED", "1")
monkeypatch.setattr(spack.util.module_cmd, "module", module)
mock_gcc.extra_attributes["compilers"]["c"] = str(gcc)
mock_gcc.extra_attributes["environment"] = {"set": {"ENV_SET": "1"}}
mock_gcc.external_modules = ["turn_on"]
detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
assert detector._compile_dummy_c_source() == without_flag_output
@pytest.mark.not_on_windows("Not supported on Windows")
def test_implicit_rpaths(self, mock_gcc, dirs_with_libfiles, monkeypatch):
    """Implicit rpaths are recovered from the cached link-line output."""
    lib_to_dirs, all_dirs = dirs_with_libfiles

    # Start from an empty cache, then seed it with a fake linker invocation
    # mentioning every directory from the fixture
    monkeypatch.setattr(spack.compilers.libraries.CompilerPropertyDetector, "_CACHE", {})
    detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
    fake_link_line = "ld " + " ".join(f"-L{directory}" for directory in all_dirs)
    detector._CACHE[mock_gcc.dag_hash()] = fake_link_line

    # Only the directories actually hosting runtime libraries should survive
    expected_rpaths = set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
    assert set(detector.implicit_rpaths()) == expected_rpaths
def test_compiler_environment(self, working_env, mock_gcc, monkeypatch):
    """Environment modifications declared on the package are applied while
    inside the compiler_environment context manager.
    """
    # Ensure the variable is unset before entering the context
    monkeypatch.delenv("TEST", raising=False)
    mock_gcc.extra_attributes["environment"] = {"set": {"TEST": "yes"}}

    property_detector = spack.compilers.libraries.CompilerPropertyDetector(mock_gcc)
    with property_detector.compiler_environment():
        assert os.environ["TEST"] == "yes"

View File

@ -14,8 +14,7 @@
import spack.binary_distribution import spack.binary_distribution
import spack.cmd import spack.cmd
import spack.compiler import spack.compilers.config
import spack.compilers
import spack.concretize import spack.concretize
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
@ -408,10 +407,10 @@ def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
# spec = Spec("pkg-a %clang@12.2.0 platform=test os=fe target=fe") # spec = Spec("pkg-a %clang@12.2.0 platform=test os=fe target=fe")
# #
# # Get the compiler that matches the spec ( # # Get the compiler that matches the spec (
# compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture) # compiler = spack.compilers.config.compiler_for_spec("clang@=12.2.0", spec.architecture)
# #
# # Configure spack to have two identical compilers with different flags # # Configure spack to have two identical compilers with different flags
# default_dict = spack.compilers._to_dict(compiler) # default_dict = spack.compilers.config._to_dict(compiler)
# different_dict = copy.deepcopy(default_dict) # different_dict = copy.deepcopy(default_dict)
# different_dict["compiler"]["flags"] = {"cflags": "-O2"} # different_dict["compiler"]["flags"] = {"cflags": "-O2"}
# #
@ -2363,7 +2362,7 @@ def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_
mpileaks = [s for s in mutable_database.query_local() if s.name == "mpileaks"] mpileaks = [s for s in mutable_database.query_local() if s.name == "mpileaks"]
# Remove gcc@10.2.1 # Remove gcc@10.2.1
remover = spack.compilers.CompilerRemover(mutable_config) remover = spack.compilers.config.CompilerRemover(mutable_config)
remover.mark_compilers(match="gcc@=10.2.1") remover.mark_compilers(match="gcc@=10.2.1")
remover.flush() remover.flush()
mutable_config.set("concretizer:reuse", True) mutable_config.set("concretizer:reuse", True)

View File

@ -35,8 +35,7 @@
import spack.binary_distribution import spack.binary_distribution
import spack.bootstrap.core import spack.bootstrap.core
import spack.caches import spack.caches
import spack.compiler import spack.compilers.libraries
import spack.compilers
import spack.config import spack.config
import spack.directives_meta import spack.directives_meta
import spack.environment as ev import spack.environment as ev
@ -47,7 +46,6 @@
import spack.platforms import spack.platforms
import spack.repo import spack.repo
import spack.solver.asp import spack.solver.asp
import spack.solver.libc
import spack.spec import spack.spec
import spack.stage import spack.stage
import spack.store import spack.store
@ -295,23 +293,6 @@ def archspec_host_is_spack_test_host(monkeypatch):
monkeypatch.setattr(archspec.cpu, "host", _host) monkeypatch.setattr(archspec.cpu, "host", _host)
#
# Disable checks on compiler executable existence
#
@pytest.fixture(scope="function", autouse=True)
def mock_compiler_executable_verification(request, monkeypatch):
"""Mock the compiler executable verification to allow missing executables.
This fixture can be disabled for tests of the compiler verification
functionality by::
@pytest.mark.enable_compiler_verification
If a test is marked in that way this is a no-op."""
if "enable_compiler_verification" not in request.keywords:
monkeypatch.setattr(spack.compiler.Compiler, "verify_executables", _return_none)
# Hooks to add command line options or set other custom behaviors. # Hooks to add command line options or set other custom behaviors.
# They must be placed here to be found by pytest. See: # They must be placed here to be found by pytest. See:
# #
@ -501,16 +482,11 @@ def mock_binary_index(monkeypatch, tmpdir_factory):
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def _skip_if_missing_executables(request): def _skip_if_missing_executables(request, monkeypatch):
"""Permits to mark tests with 'require_executables' and skip the """Permits to mark tests with 'require_executables' and skip the
tests if the executables passed as arguments are not found. tests if the executables passed as arguments are not found.
""" """
if hasattr(request.node, "get_marker"): marker = request.node.get_closest_marker("requires_executables")
# TODO: Remove the deprecated API as soon as we drop support for Python 2.6
marker = request.node.get_marker("requires_executables")
else:
marker = request.node.get_closest_marker("requires_executables")
if marker: if marker:
required_execs = marker.args required_execs = marker.args
missing_execs = [x for x in required_execs if spack.util.executable.which(x) is None] missing_execs = [x for x in required_execs if spack.util.executable.which(x) is None]
@ -518,6 +494,9 @@ def _skip_if_missing_executables(request):
msg = "could not find executables: {0}" msg = "could not find executables: {0}"
pytest.skip(msg.format(", ".join(missing_execs))) pytest.skip(msg.format(", ".join(missing_execs)))
# In case we require a compiler, clear the caches used to speed-up detection
monkeypatch.setattr(spack.compilers.libraries.DefaultDynamicLinkerFilter, "_CACHE", {})
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def test_platform(): def test_platform():
@ -962,26 +941,11 @@ def _return_none(*args):
return None return None
def _compiler_output(self):
return ""
def _get_real_version(self):
return str(self.version)
@pytest.fixture(scope="function", autouse=True)
def disable_compiler_execution(monkeypatch, request):
"""Disable compiler execution to determine implicit link paths and libc flavor and version.
To re-enable use `@pytest.mark.enable_compiler_execution`"""
if "enable_compiler_execution" not in request.keywords:
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _compiler_output)
monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", _get_real_version)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def disable_compiler_output_cache(monkeypatch): def disable_compiler_output_cache(monkeypatch):
monkeypatch.setattr(spack.compiler, "COMPILER_CACHE", spack.compiler.CompilerCache()) monkeypatch.setattr(
spack.compilers.libraries, "COMPILER_CACHE", spack.compilers.libraries.CompilerCache()
)
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
@ -2111,11 +2075,11 @@ def do_not_check_runtimes_on_reuse(monkeypatch):
def _c_compiler_always_exists(): def _c_compiler_always_exists():
fn = spack.solver.asp.c_compiler_runs fn = spack.solver.asp.c_compiler_runs
spack.solver.asp.c_compiler_runs = _true spack.solver.asp.c_compiler_runs = _true
mthd = spack.solver.libc.CompilerPropertyDetector.default_libc mthd = spack.compilers.libraries.CompilerPropertyDetector.default_libc
spack.solver.libc.CompilerPropertyDetector.default_libc = _libc_from_python spack.compilers.libraries.CompilerPropertyDetector.default_libc = _libc_from_python
yield yield
spack.solver.asp.c_compiler_runs = fn spack.solver.asp.c_compiler_runs = fn
spack.solver.libc.CompilerPropertyDetector.default_libc = mthd spack.compilers.libraries.CompilerPropertyDetector.default_libc = mthd
@pytest.fixture(scope="session") @pytest.fixture(scope="session")

View File

@ -17,7 +17,7 @@
import spack import spack
import spack.cmd import spack.cmd
import spack.cmd.external import spack.cmd.external
import spack.compilers import spack.compilers.config
import spack.cray_manifest as cray_manifest import spack.cray_manifest as cray_manifest
import spack.platforms import spack.platforms
import spack.platforms.test import spack.platforms.test
@ -307,7 +307,7 @@ def test_translate_compiler_name(_common_arch):
def test_failed_translate_compiler_name(_common_arch): def test_failed_translate_compiler_name(_common_arch):
unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0") unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0")
with pytest.raises(spack.compilers.UnknownCompilerError): with pytest.raises(spack.compilers.config.UnknownCompilerError):
compiler_from_entry(unknown_compiler.compiler_json(), "/example/file") compiler_from_entry(unknown_compiler.compiler_json(), "/example/file")
spec_json = JsonSpecEntry( spec_json = JsonSpecEntry(
@ -321,7 +321,7 @@ def test_failed_translate_compiler_name(_common_arch):
parameters={}, parameters={},
).to_dict() ).to_dict()
with pytest.raises(spack.compilers.UnknownCompilerError): with pytest.raises(spack.compilers.config.UnknownCompilerError):
entries_to_specs([spec_json]) entries_to_specs([spec_json])
@ -367,7 +367,7 @@ def test_read_cray_manifest_add_compiler_failure(
"""Check that cray manifest can be read even if some compilers cannot """Check that cray manifest can be read even if some compilers cannot
be added. be added.
""" """
orig_add_compiler_to_config = spack.compilers.add_compiler_to_config orig_add_compiler_to_config = spack.compilers.config.add_compiler_to_config
class fail_for_clang: class fail_for_clang:
def __init__(self): def __init__(self):
@ -380,7 +380,7 @@ def __call__(self, compiler, **kwargs):
return orig_add_compiler_to_config(compiler, **kwargs) return orig_add_compiler_to_config(compiler, **kwargs)
checker = fail_for_clang() checker = fail_for_clang()
monkeypatch.setattr(spack.compilers, "add_compiler_to_config", checker) monkeypatch.setattr(spack.compilers.config, "add_compiler_to_config", checker)
with tmpdir.as_cwd(): with tmpdir.as_cwd():
test_db_fname = "external-db.json" test_db_fname = "external-db.json"
@ -405,7 +405,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
cray_manifest.read(test_db_fname, True) cray_manifest.read(test_db_fname, True)
cray_manifest.read(test_db_fname, True) cray_manifest.read(test_db_fname, True)
compilers = spack.compilers.all_compilers() compilers = spack.compilers.config.all_compilers()
filtered = list( filtered = list(
c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112") c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112")
) )

View File

@ -9,7 +9,7 @@
import pytest import pytest
import spack.paths import spack.paths
from spack.compiler import _parse_non_system_link_dirs from spack.compilers.libraries import parse_non_system_link_dirs
drive = "" drive = ""
if sys.platform == "win32": if sys.platform == "win32":
@ -26,13 +26,13 @@
def allow_nonexistent_paths(monkeypatch): def allow_nonexistent_paths(monkeypatch):
# Allow nonexistent paths to be detected as part of the output # Allow nonexistent paths to be detected as part of the output
# for testing purposes. # for testing purposes.
monkeypatch.setattr(os.path, "isdir", lambda x: True) monkeypatch.setattr(spack.compilers.libraries, "filter_non_existing_dirs", lambda x: x)
def check_link_paths(filename, paths): def check_link_paths(filename, paths):
with open(os.path.join(datadir, filename)) as file: with open(os.path.join(datadir, filename)) as file:
output = file.read() output = file.read()
detected_paths = _parse_non_system_link_dirs(output) detected_paths = parse_non_system_link_dirs(output)
actual = detected_paths actual = detected_paths
expected = paths expected = paths

View File

@ -17,7 +17,8 @@
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import spack.compilers import spack.compilers.config
import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
import spack.install_test import spack.install_test
@ -277,7 +278,7 @@ def test_package_test_no_compilers(mock_packages, monkeypatch, capfd):
def compilers(compiler, arch_spec): def compilers(compiler, arch_spec):
return None return None
monkeypatch.setattr(spack.compilers, "compilers_for_spec", compilers) monkeypatch.setattr(spack.compilers.config, "compilers_for_spec", compilers)
s = spack.spec.Spec("pkg-a") s = spack.spec.Spec("pkg-a")
pkg = BaseTestPackage(s) pkg = BaseTestPackage(s)

View File

@ -11,8 +11,6 @@ markers =
regression: tests that fix a reported bug regression: tests that fix a reported bug
requires_executables: tests that requires certain executables in PATH to run requires_executables: tests that requires certain executables in PATH to run
nomockstage: use a stage area specifically created for this test, instead of relying on a common mock stage nomockstage: use a stage area specifically created for this test, instead of relying on a common mock stage
enable_compiler_verification: enable compiler verification within unit tests
enable_compiler_execution: enable compiler execution to detect link paths and libc
disable_clean_stage_check: avoid failing tests if there are leftover files in the stage area disable_clean_stage_check: avoid failing tests if there are leftover files in the stage area
not_on_windows: mark tests that are skipped on Windows not_on_windows: mark tests that are skipped on Windows
only_windows: mark tests that are skipped everywhere but Windows only_windows: mark tests that are skipped everywhere but Windows