Overhaul the spack compiler command

This reverts commit 2c47dddbc1.

Now, `spack compiler` writes to `packages.yaml` by default. Entries in the
old `compilers.yaml` are converted to external specs, as a way to support
legacy configuration.

Since this operation is expensive, an environment variable can be used to
enforce the deprecation of `compilers.yaml`.

The `--mixed-toolchain` option has been deprecated, since it stops making
sense once compilers are treated as nodes.
parent da02a4a606
commit 285926cb69
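The conversion from legacy `compilers.yaml` entries to `packages.yaml` externals mentioned above can be pictured with a small, hypothetical sketch. It uses plain Python dictionaries that mirror the documented config formats; the helper name and the exact fields produced are assumptions for illustration, not Spack's actual code:

def convert_legacy_compiler_entry(entry: dict) -> dict:
    """Hypothetical helper: map one compilers.yaml entry to a packages.yaml external."""
    compiler = entry["compiler"]
    # The package name is the compiler name, e.g. "gcc" from "gcc@12.3.0".
    name = compiler["spec"].split("@", 1)[0]
    paths = compiler["paths"]
    return {
        name: {
            "externals": [
                {
                    "spec": compiler["spec"],
                    "extra_attributes": {
                        # Per-language compiler executables replace the old cc/cxx/f77/fc keys.
                        "compilers": {
                            "c": paths.get("cc"),
                            "cxx": paths.get("cxx"),
                            "fortran": paths.get("fc") or paths.get("f77"),
                        }
                    },
                }
            ]
        }
    }

legacy = {
    "compiler": {
        "spec": "gcc@12.3.0",
        "paths": {
            "cc": "/usr/bin/gcc",
            "cxx": "/usr/bin/g++",
            "f77": "/usr/bin/gfortran",
            "fc": "/usr/bin/gfortran",
        },
        "operating_system": "ubuntu22.04",
        "modules": [],
    }
}

print(convert_legacy_compiler_entry(legacy))

The actual conversion likely carries over more information (prefix, flags, environment, modules); the sketch only illustrates the overall shape of the mapping.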
@@ -4,6 +4,7 @@
 
 import argparse
 import sys
+import warnings
 
 import llnl.util.tty as tty
 from llnl.util.lang import index_by
@@ -34,13 +35,13 @@ def setup_parser(subparser):
         "--mixed-toolchain",
         action="store_true",
         default=sys.platform == "darwin",
-        help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
+        help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)",
     )
     mixed_toolchain_group.add_argument(
         "--no-mixed-toolchain",
         action="store_false",
         dest="mixed_toolchain",
-        help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
+        help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
     )
     find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
     find_parser.add_argument(
@@ -79,19 +80,22 @@ def compiler_find(args):
     """Search either $PATH or a list of paths OR MODULES for compilers and
     add them to Spack's configuration.
     """
+    if args.mixed_toolchain:
+        warnings.warn(
+            "The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
+            "has no effect. The option will be removed in Spack v0.25"
+        )
+
     paths = args.add_paths or None
     new_compilers = spack.compilers.find_compilers(
-        path_hints=paths,
-        scope=args.scope,
-        mixed_toolchain=args.mixed_toolchain,
-        max_workers=args.jobs,
+        path_hints=paths, scope=args.scope, max_workers=args.jobs
     )
     if new_compilers:
         n = len(new_compilers)
         s = "s" if n > 1 else ""
-        filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
+        filename = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         tty.msg(f"Added {n:d} new compiler{s} to {filename}")
-        compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
+        compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers)
         colify(reversed(compiler_strs), indent=4)
     else:
         tty.msg("Found no new compilers")
@@ -100,52 +104,69 @@ def compiler_find(args):
 
 
 def compiler_remove(args):
-    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
-    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
+    remover = spack.compilers.CompilerRemover(spack.config.CONFIG)
+    candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope)
+    if not candidates:
+        tty.die(f"No compiler matches '{args.compiler_spec}'")
 
-    if not candidate_compilers:
-        tty.die("No compilers match spec %s" % compiler_spec)
+    compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates))
 
-    if not args.all and len(candidate_compilers) > 1:
-        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
-        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
-        tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
+    if not args.all and len(candidates) > 1:
+        tty.error(f"multiple compilers match the spec '{args.compiler_spec}':")
+        print()
+        colify(compiler_strs, indent=4)
+        print()
+        print(
+            "Either use a stricter spec to select only one, or use `spack compiler remove -a`"
+            " to remove all of them."
+        )
         sys.exit(1)
 
-    for current_compiler in candidate_compilers:
-        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
-        tty.msg(f"{current_compiler.spec.display_str} has been removed")
+    remover.flush()
+    tty.msg("The following compilers have been removed:")
+    print()
+    colify(compiler_strs, indent=4)
+    print()
 
 
 def compiler_info(args):
     """Print info about all compilers matching a spec."""
-    cspec = spack.spec.CompilerSpec(args.compiler_spec)
-    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
+    query = spack.spec.Spec(args.compiler_spec)
+    all_compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)
+
+    compilers = [x for x in all_compilers if x.satisfies(query)]
 
     if not compilers:
-        tty.die("No compilers match spec %s" % cspec)
+        tty.die(f"No compilers match spec {query.cformat()}")
     else:
         for c in compilers:
-            print(c.spec.display_str + ":")
-            print("\tpaths:")
-            for cpath in ["cc", "cxx", "f77", "fc"]:
-                print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
-            if c.flags:
-                print("\tflags:")
-                for flag, flag_value in c.flags.items():
-                    print("\t\t%s = %s" % (flag, flag_value))
-            if len(c.environment) != 0:
-                if len(c.environment.get("set", {})) != 0:
-                    print("\tenvironment:")
-                    print("\t set:")
-                    for key, value in c.environment["set"].items():
-                        print("\t %s = %s" % (key, value))
-            if c.extra_rpaths:
-                print("\tExtra rpaths:")
-                for extra_rpath in c.extra_rpaths:
-                    print("\t\t%s" % extra_rpath)
-            print("\tmodules = %s" % c.modules)
-            print("\toperating system = %s" % c.operating_system)
+            print(f"{c.cformat()}:")
+            print(f" prefix: {c.external_path}")
+            extra_attributes = getattr(c, "extra_attributes", {})
+            if "compilers" in extra_attributes:
+                print(" compilers:")
+                for language, exe in extra_attributes.get("compilers", {}).items():
+                    print(f" {language}: {exe}")
+            if "flags" in extra_attributes:
+                print(" flags:")
+                for flag, flag_value in extra_attributes["flags"].items():
+                    print(f" {flag} = {flag_value}")
+            # FIXME (compiler as nodes): recover this printing
+            # if "environment" in extra_attributes:
+            #     if len(c.environment.get("set", {})) != 0:
+            #         print("\tenvironment:")
+            #         print("\t set:")
+            #         for key, value in c.environment["set"].items():
+            #             print("\t %s = %s" % (key, value))
+            if "extra_rpaths" in extra_attributes:
+                print(" extra rpaths:")
+                for extra_rpath in extra_attributes["extra_rpaths"]:
+                    print(f" {extra_rpath}")
+            if getattr(c, "external_modules", []):
+                print(" modules: ")
+                for module in c.external_modules:
+                    print(f" {module}")
             print()
 
 
 def compiler_list(args):
@@ -162,7 +183,7 @@ def compiler_list(args):
         tty.msg(msg)
         return
 
-    index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))
+    index = index_by(compilers, spack.compilers.name_os_target)
 
     tty.msg("Available compilers")
 
@@ -181,10 +202,10 @@ def compiler_list(args):
         name, os, target = key
         os_str = os
         if target:
-            os_str += "-%s" % target
-        cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
+            os_str += f"-{target}"
+        cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}"
         tty.hline(colorize(cname), char="-")
-        colify(reversed(sorted(c.spec.display_str for c in compilers)))
+        colify(reversed(sorted(c.format("{name}@{version}") for c in compilers)))
 
 
 def compiler(parser, args):
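A design note on the `compiler_remove` rewrite above: removal is now two-phase. Matching entries are first collected via `mark_compilers()`, callers can inspect or reject the result, and the configuration is only rewritten when `flush()` is called. A minimal sketch of that mark-then-flush pattern, with hypothetical names rather than Spack's actual CompilerRemover, could look like:

class MarkThenFlushRemover:
    """Collect entries to delete first; write the change back only once, on flush()."""

    def __init__(self, config: dict):
        self.config = config
        self._marked: list = []

    def mark(self, match: str) -> list:
        """Record every entry containing `match` without touching the config yet."""
        self._marked = [e for e in self.config.get("compilers", []) if match in e]
        return self._marked

    def flush(self) -> None:
        """Apply all recorded removals in a single write."""
        self.config["compilers"] = [
            e for e in self.config.get("compilers", []) if e not in self._marked
        ]

cfg = {"compilers": ["gcc@12.3.0", "gcc@13.2.0", "clang@17.0.1"]}
remover = MarkThenFlushRemover(cfg)
if remover.mark("gcc"):
    remover.flush()
print(cfg)  # {'compilers': ['clang@17.0.1']}

Deferring the write means the error paths (no match, or multiple matches without -a) can bail out without leaving the configuration half-modified.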
(File diff suppressed because it is too large.)
@@ -718,6 +718,9 @@ def print_section(self, section: str, blame: bool = False, *, scope=None) -> None:
             raise spack.error.ConfigError(f"cannot read '{section}' configuration") from e
 
 
+ConfigurationType = Union[Configuration, lang.Singleton]
+
+
 @contextlib.contextmanager
 def override(
     path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
@@ -222,13 +222,13 @@ def read(path, apply_updates):
     compilers = list()
     if "compilers" in json_data:
         compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
-    tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
+    tty.debug(f"{path}: {str(len(compilers))} compilers read from manifest")
     # Filter out the compilers that already appear in the configuration
    compilers = spack.compilers.select_new_compilers(compilers)
    if apply_updates and compilers:
        for compiler in compilers:
            try:
-                spack.compilers.add_compilers_to_config([compiler])
+                spack.compilers.add_compiler_to_config(compiler)
            except Exception:
                warnings.warn(
                    f"Could not add compiler {str(compiler.spec)}: "
@@ -1375,8 +1375,7 @@ def compiler(self):
         """Get the spack.compiler.Compiler object used to build this package"""
         if not self.spec.concrete:
             raise ValueError("Can only get a compiler for a concrete package.")
-
-        return spack.compilers.compiler_for_spec(self.spec.compiler, self.spec.architecture)
+        raise NotImplementedError("Wrapper to old API still to be implemented")
 
     def url_version(self, version):
         """