diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index aeb1114c5af..56139843407 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -10,6 +10,7 @@ import errno import io import multiprocessing +import multiprocessing.connection import os import re import select diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py index 067e884b503..3feaef1d374 100644 --- a/lib/spack/spack/bootstrap/config.py +++ b/lib/spack/spack/bootstrap/config.py @@ -143,11 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]: def _add_compilers_if_missing() -> None: arch = spack.spec.ArchSpec.frontend_arch() if not spack.compilers.compilers_for_arch(arch): - new_compilers = spack.compilers.find_new_compilers( - mixed_toolchain=sys.platform == "darwin" - ) - if new_compilers: - spack.compilers.add_compilers_to_config(new_compilers) + spack.compilers.find_compilers() @contextlib.contextmanager diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 860f0a9ee0b..0194eda698f 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -35,13 +35,13 @@ def setup_parser(subparser): "--mixed-toolchain", action="store_true", default=sys.platform == "darwin", - help="Allow mixed toolchains (for example: clang, clang++, gfortran)", + help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)", ) mixed_toolchain_group.add_argument( "--no-mixed-toolchain", action="store_false", dest="mixed_toolchain", - help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)", + help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)", ) find_parser.add_argument("add_paths", nargs=argparse.REMAINDER) find_parser.add_argument( @@ -78,25 +78,16 @@ def setup_parser(subparser): def compiler_find(args): """Search either $PATH or a list of paths OR MODULES for compilers and add 
them to Spack's configuration. - """ - # None signals spack.compiler.find_compilers to use its default logic paths = args.add_paths or None - - # Below scope=None because we want new compilers that don't appear - # in any other configuration. - new_compilers = spack.compilers.find_new_compilers( - paths, scope=None, mixed_toolchain=args.mixed_toolchain - ) + new_compilers = spack.compilers.find_compilers(path_hints=paths, scope=args.scope) if new_compilers: - spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope) n = len(new_compilers) s = "s" if n > 1 else "" - - config = spack.config.CONFIG - filename = config.get_config_filename(args.scope, "compilers") - tty.msg("Added %d new compiler%s to %s" % (n, s, filename)) - colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4) + filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers") + tty.msg(f"Added {n:d} new compiler{s} to {filename}") + compiler_strs = sorted(f"{c.spec}" for c in new_compilers) + colify(reversed(compiler_strs), indent=4) else: tty.msg("Found no new compilers") tty.msg("Compilers are defined in the following files:") diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 9712b63077b..403fc8e7cb2 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -7,11 +7,9 @@ system and configuring Spack to use multiple compilers. 
""" import collections -import itertools -import multiprocessing.pool import os import warnings -from typing import Dict, List, Optional, Tuple +from typing import Dict, List, Optional import archspec.cpu @@ -63,6 +61,10 @@ } +#: Tag used to identify packages providing a compiler +COMPILER_TAG = "compiler" + + def pkg_spec_for_compiler(cspec): """Return the spec of the package that provides the compiler.""" for spec, package in _compiler_to_pkg.items(): @@ -111,150 +113,37 @@ def _to_dict(compiler): def get_compiler_config( + configuration: "spack.config.Configuration", *, scope: Optional[str] = None +) -> List[Dict]: + """Return the compiler configuration for the specified architecture.""" + compilers_yaml = configuration.get("compilers", scope=scope) + if not compilers_yaml: + return [] + return compilers_yaml + + +def get_compiler_config_from_packages( configuration: "spack.config.Configuration", *, scope: Optional[str] = None, init_config: bool = False, ) -> List[Dict]: - """Return the compiler configuration for the specified architecture.""" - config = configuration.get("compilers", scope=scope) or [] - if config or not init_config: - return config + """Return the compiler configuration from packages.yaml""" + packages_yaml = configuration.get("packages", scope=scope) + configs = CompilerConfigFactory.from_packages_yaml(packages_yaml) + if configs or not init_config: + return configs - merged_config = configuration.get("compilers") - if merged_config: + merged_packages_yaml = configuration.get("packages") + configs = CompilerConfigFactory.from_packages_yaml(merged_packages_yaml) + if configs: # Config is empty for this scope # Do not init config because there is a non-empty scope - return config + return configs - _init_compiler_config(configuration, scope=scope) - config = configuration.get("compilers", scope=scope) - return config - - -def get_compiler_config_from_packages( - configuration: "spack.config.Configuration", *, scope: Optional[str] = None -) -> 
List[Dict]: - """Return the compiler configuration from packages.yaml""" - config = configuration.get("packages", scope=scope) - if not config: - return [] - - packages = [] - compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys()) - for name, entry in config.items(): - if name not in compiler_package_names: - continue - externals_config = entry.get("externals", None) - if not externals_config: - continue - packages.extend(_compiler_config_from_package_config(externals_config)) - - return packages - - -def _compiler_config_from_package_config(config): - compilers = [] - for entry in config: - compiler = _compiler_config_from_external(entry) - if compiler: - compilers.append(compiler) - - return compilers - - -def _compiler_config_from_external(config): - extra_attributes_key = "extra_attributes" - compilers_key = "compilers" - c_key, cxx_key, fortran_key = "c", "cxx", "fortran" - - # Allow `@x.y.z` instead of `@=x.y.z` - spec = spack.spec.parse_with_version_concrete(config["spec"]) - - compiler_spec = spack.spec.CompilerSpec( - package_name_to_compiler_name.get(spec.name, spec.name), spec.version - ) - - err_header = f"The external spec '{spec}' cannot be used as a compiler" - - # If extra_attributes is not there I might not want to use this entry as a compiler, - # therefore just leave a debug message, but don't be loud with a warning. 
- if extra_attributes_key not in config: - tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key") - return None - extra_attributes = config[extra_attributes_key] - - # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler - if compilers_key not in extra_attributes: - warnings.warn( - f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'" - ) - return None - attribute_compilers = extra_attributes[compilers_key] - - if c_key not in attribute_compilers: - warnings.warn( - f"{err_header}: missing the C compiler path under " - f"'{extra_attributes_key}:{compilers_key}'" - ) - return None - c_compiler = attribute_compilers[c_key] - - # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace - if cxx_key not in attribute_compilers: - tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler") - - if fortran_key not in attribute_compilers: - tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler") - - # compilers format has cc/fc/f77, externals format has "c/fortran" - paths = { - "cc": c_compiler, - "cxx": attribute_compilers.get(cxx_key, None), - "fc": attribute_compilers.get(fortran_key, None), - "f77": attribute_compilers.get(fortran_key, None), - } - - if not spec.architecture: - host_platform = spack.platforms.host() - operating_system = host_platform.operating_system("default_os") - target = host_platform.target("default_target").microarchitecture - else: - target = spec.architecture.target - if not target: - target = spack.platforms.host().target("default_target") - target = target.microarchitecture - - operating_system = spec.os - if not operating_system: - host_platform = spack.platforms.host() - operating_system = host_platform.operating_system("default_os") - - compiler_entry = { - "compiler": { - "spec": str(compiler_spec), - "paths": paths, - "flags": extra_attributes.get("flags", {}), - 
"operating_system": str(operating_system), - "target": str(target.family), - "modules": config.get("modules", []), - "environment": extra_attributes.get("environment", {}), - "extra_rpaths": extra_attributes.get("extra_rpaths", []), - "implicit_rpaths": extra_attributes.get("implicit_rpaths", None), - } - } - return compiler_entry - - -def _init_compiler_config( - configuration: "spack.config.Configuration", *, scope: Optional[str] -) -> None: - """Compiler search used when Spack has no compilers.""" - compilers = find_compilers() - compilers_dict = [] - for compiler in compilers: - compilers_dict.append(_to_dict(compiler)) - configuration.set("compilers", compilers_dict, scope=scope) + find_compilers(scope=scope) + packages_yaml = configuration.get("packages", scope=scope) + return CompilerConfigFactory.from_packages_yaml(packages_yaml) def compiler_config_files(): @@ -278,9 +167,7 @@ def add_compilers_to_config(compilers, scope=None): compilers: a list of Compiler objects. scope: configuration scope to modify. """ - compiler_config = get_compiler_config( - configuration=spack.config.CONFIG, scope=scope, init_config=False - ) + compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope) for compiler in compilers: if not compiler.cc: tty.debug(f"{compiler.spec} does not have a C compiler") @@ -329,9 +216,7 @@ def _remove_compiler_from_scope(compiler_spec, scope): True if one or more compiler entries were actually removed, False otherwise """ assert scope is not None, "a specific scope is needed when calling this function" - compiler_config = get_compiler_config( - configuration=spack.config.CONFIG, scope=scope, init_config=False - ) + compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope) filtered_compiler_config = [ compiler_entry for compiler_entry in compiler_config @@ -359,10 +244,12 @@ def all_compilers_config( """Return a set of specs for all the compiler versions currently available to build with. 
These are instances of CompilerSpec. """ - from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope) - if from_packages_yaml: + from_compilers_yaml = get_compiler_config(configuration, scope=scope) + if from_compilers_yaml: init_config = False - from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config) + from_packages_yaml = get_compiler_config_from_packages( + configuration, scope=scope, init_config=init_config + ) result = from_compilers_yaml + from_packages_yaml # Dedupe entries by the compiler they represent @@ -380,79 +267,36 @@ def all_compiler_specs(scope=None, init_config=True): def find_compilers( - path_hints: Optional[List[str]] = None, *, mixed_toolchain=False + path_hints: Optional[List[str]] = None, + *, + scope: Optional[str] = None, + mixed_toolchain: bool = False, ) -> List["spack.compiler.Compiler"]: - """Return the list of compilers found in the paths given as arguments. + """Searches for compiler in the paths given as argument. If any new compiler is found, the + configuration is updated, and the list of new compiler objects is returned. Args: path_hints: list of path hints where to look for. A sensible default based on the ``PATH`` environment variable will be used if the value is None + scope: configuration scope to modify mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for a certain language """ + # TODO: pass max_workers to this function if path_hints is None: path_hints = get_path("PATH") default_paths = fs.search_paths_for_executables(*path_hints) - - # To detect the version of the compilers, we dispatch a certain number - # of function calls to different workers. Here we construct the list - # of arguments for each call. 
- arguments = [] - for o in all_os_classes(): - search_paths = getattr(o, "compiler_search_paths", default_paths) - arguments.extend(arguments_to_detect_version_fn(o, search_paths)) - - # Here we map the function arguments to the corresponding calls - tp = multiprocessing.pool.ThreadPool() - try: - detected_versions = tp.map(detect_version, arguments) - finally: - tp.close() - - def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool: - value, error = item - if error is None: - return True - try: - # This will fail on Python 2.6 if a non ascii - # character is in the error - tty.debug(error) - except UnicodeEncodeError: - pass - return False - - def remove_errors( - item: Tuple[Optional[DetectVersionArgs], Optional[str]] - ) -> DetectVersionArgs: - value, _ = item - assert value is not None - return value - - return make_compiler_list( - [remove_errors(detected) for detected in detected_versions if valid_version(detected)], - mixed_toolchain=mixed_toolchain, + compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True) + detected_packages = spack.detection.by_path( + compiler_pkgs, path_hints=default_paths, max_workers=1 ) - - -def find_new_compilers( - path_hints: Optional[List[str]] = None, - scope: Optional[str] = None, - *, - mixed_toolchain: bool = False, -): - """Same as ``find_compilers`` but return only the compilers that are not - already in compilers.yaml. - - Args: - path_hints: list of path hints where to look for. A sensible default based on the ``PATH`` - environment variable will be used if the value is None - scope: scope to look for a compiler. If None consider the merged configuration. 
- mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for - a certain language - """ - compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain) - - return select_new_compilers(compilers, scope) + new_compilers = spack.detection.update_configuration( + detected_packages, buildable=True, scope=scope + ) + return [ + _compiler_from_config_entry(c["compiler"]) + for c in CompilerConfigFactory.from_specs(new_compilers) + ] def select_new_compilers(compilers, scope=None): @@ -569,7 +413,7 @@ def compilers_for_spec( def compilers_for_arch(arch_spec, scope=None): - config = all_compilers_config(spack.config.CONFIG, scope=scope) + config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False) return list(get_compilers(config, arch_spec=arch_spec)) @@ -819,228 +663,6 @@ def all_compiler_types(): ) -def arguments_to_detect_version_fn( - operating_system: spack.operating_systems.OperatingSystem, paths: List[str] -) -> List[DetectVersionArgs]: - """Returns a list of DetectVersionArgs tuples to be used in a - corresponding function to detect compiler versions. - - The ``operating_system`` instance can customize the behavior of this - function by providing a method called with the same name. - - Args: - operating_system: the operating system on which we are looking for compilers - paths: paths to search for compilers - - Returns: - List of DetectVersionArgs tuples. Each item in the list will be later - mapped to the corresponding function call to detect the version of the - compilers in this OS. 
- """ - - def _default(search_paths: List[str]) -> List[DetectVersionArgs]: - command_arguments: List[DetectVersionArgs] = [] - files_to_be_tested = fs.files_in(*search_paths) - for compiler_name in supported_compilers_for_host_platform(): - compiler_cls = class_for_compiler_name(compiler_name) - - for language in ("cc", "cxx", "f77", "fc"): - # Select only the files matching a regexp - for (file, full_path), regexp in itertools.product( - files_to_be_tested, compiler_cls.search_regexps(language) - ): - match = regexp.match(file) - if match: - compiler_id = CompilerID(operating_system, compiler_name, None) - detect_version_args = DetectVersionArgs( - id=compiler_id, - variation=NameVariation(*match.groups()), - language=language, - path=full_path, - ) - command_arguments.append(detect_version_args) - - return command_arguments - - fn = getattr(operating_system, "arguments_to_detect_version_fn", _default) - return fn(paths) - - -def detect_version( - detect_version_args: DetectVersionArgs, -) -> Tuple[Optional[DetectVersionArgs], Optional[str]]: - """Computes the version of a compiler and adds it to the information - passed as input. - - As this function is meant to be executed by worker processes it won't - raise any exception but instead will return a (value, error) tuple that - needs to be checked by the code dispatching the calls. - - Args: - detect_version_args: information on the compiler for which we should detect the version. - - Returns: - A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the - version of the compiler was computed correctly and the first argument - of the tuple will contain it. Otherwise ``error`` is a string - containing an explanation on why the version couldn't be computed. 
- """ - - def _default(fn_args): - compiler_id = fn_args.id - language = fn_args.language - compiler_cls = class_for_compiler_name(compiler_id.compiler_name) - path = fn_args.path - - # Get compiler names and the callback to detect their versions - callback = getattr(compiler_cls, f"{language}_version") - - try: - version = callback(path) - if version and str(version).strip() and version != "unknown": - value = fn_args._replace(id=compiler_id._replace(version=version)) - return value, None - - error = f"Couldn't get version for compiler {path}".format(path) - except spack.util.executable.ProcessError as e: - error = f"Couldn't get version for compiler {path}\n" + str(e) - except spack.util.executable.ProcessTimeoutError as e: - error = f"Couldn't get version for compiler {path}\n" + str(e) - except Exception as e: - # Catching "Exception" here is fine because it just - # means something went wrong running a candidate executable. - error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format( - path, e.__class__.__name__, str(e) - ) - return None, error - - operating_system = detect_version_args.id.os - fn = getattr(operating_system, "detect_version", _default) - return fn(detect_version_args) - - -def make_compiler_list( - detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False -) -> List["spack.compiler.Compiler"]: - """Process a list of detected versions and turn them into a list of - compiler specs. 
- - Args: - detected_versions: list of DetectVersionArgs containing a valid version - mixed_toolchain: allow mixing compilers from different toolchains if langauge is missing - - Returns: - list: list of Compiler objects - """ - group_fn = lambda x: (x.id, x.variation, x.language) - sorted_compilers = sorted(detected_versions, key=group_fn) - - # Gather items in a dictionary by the id, name variation and language - compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {} - for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn): - compiler_id, name_variation, language = sort_key - by_compiler_id = compilers_d.setdefault(compiler_id, {}) - by_name_variation = by_compiler_id.setdefault(name_variation, {}) - by_name_variation[language] = next(x.path for x in group) - - def _default_make_compilers(cmp_id, paths): - operating_system, compiler_name, version = cmp_id - compiler_cls = class_for_compiler_name(compiler_name) - spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}") - paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] - # TODO: johnwparent - revist the following line as per discussion at: - # https://github.com/spack/spack/pull/33385/files#r1040036318 - target = archspec.cpu.host() - compiler = compiler_cls(spec, operating_system, str(target.family), paths) - return [compiler] - - # For compilers with the same compiler id: - # - # - Prefer with C compiler to without - # - Prefer with C++ compiler to without - # - Prefer no variations to variations (e.g., clang to clang-gpu) - # - sort_fn = lambda variation: ( - "cc" not in by_compiler_id[variation], # None last - "cxx" not in by_compiler_id[variation], # None last - getattr(variation, "prefix", None), - getattr(variation, "suffix", None), - ) - - # Flatten to a list of compiler id, primary variation and compiler dictionary - flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = [] - for compiler_id, by_compiler_id in compilers_d.items(): - ordered = 
sorted(by_compiler_id, key=sort_fn) - selected_variation = ordered[0] - selected = by_compiler_id[selected_variation] - - # Fill any missing parts from subsequent entries (without mixing toolchains) - for lang in ["cxx", "f77", "fc"]: - if lang not in selected: - next_lang = next( - (by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None - ) - if next_lang: - selected[lang] = next_lang - - flat_compilers.append((compiler_id, selected_variation, selected)) - - # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested) - if mixed_toolchain: - make_mixed_toolchain(flat_compilers) - - # Finally, create the compiler list - compilers: List["spack.compiler.Compiler"] = [] - for compiler_id, _, compiler in flat_compilers: - make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers) - candidates = make_compilers(compiler_id, compiler) - compilers.extend(x for x in candidates if x.cc is not None) - - return compilers - - -def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None: - """Add missing compilers across toolchains when they are missing for a particular language. 
- This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a - fortran compiler (no flang).""" - - # First collect the clangs that are missing a fortran compiler - clangs_without_flang = [ - (id, variation, compiler) - for id, variation, compiler in compilers - if id.compiler_name in ("clang", "apple-clang") - and "f77" not in compiler - and "fc" not in compiler - ] - if not clangs_without_flang: - return - - # Filter on GCCs with fortran compiler - gccs_with_fortran = [ - (id, variation, compiler) - for id, variation, compiler in compilers - if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler - ] - - # Sort these GCCs by "best variation" (no prefix / suffix first) - gccs_with_fortran.sort( - key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None)) - ) - - # Attach the optimal GCC fortran compiler to the clangs that don't have one - for clang_id, _, clang_compiler in clangs_without_flang: - gcc_compiler = next( - (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None - ) - - if not gcc_compiler: - continue - - # Update the fc / f77 entries - clang_compiler["f77"] = gcc_compiler["f77"] - clang_compiler["fc"] = gcc_compiler["fc"] - - def is_mixed_toolchain(compiler): """Returns True if the current compiler is a mixed toolchain, False otherwise. 
@@ -1087,6 +709,155 @@ def name_matches(name, name_list): return False +_EXTRA_ATTRIBUTES_KEY = "extra_attributes" +_COMPILERS_KEY = "compilers" +_C_KEY = "c" +_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran" + + +class CompilerConfigFactory: + """Class aggregating all ways of constructing a list of compiler config entries.""" + + @staticmethod + def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]: + result = [] + compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys()) + for s in specs: + if s.name not in compiler_package_names: + continue + + candidate = CompilerConfigFactory._from_external_spec(s) + if candidate is None: + continue + + result.append(candidate) + return result + + @staticmethod + def from_packages_yaml(packages_yaml) -> List[dict]: + compiler_specs = [] + compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys()) + for name, entry in packages_yaml.items(): + if name not in compiler_package_names: + continue + + externals_config = entry.get("externals", None) + if not externals_config: + continue + + current_specs = [] + for current_external in externals_config: + compiler = CompilerConfigFactory._spec_from_external_config(current_external) + if compiler: + current_specs.append(compiler) + compiler_specs.extend(current_specs) + + return CompilerConfigFactory.from_specs(compiler_specs) + + @staticmethod + def _spec_from_external_config(config): + # Allow `@x.y.z` instead of `@=x.y.z` + err_header = f"The external spec '{config['spec']}' cannot be used as a compiler" + # If extra_attributes is not there I might not want to use this entry as a compiler, + # therefore just leave a debug message, but don't be loud with a warning. 
+ if _EXTRA_ATTRIBUTES_KEY not in config: + tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key") + return None + extra_attributes = config[_EXTRA_ATTRIBUTES_KEY] + result = spack.spec.Spec( + str(spack.spec.parse_with_version_concrete(config["spec"])), + external_modules=config.get("modules"), + ) + result.extra_attributes = extra_attributes + return result + + @staticmethod + def _from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]: + spec = spack.spec.parse_with_version_concrete(spec) + extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None) + if extra_attributes is None: + return None + + paths = CompilerConfigFactory._extract_compiler_paths(spec) + if paths is None: + return None + + compiler_spec = spack.spec.CompilerSpec( + package_name_to_compiler_name.get(spec.name, spec.name), spec.version + ) + + operating_system, target = CompilerConfigFactory._extract_os_and_target(spec) + + compiler_entry = { + "compiler": { + "spec": str(compiler_spec), + "paths": paths, + "flags": extra_attributes.get("flags", {}), + "operating_system": str(operating_system), + "target": str(target.family), + "modules": getattr(spec, "external_modules", []), + "environment": extra_attributes.get("environment", {}), + "extra_rpaths": extra_attributes.get("extra_rpaths", []), + "implicit_rpaths": extra_attributes.get("implicit_rpaths", None), + } + } + return compiler_entry + + @staticmethod + def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]: + err_header = f"The external spec '{spec}' cannot be used as a compiler" + extra_attributes = spec.extra_attributes + # If I have 'extra_attributes' warn if 'compilers' is missing, + # or we don't have a C compiler + if _COMPILERS_KEY not in extra_attributes: + warnings.warn( + f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'" + ) + return None + attribute_compilers = extra_attributes[_COMPILERS_KEY] + + if _C_KEY not in 
attribute_compilers: + warnings.warn( + f"{err_header}: missing the C compiler path under " + f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'" + ) + return None + c_compiler = attribute_compilers[_C_KEY] + + # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace + if _CXX_KEY not in attribute_compilers: + tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler") + + if _FORTRAN_KEY not in attribute_compilers: + tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler") + + # compilers format has cc/fc/f77, externals format has "c/fortran" + return { + "cc": c_compiler, + "cxx": attribute_compilers.get(_CXX_KEY, None), + "fc": attribute_compilers.get(_FORTRAN_KEY, None), + "f77": attribute_compilers.get(_FORTRAN_KEY, None), + } + + @staticmethod + def _extract_os_and_target(spec: "spack.spec.Spec"): + if not spec.architecture: + host_platform = spack.platforms.host() + operating_system = host_platform.operating_system("default_os") + target = host_platform.target("default_target").microarchitecture + else: + target = spec.architecture.target + if not target: + target = spack.platforms.host().target("default_target") + target = target.microarchitecture + + operating_system = spec.os + if not operating_system: + host_platform = spack.platforms.host() + operating_system = host_platform.operating_system("default_os") + return operating_system, target + + class InvalidCompilerConfigurationError(spack.error.SpackError): def __init__(self, compiler_spec): super().__init__( diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py index e043c6fb8a4..596d2ccd7ef 100644 --- a/lib/spack/spack/detection/common.py +++ b/lib/spack/spack/detection/common.py @@ -239,7 +239,7 @@ def update_configuration( external_entries = pkg_config.get("externals", []) assert not isinstance(external_entries, bool), "unexpected value for external entry" - 
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries]) + all_new_specs.extend([x.spec for x in new_entries]) if buildable is False: pkg_config["buildable"] = False pkg_to_cfg[package_name] = pkg_config diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 4114a1cdb95..877e294b743 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1644,7 +1644,7 @@ def _concretize_separately(self, tests=False): # Ensure we have compilers in compilers.yaml to avoid that # processes try to write the config file in parallel - _ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True) + _ = spack.compilers.all_compilers_config(spack.config.CONFIG) # Early return if there is nothing to do if len(args) == 0: diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index e12789055c9..b3f1248ce7e 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -285,16 +285,14 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool): return NoDuplicatesCounter(specs, tests=tests) -def all_compilers_in_config(configuration): - return spack.compilers.all_compilers_from(configuration) - - def all_libcs() -> Set[spack.spec.Spec]: """Return a set of all libc specs targeted by any configured compiler. 
If none, fall back to libc determined from the current Python process if dynamically linked.""" libcs = { - c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc + c.default_libc + for c in spack.compilers.all_compilers_from(spack.config.CONFIG) + if c.default_libc } if libcs: @@ -613,7 +611,7 @@ def _external_config_with_implicit_externals(configuration): if not using_libc_compatibility(): return packages_yaml - for compiler in all_compilers_in_config(configuration): + for compiler in spack.compilers.all_compilers_from(configuration): libc = compiler.default_libc if libc: entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path} @@ -3002,7 +3000,7 @@ class CompilerParser: def __init__(self, configuration) -> None: self.compilers: Set[KnownCompiler] = set() - for c in all_compilers_in_config(configuration): + for c in spack.compilers.all_compilers_from(configuration): if using_libc_compatibility() and not c_compiler_runs(c): tty.debug( f"the C compiler {c.cc} does not exist, or does not run correctly." 
@@ -3466,7 +3464,7 @@ def reorder_flags(self): """ # reverse compilers so we get highest priority compilers that share a spec compilers = dict( - (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG)) + (c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG)) ) cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse()) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index ccdc498214c..eddbbf934ca 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4341,9 +4341,9 @@ def attach_git_version_lookup(self): v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname)) -def parse_with_version_concrete(string: str, compiler: bool = False): +def parse_with_version_concrete(spec_like: Union[str, Spec], compiler: bool = False): """Same as Spec(string), but interprets @x as @=x""" - s: Union[CompilerSpec, Spec] = CompilerSpec(string) if compiler else Spec(string) + s: Union[CompilerSpec, Spec] = CompilerSpec(spec_like) if compiler else Spec(spec_like) interpreted_version = s.versions.concrete_range_as_version if interpreted_version: s.versions = vn.VersionList([interpreted_version]) diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 06cff7f5f76..64d876c669c 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -8,7 +8,9 @@ import io import json import os +import pathlib import platform +import shutil import sys import tarfile import urllib.error @@ -16,12 +18,11 @@ import urllib.response from pathlib import Path, PurePath -import py import pytest import archspec.cpu -from llnl.util.filesystem import join_path, visit_directory_tree +from llnl.util.filesystem import copy_tree, join_path, visit_directory_tree from llnl.util.symlink import readlink import spack.binary_distribution as bindist @@ -81,72 +82,67 @@ def test_mirror(mirror_dir): @pytest.fixture(scope="module") -def 
config_directory(tmpdir_factory): - tmpdir = tmpdir_factory.mktemp("test_configs") - # restore some sane defaults for packages and config - config_path = py.path.local(spack.paths.etc_path) - modules_yaml = config_path.join("defaults", "modules.yaml") - os_modules_yaml = config_path.join( - "defaults", "%s" % platform.system().lower(), "modules.yaml" - ) - packages_yaml = config_path.join("defaults", "packages.yaml") - config_yaml = config_path.join("defaults", "config.yaml") - repos_yaml = config_path.join("defaults", "repos.yaml") - tmpdir.ensure("site", dir=True) - tmpdir.ensure("user", dir=True) - tmpdir.ensure("site/%s" % platform.system().lower(), dir=True) - modules_yaml.copy(tmpdir.join("site", "modules.yaml")) - os_modules_yaml.copy(tmpdir.join("site/%s" % platform.system().lower(), "modules.yaml")) - packages_yaml.copy(tmpdir.join("site", "packages.yaml")) - config_yaml.copy(tmpdir.join("site", "config.yaml")) - repos_yaml.copy(tmpdir.join("site", "repos.yaml")) - yield tmpdir - tmpdir.remove() +def config_directory(tmp_path_factory): + # Copy defaults to a temporary "site" scope + defaults_dir = tmp_path_factory.mktemp("test_configs") + config_path = pathlib.Path(spack.paths.etc_path) + copy_tree(str(config_path / "defaults"), str(defaults_dir / "site")) + + # Create a "user" scope + (defaults_dir / "user").mkdir() + + # Detect compilers + cfg_scopes = [ + spack.config.DirectoryConfigScope(name, str(defaults_dir / name)) + for name in [f"site/{platform.system().lower()}", "site", "user"] + ] + with spack.config.use_configuration(*cfg_scopes): + _ = spack.compilers.find_compilers(scope="site") + + yield defaults_dir + + shutil.rmtree(str(defaults_dir)) @pytest.fixture(scope="function") -def default_config(tmpdir, config_directory, monkeypatch, install_mockery): +def default_config(tmp_path, config_directory, monkeypatch, install_mockery): # This fixture depends on install_mockery to ensure # there is a clear order of initialization. 
The substitution of the # config scopes here is done on top of the substitution that comes with # install_mockery - mutable_dir = tmpdir.mkdir("mutable_config").join("tmp") - config_directory.copy(mutable_dir) + mutable_dir = tmp_path / "mutable_config" / "tmp" + mutable_dir.mkdir(parents=True) + copy_tree(str(config_directory), str(mutable_dir)) - cfg = spack.config.Configuration( - *[ - spack.config.DirectoryConfigScope(name, str(mutable_dir)) - for name in [f"site/{platform.system().lower()}", "site", "user"] - ] - ) + scopes = [ + spack.config.DirectoryConfigScope(name, str(mutable_dir / name)) + for name in [f"site/{platform.system().lower()}", "site", "user"] + ] - spack.config.CONFIG, old_config = cfg, spack.config.CONFIG - spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path]) - njobs = spack.config.get("config:build_jobs") - if not njobs: - spack.config.set("config:build_jobs", 4, scope="user") - extensions = spack.config.get("config:template_dirs") - if not extensions: - spack.config.set( - "config:template_dirs", - [os.path.join(spack.paths.share_path, "templates")], - scope="user", - ) + with spack.config.use_configuration(*scopes): + spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path]) + njobs = spack.config.get("config:build_jobs") + if not njobs: + spack.config.set("config:build_jobs", 4, scope="user") + extensions = spack.config.get("config:template_dirs") + if not extensions: + spack.config.set( + "config:template_dirs", + [os.path.join(spack.paths.share_path, "templates")], + scope="user", + ) - mutable_dir.ensure("build_stage", dir=True) - build_stage = spack.config.get("config:build_stage") - if not build_stage: - spack.config.set( - "config:build_stage", [str(mutable_dir.join("build_stage"))], scope="user" - ) - timeout = spack.config.get("config:connect_timeout") - if not timeout: - spack.config.set("config:connect_timeout", 10, scope="user") + (mutable_dir / "build_stage").mkdir() + build_stage = 
spack.config.get("config:build_stage") + if not build_stage: + spack.config.set( + "config:build_stage", [str(mutable_dir / "build_stage")], scope="user" + ) + timeout = spack.config.get("config:connect_timeout") + if not timeout: + spack.config.set("config:connect_timeout", 10, scope="user") - yield spack.config.CONFIG - - spack.config.CONFIG = old_config - mutable_dir.remove() + yield spack.config.CONFIG @pytest.fixture(scope="function") diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py index 2fde7fbc929..f881e0471e2 100644 --- a/lib/spack/spack/test/cmd/compiler.py +++ b/lib/spack/spack/test/cmd/compiler.py @@ -131,7 +131,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages): @pytest.mark.not_on_windows("Cannot execute bash script on Windows") -def test_compiler_add(mutable_config, mock_packages, mock_executable): +def test_compiler_add(mutable_config, mock_executable): """Tests that we can add a compiler to configuration.""" expected_version = "4.5.3" gcc_path = mock_executable( @@ -160,44 +160,6 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable): assert new_compiler.version == spack.version.Version(expected_version) -@pytest.mark.not_on_windows("Cannot execute bash script on Windows") -@pytest.mark.regression("17590") -@pytest.mark.parametrize("mixed_toolchain", [True, False]) -def test_compiler_find_mixed_suffixes( - mixed_toolchain, no_compilers_yaml, working_env, compilers_dir -): - """Ensure that we'll mix compilers with different suffixes when necessary.""" - os.environ["PATH"] = str(compilers_dir) - output = compiler( - "find", "--scope=site", "--mixed-toolchain" if mixed_toolchain else "--no-mixed-toolchain" - ) - - assert "clang@11.0.0" in output - assert "gcc@8.4.0" in output - - config = spack.compilers.get_compiler_config( - no_compilers_yaml, scope="site", init_config=False - ) - clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == 
"clang@=11.0.0") - gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0") - - gfortran_path = str(compilers_dir / "gfortran-8") - - assert clang["paths"] == { - "cc": str(compilers_dir / "clang"), - "cxx": str(compilers_dir / "clang++"), - "f77": gfortran_path if mixed_toolchain else None, - "fc": gfortran_path if mixed_toolchain else None, - } - - assert gcc["paths"] == { - "cc": str(compilers_dir / "gcc-8"), - "cxx": str(compilers_dir / "g++-8"), - "f77": gfortran_path, - "fc": gfortran_path, - } - - @pytest.mark.not_on_windows("Cannot execute bash script on Windows") @pytest.mark.regression("17590") def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir): @@ -209,13 +171,15 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler os.environ["PATH"] = str(compilers_dir) output = compiler("find", "--scope=site") - assert "clang@11.0.0" in output - assert "gcc@8.4.0" in output + assert "clang@=11.0.0" in output + assert "gcc@=8.4.0" in output - config = spack.compilers.get_compiler_config( - no_compilers_yaml, scope="site", init_config=False + compiler_config = spack.compilers.get_compiler_config_from_packages( + no_compilers_yaml, scope="site" + ) + clang = next( + c["compiler"] for c in compiler_config if c["compiler"]["spec"] == "clang@=11.0.0" ) - clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0") assert clang["paths"]["cc"] == str(compilers_dir / "clang") assert clang["paths"]["cxx"] == str(compilers_dir / "clang++") @@ -229,14 +193,14 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir) for name in ("gcc-8", "g++-8", "gfortran-8"): shutil.copy(compilers_dir / name, new_dir / name) # Set PATH to have the new folder searched first - os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir)) + os.environ["PATH"] = f"{str(new_dir)}:{str(compilers_dir)}" compiler("find", "--scope=site") - config = 
spack.compilers.get_compiler_config( - no_compilers_yaml, scope="site", init_config=False + compiler_config = spack.compilers.get_compiler_config_from_packages( + no_compilers_yaml, scope="site" ) - gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0") + gcc = next(c["compiler"] for c in compiler_config if c["compiler"]["spec"] == "gcc@=8.4.0") assert gcc["paths"] == { "cc": str(new_dir / "gcc-8"), "cxx": str(new_dir / "g++-8"), diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py index 0884f9b1a4e..db9be42745f 100644 --- a/lib/spack/spack/test/compilers/basics.py +++ b/lib/spack/spack/test/compilers/basics.py @@ -19,27 +19,6 @@ from spack.util.executable import Executable, ProcessError -@pytest.fixture() -def make_args_for_version(monkeypatch): - def _factory(version, path="/usr/bin/gcc"): - class MockOs: - pass - - compiler_name = "gcc" - compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) - monkeypatch.setattr(compiler_cls, "cc_version", lambda x: version) - - compiler_id = spack.compilers.CompilerID( - os=MockOs, compiler_name=compiler_name, version=None - ) - variation = spack.compilers.NameVariation(prefix="", suffix="") - return spack.compilers.DetectVersionArgs( - id=compiler_id, variation=variation, language="cc", path=path - ) - - return _factory - - def test_multiple_conflicting_compiler_definitions(mutable_config): compiler_def = { "compiler": { @@ -82,21 +61,6 @@ def test_get_compiler_duplicates(mutable_config, compiler_factory): assert len(duplicates) == 1 -@pytest.mark.parametrize( - "input_version,expected_version,expected_error", - [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)], -) -def test_version_detection_is_empty( - make_args_for_version, input_version, expected_version, expected_error -): - args = make_args_for_version(version=input_version) - result, error = spack.compilers.detect_version(args) - if not error: - 
assert result.id.version == expected_version - - assert error == expected_error - - def test_compiler_flags_from_config_are_grouped(): compiler_entry = { "spec": "intel@17.0.2", @@ -906,51 +870,30 @@ def prepare_executable(name): @pytest.mark.parametrize( - "detected_versions,expected_length", + "compilers_extra_attributes,expected_length", [ # If we detect a C compiler we expect the result to be valid - ( - [ - spack.compilers.DetectVersionArgs( - id=spack.compilers.CompilerID( - os="ubuntu20.04", compiler_name="clang", version="12.0.0" - ), - variation=spack.compilers.NameVariation(prefix="", suffix="-12"), - language="cc", - path="/usr/bin/clang-12", - ), - spack.compilers.DetectVersionArgs( - id=spack.compilers.CompilerID( - os="ubuntu20.04", compiler_name="clang", version="12.0.0" - ), - variation=spack.compilers.NameVariation(prefix="", suffix="-12"), - language="cxx", - path="/usr/bin/clang++-12", - ), - ], - 1, - ), + ({"c": "/usr/bin/clang-12", "cxx": "/usr/bin/clang-12"}, 1), # If we detect only a C++ compiler we expect the result to be discarded - ( - [ - spack.compilers.DetectVersionArgs( - id=spack.compilers.CompilerID( - os="ubuntu20.04", compiler_name="clang", version="12.0.0" - ), - variation=spack.compilers.NameVariation(prefix="", suffix="-12"), - language="cxx", - path="/usr/bin/clang++-12", - ) - ], - 0, - ), + ({"cxx": "/usr/bin/clang-12"}, 0), ], ) -def test_detection_requires_c_compiler(detected_versions, expected_length): +def test_detection_requires_c_compiler(compilers_extra_attributes, expected_length): """Tests that compilers automatically added to the configuration have at least a C compiler. 
""" - result = spack.compilers.make_compiler_list(detected_versions) + packages_yaml = { + "llvm": { + "externals": [ + { + "spec": "clang@12.0.0", + "prefix": "/usr", + "extra_attributes": {"compilers": compilers_extra_attributes}, + } + ] + } + } + result = spack.compilers.CompilerConfigFactory.from_packages_yaml(packages_yaml) assert len(result) == expected_length diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 1af1fb1d90f..102ec7a96d8 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -493,11 +493,13 @@ def fake_package_list(compiler, architecture, pkgs): def test_bootstrapping_compilers_with_different_names_from_spec( install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host ): + """Tests that, when we bootstrap '%oneapi' we can translate it to the + 'intel-oneapi-compilers' package. + """ with spack.config.override("config:install_missing_compilers", True): with spack.concretize.disable_compiler_existence_check(): spec = spack.spec.Spec("trivial-install-test-package%oneapi@=22.2.0").concretized() spec.package.do_install() - assert ( spack.spec.CompilerSpec("oneapi@=22.2.0") in spack.compilers.all_compiler_specs() ) diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 4a905d2ddd2..01da46115b5 100644 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1064,9 +1064,9 @@ set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -d 'show this help message and exit' complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -f -a mixed_toolchain -complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, 
gfortran)' +complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -d '(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -f -a mixed_toolchain -complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d '(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -d 'configuration scope to modify' @@ -1076,9 +1076,9 @@ set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed- complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -d 'show this help message and exit' complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -f -a mixed_toolchain -complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -d '(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -f -a mixed_toolchain -complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d '(DEPRECATED) Do not allow mixed toolchains (for 
example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -d 'configuration scope to modify' diff --git a/var/spack/repos/builtin.mock/packages/gcc/package.py b/var/spack/repos/builtin.mock/packages/gcc/package.py index 05518419ddb..fa2b0309ff1 100644 --- a/var/spack/repos/builtin.mock/packages/gcc/package.py +++ b/var/spack/repos/builtin.mock/packages/gcc/package.py @@ -16,11 +16,23 @@ class Gcc(CompilerPackage, Package): version("2.0", md5="abcdef0123456789abcdef0123456789") version("3.0", md5="def0123456789abcdef0123456789abc") + variant( + "languages", + default="c,c++,fortran", + values=("c", "c++", "fortran"), + multi=True, + description="Compilers and runtime libraries to build", + ) + depends_on("conflict", when="@3.0") c_names = ["gcc"] cxx_names = ["g++"] fortran_names = ["gfortran"] + compiler_prefixes = [r"\w+-\w+-\w+-"] + compiler_suffixes = [r"-mp-\d+(?:\.\d+)?", r"-\d+(?:\.\d+)?", r"\d\d"] + compiler_version_regex = r"(?