diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index 4484620d1f5..bde5ba85b81 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -276,10 +276,6 @@ def update_external_dependencies(self, extendee_spec=None): if not python.architecture.target: python.architecture.target = archspec.cpu.host().family.name - # Ensure compiler information is present - if not python.compiler: - python.compiler = self.spec.compiler - python.external_path = self.spec.external_path python._mark_concrete() self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=()) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index cca1ad4262f..b7c87b5f18a 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -62,6 +62,7 @@ parse_term, ) from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter +from .libc import CompilerPropertyDetector from .requirements import RequirementKind, RequirementParser, RequirementRule from .version_order import concretization_version_order @@ -284,10 +285,10 @@ def all_libcs() -> Set[spack.spec.Spec]: libc determined from the current Python process if dynamically linked.""" libcs = { - c.default_libc + CompilerPropertyDetector(c).default_libc() for c in spack.compilers.all_compilers_from(spack.config.CONFIG) - if c.default_libc } + libcs.discard(None) if libcs: return libcs @@ -296,7 +297,7 @@ def all_libcs() -> Set[spack.spec.Spec]: return {libc} if libc else set() -def libc_is_compatible(lhs: spack.spec.Spec, rhs: spack.spec.Spec) -> List[spack.spec.Spec]: +def libc_is_compatible(lhs: spack.spec.Spec, rhs: spack.spec.Spec) -> bool: return ( lhs.name == rhs.name and lhs.external_path == rhs.external_path @@ -310,7 +311,7 @@ def using_libc_compatibility() -> bool: def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool: - return compiler.compiler_verbose_output is not None + return CompilerPropertyDetector(compiler).compiler_verbose_output() is not None def extend_flag_list(flag_list, new_flags): @@ -601,9 +602,9 @@ def _external_config_with_implicit_externals(configuration): return packages_yaml for compiler in spack.compilers.all_compilers_from(configuration): - libc = compiler.default_libc + libc = CompilerPropertyDetector(compiler).default_libc() if libc: - entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path} + entry = {"spec": f"{libc}", "prefix": libc.external_path} packages_yaml.setdefault(libc.name, {}).setdefault("externals", []).append(entry) return packages_yaml @@ -1061,13 +1062,13 @@ class SourceContext: Facts generated for the spec may include this context. """ - def __init__(self): + def __init__(self, *, source: Optional[str] = None): # This can be "literal" for constraints that come from a user # spec (e.g. from the command line); it can be the output of # `ConstraintOrigin.append_type_suffix`; the default is "none" # (which means it isn't important to keep track of the source # in that case). 
- self.source = "none" + self.source = "none" if source is None else source class ConditionIdContext(SourceContext): @@ -1261,16 +1262,6 @@ def conflict_rules(self, pkg): ) self.gen.newline() - def package_languages(self, pkg): - for when_spec, languages in pkg.languages.items(): - condition_msg = f"{pkg.name} needs the {', '.join(sorted(languages))} language" - if when_spec != spack.spec.Spec(): - condition_msg += f" when {when_spec}" - condition_id = self.condition(when_spec, required_name=pkg.name, msg=condition_msg) - for language in sorted(languages): - self.gen.fact(fn.pkg_fact(pkg.name, fn.language(condition_id, language))) - self.gen.newline() - def config_compatible_os(self): """Facts about compatible os's specified in configs""" self.gen.h2("Compatible OS from concretizer config file") @@ -1280,34 +1271,6 @@ def config_compatible_os(self): self.gen.fact(fn.os_compatible(recent, old)) self.gen.newline() - def compiler_facts(self): - """Facts about available compilers.""" - - self.gen.h2("Available compilers") - for compiler_id, compiler in enumerate(self.possible_compilers): - self.gen.fact(fn.compiler_id(compiler_id)) - self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name)) - self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version)) - - if compiler.os: - self.gen.fact(fn.compiler_os(compiler_id, compiler.os)) - - if compiler.target is not None: - self.gen.fact(fn.compiler_target(compiler_id, compiler.target)) - - if compiler.compiler_obj is not None: - c = compiler.compiler_obj - for flag_type, flags in c.flags.items(): - flag_group = " ".join(flags) - for flag in flags: - self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag, flag_group)) - - if compiler.available: - self.gen.fact(fn.compiler_available(compiler_id)) - - self.gen.fact(fn.compiler_weight(compiler_id, compiler_id)) - self.gen.newline() - def package_requirement_rules(self, pkg): self.emit_facts_from_requirement_rules(self.requirement_parser.rules(pkg)) @@ -1321,9 +1284,6 @@ def pkg_rules(self, pkg, tests): self.pkg_version_rules(pkg) self.gen.newline() - # languages - self.package_languages(pkg) - # variants self.variant_rules(pkg) @@ -1340,12 +1300,6 @@ def pkg_rules(self, pkg, tests): if self.enable_splicing: self.package_splice_rules(pkg) - # virtual preferences - self.virtual_preferences( - pkg.name, - lambda v, p, i: self.gen.fact(fn.pkg_fact(pkg.name, fn.provider_preference(v, p, i))), - ) - self.package_requirement_rules(pkg) # trigger and effect tables @@ -1910,10 +1864,15 @@ def external_packages(self): if pkg_name not in self.pkgs: continue + # This package is not in the possible dependencies + if pkg_name not in self.pkgs: + continue + # This package is not among possible dependencies if pkg_name not in self.pkgs: continue + self.gen.h2(f"External package: {pkg_name}") # Check if the external package is buildable. If it is # not then "external()" is a fact, unless we can # reuse an already installed spec. 
@@ -2130,28 +2089,6 @@ def _spec_clauses( else: clauses.append(f.variant_value(spec.name, vname, value)) - # compiler and compiler version - if spec.compiler: - clauses.append(f.node_compiler(spec.name, spec.compiler.name)) - - if spec.compiler.concrete: - clauses.append( - f.node_compiler_version(spec.name, spec.compiler.name, spec.compiler.version) - ) - - elif spec.compiler.versions and spec.compiler.versions != vn.any_version: - # The condition above emits a facts only if we have an actual constraint - # on the compiler version, and avoids emitting them if any version is fine - clauses.append( - fn.attr( - "node_compiler_version_satisfies", - spec.name, - spec.compiler.name, - spec.compiler.versions, - ) - ) - self.compiler_version_constraints.add(spec.compiler) - # compiler flags source = context.source if context else "none" for flag_type, flags in spec.compiler_flags.items(): @@ -2189,6 +2126,7 @@ def _spec_clauses( # If the spec is external and concrete, we allow all the libcs on the system if spec.external and spec.concrete and using_libc_compatibility(): + clauses.append(fn.attr("needs_libc", spec.name)) for libc in self.libcs: clauses.append(fn.attr("compatible_libc", spec.name, libc.name, libc.version)) @@ -2202,11 +2140,17 @@ def _spec_clauses( # GCC runtime is solved again by clingo, even on concrete specs, to give # the possibility to reuse specs built against a different runtime. if dep.name == "gcc-runtime": + clauses.append( + fn.attr("compatible_runtime", spec.name, dep.name, f"{dep.version}:") + ) + constraint_spec = spack.spec.Spec(f"{dep.name}@{dep.version}") + self.spec_versions(constraint_spec) continue # libc is also solved again by clingo, but in this case the compatibility # is not encoded in the parent node - so we need to emit explicit facts if "libc" in dspec.virtuals: + clauses.append(fn.attr("needs_libc", spec.name)) for libc in self.libcs: if libc_is_compatible(libc, dep): clauses.append( @@ -2241,15 +2185,23 @@ def _spec_clauses( # if it's concrete, then the hashes above take care of dependency # constraints, but expand the hashes if asked for. 
if not spec.concrete or expand_hashes: - clauses.extend( - self._spec_clauses( - dep, - body=body, - expand_hashes=expand_hashes, - concrete_build_deps=concrete_build_deps, - context=context, - ) + dependency_clauses = self._spec_clauses( + dep, + body=body, + expand_hashes=expand_hashes, + concrete_build_deps=concrete_build_deps, + context=context, ) + if dspec.depflag == dt.BUILD: + clauses.append(fn.attr("depends_on", spec.name, dep.name, "build")) + if body is False: + for clause in dependency_clauses: + clause.name = "build_requirement" + clauses.append(fn.attr("build_requirement", spec.name, clause)) + else: + clauses.extend(dependency_clauses) + else: + clauses.extend(dependency_clauses) return clauses @@ -2444,39 +2396,21 @@ def target_defaults(self, specs): candidate_targets.append(ancestor) best_targets = {uarch.family.name} - for compiler_id, known_compiler in enumerate(self.possible_compilers): - if not known_compiler.available: - continue - - compiler = known_compiler.compiler_obj - # Stub support for cross-compilation, to be expanded later - if known_compiler.target is not None and compiler.target not in ( - str(uarch.family), - "any", - ): - self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target)) - self.gen.newline() - continue - + for compiler in self.possible_compilers: supported = self._supported_targets(compiler.name, compiler.version, candidate_targets) - # If we can't find supported targets it may be due to custom - # versions in the spec, e.g. gcc@foo. Try to match the - # real_version from the compiler object to get more accurate - # results. - if not supported: - supported = self._supported_targets( - compiler.name, compiler.real_version, candidate_targets - ) - if not supported: continue for target in supported: best_targets.add(target.name) - self.gen.fact(fn.compiler_supports_target(compiler_id, target.name)) + self.gen.fact( + fn.compiler_supports_target(compiler.name, compiler.version, target.name) + ) - self.gen.fact(fn.compiler_supports_target(compiler_id, uarch.family.name)) + self.gen.fact( + fn.compiler_supports_target(compiler.name, compiler.version, uarch.family.name) + ) self.gen.newline() i = 0 # TODO compute per-target offset? 
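
# For illustration (hypothetical compiler and targets): the rewritten loop above
# keys the target-support facts by compiler name and version instead of by a
# numeric compiler_id, e.g.:
#
#   compiler_supports_target("gcc", "12.3.0", "zen2").
#   compiler_supports_target("gcc", "12.3.0", "x86_64").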
@@ -2693,8 +2627,6 @@ def setup( self.explicitly_required_namespaces[node.name] = node.namespace self.gen = ProblemInstanceBuilder() - compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs) - if using_libc_compatibility(): for libc in self.libcs: self.gen.fact(fn.host_libc(libc.name, libc.version)) @@ -2723,11 +2655,10 @@ def setup( if reuse: self.gen.fact(fn.optimize_for_reuse()) for reusable_spec in reuse: - compiler_parser.add_compiler_from_concrete_spec(reusable_spec) self.register_concrete_spec(reusable_spec, self.pkgs) self.concrete_specs() - self.possible_compilers = compiler_parser.possible_compilers() + self.possible_compilers = possible_compilers(configuration=spack.config.CONFIG) self.gen.h1("Generic statements on possible packages") node_counter.possible_packages_facts(self.gen, fn) @@ -2739,7 +2670,6 @@ def setup( self.gen.h1("General Constraints") self.config_compatible_os() - self.compiler_facts() # architecture defaults self.platform_defaults() @@ -2825,7 +2755,7 @@ def visit(node): arg = ast_sym(ast_sym(term.atom).arguments[0]) symbol = AspFunction(name)(arg.string) self.assumptions.append((parse_term(str(symbol)), True)) - self.gen.asp_problem.append(f"{{ {symbol} }}.\n") + self.gen.asp_problem.append(f"{symbol}.\n") path = os.path.join(parent_dir, "concretize.lp") parse_files([path], visit) @@ -2835,33 +2765,32 @@ def define_runtime_constraints(self): recorder = RuntimePropertyRecorder(self) for compiler in self.possible_compilers: - compiler_with_different_cls_names = { - "oneapi": "intel-oneapi-compilers", - "clang": "llvm", - } - compiler_cls_name = compiler_with_different_cls_names.get( - compiler.spec.name, compiler.spec.name - ) try: - compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name) - if hasattr(compiler_cls, "runtime_constraints"): - compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder) + compiler_cls = spack.repo.PATH.get_pkg_class(compiler.name) except spack.repo.UnknownPackageError: pass + else: + if hasattr(compiler_cls, "runtime_constraints"): + compiler_cls.runtime_constraints(spec=compiler, pkg=recorder) + # Inject default flags for compilers + recorder("*").default_flags(compiler) - # Inject libc from available compilers, on Linux - if not compiler.available: - continue - - current_libc = compiler.compiler_obj.default_libc + current_libc = CompilerPropertyDetector(compiler).default_libc() + # If this is a compiler yet to be built infer libc from the Python process + # FIXME (compiler as nodes): recover this use case + # if not current_libc and compiler.compiler_obj.cc is None: + # current_libc = spack.util.libc.libc_from_current_python_process() if using_libc_compatibility() and current_libc: recorder("*").depends_on( - "libc", when=f"%{compiler.spec}", type="link", description="Add libc" + "libc", + when=f"%{compiler.name}@{compiler.versions}", + type="link", + description="Add libc", ) recorder("*").depends_on( str(current_libc), - when=f"%{compiler.spec}", + when=f"%{compiler.name}@{compiler.versions}", type="link", description="Add libc", ) @@ -2899,6 +2828,9 @@ def literal_specs(self, specs): # These facts are needed to compute the "condition_set" of the root pkg_name = clause.args[1] self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name)) + elif clause_name == "depends_on": + pkg_name = clause.args[2] + self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name)) requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name)) 
            cache[imposed_spec_key] = (effect_id, requirements)
@@ -3063,102 +2995,35 @@ def value(self) -> str:
         return "".join(self.asp_problem)


-class CompilerParser:
-    """Parses configuration files, and builds a list of possible compilers for the solve."""
-
-    def __init__(self, configuration) -> None:
-        self.compilers: Set[KnownCompiler] = set()
-        for c in spack.compilers.all_compilers_from(configuration):
-            if using_libc_compatibility() and not c_compiler_runs(c):
-                tty.debug(
-                    f"the C compiler {c.cc} does not exist, or does not run correctly."
-                    f" The compiler {c.spec} will not be used during concretization."
-                )
-                continue
-
-            if using_libc_compatibility() and not c.default_libc:
-                warnings.warn(
-                    f"cannot detect libc from {c.spec}. The compiler will not be used "
-                    f"during concretization."
-                )
-                continue
-
-            target = c.target if c.target != "any" else None
-            candidate = KnownCompiler(
-                spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
-            )
-            if candidate in self.compilers:
-                warnings.warn(
-                    f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
-                    f"Edit your compilers.yaml configuration to remove it."
-                )
-                continue
-
-            self.compilers.add(candidate)
-
-    def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
-        """Accounts for input specs when building the list of possible compilers.
-
-        Args:
-            input_specs: specs to be concretized
-        """
-        strict = spack.concretize.CHECK_COMPILER_EXISTENCE
-        default_os = str(spack.platforms.host().default_os)
-        default_target = str(archspec.cpu.host().family)
-        for s in traverse.traverse_nodes(input_specs):
-            # we don't need to validate compilers for already-built specs
-            if s.concrete or not s.compiler:
-                continue
-
-            version = s.compiler.versions.concrete
-
-            if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
-                continue
-
-            # Error when a compiler is not found and strict mode is enabled
-            if strict:
-                raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
-
-            # Make up a compiler matching the input spec. This is for bootstrapping.
-            compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
-            compiler_obj = compiler_cls(
-                s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
-            )
-            self.compilers.add(
-                KnownCompiler(
-                    spec=s.compiler,
-                    os=default_os,
-                    target=default_target,
-                    available=True,
-                    compiler_obj=compiler_obj,
-                )
-            )
-
-        return self
-
-    def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
-        """Account for compilers that are coming from concrete specs, through reuse.
-
-        Args:
-            spec: concrete spec to be reused
-        """
-        assert spec.concrete, "the spec argument must be concrete"
-        candidate = KnownCompiler(
-            spec=spec.compiler,
-            os=str(spec.architecture.os),
-            target=str(spec.architecture.target.family),
-            available=False,
-            compiler_obj=None,
-        )
-        self.compilers.add(candidate)
-
-    def possible_compilers(self) -> List[KnownCompiler]:
-        # Here we have to sort two times, first sort by name and ascending version
-        result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
-        # Then stable sort to prefer available compilers and account for preferences
-        ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
-        result.sort(key=lambda x: (not x.available, ppk(x.spec)))
-        return result
+def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
+    result = set()
+    for c in spack.compilers.all_compilers_from(configuration):
+        # FIXME (compiler as nodes): Discard early specs that are not marked for this target?
+        if using_libc_compatibility() and not c_compiler_runs(c):
+            compiler = c.extra_attributes["compilers"]["c"]
+            tty.debug(
+                f"the C compiler {compiler} does not exist, or does not run correctly."
+                f" The compiler {c} will not be used during concretization."
+            )
+            continue
+
+        if using_libc_compatibility() and not CompilerPropertyDetector(c).default_libc():
+            warnings.warn(
+                f"cannot detect libc from {c}. The compiler will not be used "
+                f"during concretization."
+            )
+            continue
+
+        if c in result:
+            # "c" is an external spec here, so report the spec itself
+            warnings.warn(
+                f"duplicate found for {c}. "
+                f"Edit your compilers.yaml configuration to remove it."
+            )
+            continue
+
+        result.add(c)
+    return sorted(result)


 class RuntimePropertyRecorder:
@@ -3201,15 +3066,7 @@ def reset(self):
         """Resets the current state."""
         self.current_package = None

-    def depends_on(
-        self,
-        dependency_str: str,
-        *,
-        when: str,
-        type: str,
-        description: str,
-        languages: Optional[List[str]] = None,
-    ) -> None:
+    def depends_on(self, dependency_str: str, *, when: str, type: str, description: str) -> None:
         """Injects conditional dependencies on packages.

         Conditional dependencies can be either "real" packages or virtual dependencies.
@@ -3218,7 +3075,6 @@ def depends_on(
             dependency_str: the dependency spec to inject
             when: anonymous condition to be met on a package to have the dependency
             type: dependency type
-            languages: languages needed by the package for the dependency to be considered
             description: human-readable description of the rule for adding the dependency
         """
         # TODO: The API for this function is not final, and is still subject to change. At
@@ -3234,25 +3090,10 @@
         if dependency_spec.versions != vn.any_version:
             self._setup.version_constraints.add((dependency_spec.name, dependency_spec.versions))

-        placeholder = "XXX"
-        node_variable = "node(ID, Package)"
-        when_spec.name = placeholder
-
-        body_clauses = self._setup.spec_clauses(when_spec, body=True)
-        body_str = (
-            f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
-            f"  not external({node_variable}),\n"
-            f"  not runtime(Package)"
-        ).replace(f'"{placeholder}"', f"{node_variable}")
-        if languages:
-            body_str += ",\n"
-            for language in languages:
-                body_str += f'  attr("language", {node_variable}, "{language}")'
+        body_str, node_variable = self.rule_body_from(when_spec)

         head_clauses = self._setup.spec_clauses(dependency_spec, body=False)
-        runtime_pkg = dependency_spec.name
-
         is_virtual = head_clauses[0].args[0] == "virtual_node"
         main_rule = (
             f"% {description}\n"
@@ -3287,6 +3128,33 @@
 
         self.reset()

+    def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]:
+        """Computes the rule body from a "when" spec, and returns it, along with the
+        node variable.
+ """ + node_placeholder = "XXX" + node_variable = "node(ID, Package)" + when_substitutions = {} + for s in when_spec.traverse(root=False): + when_substitutions[f'"{s.name}"'] = f'node(ID{s.name}, "{s.name}")' + when_spec.name = node_placeholder + body_clauses = self._setup.spec_clauses(when_spec, body=True) + for clause in body_clauses: + if clause.args[0] == "virtual_on_incoming_edges": + # Substitute: attr("virtual_on_incoming_edges", ProviderNode, Virtual) + # with: attr("virtual_on_edge", ParentNode, ProviderNode, Virtual) + # (avoid adding virtuals everywhere, if a single edge needs it) + _, provider, virtual = clause.args + clause.args = "virtual_on_edge", node_placeholder, provider, virtual + body_str = ( + f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n" + f" not external({node_variable}),\n" + f" not runtime(Package)" + ).replace(f'"{node_placeholder}"', f"{node_variable}") + for old, replacement in when_substitutions.items(): + body_str = body_str.replace(old, replacement) + return body_str, node_variable + def requires(self, impose: str, *, when: str): """Injects conditional requirements on a given package. @@ -3301,7 +3169,6 @@ def requires(self, impose: str, *, when: str): when_spec = spack.spec.Spec(f"{self.current_package}{when}") assert imposed_spec.versions.concrete, f"{impose} must have a concrete version" - assert when_spec.compiler.concrete, f"{when} must have a concrete compiler" # Add versions to possible versions for s in (imposed_spec, when_spec): @@ -3322,32 +3189,54 @@ def propagate(self, constraint_str: str, *, when: str): when_spec = spack.spec.Spec(when) assert when_spec.name is None, "only anonymous when specs are accepted" - placeholder = "XXX" - node_variable = "node(ID, Package)" - when_spec.name = placeholder - - body_clauses = self._setup.spec_clauses(when_spec, body=True) - body_str = ( - f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n" - f" not external({node_variable}),\n" - f" not runtime(Package)" - ).replace(f'"{placeholder}"', f"{node_variable}") + when_substitutions = {} + for s in when_spec.traverse(root=False): + when_substitutions[f'"{s.name}"'] = f'node(ID{s.name}, "{s.name}")' + body_str, node_variable = self.rule_body_from(when_spec) constraint_spec = spack.spec.Spec(constraint_str) - assert constraint_spec.name is None, "only anonymous constraint specs are accepted" - constraint_spec.name = placeholder + # constraint_spec.name = placeholder constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False) for clause in constraint_clauses: - if clause.args[0] == "node_compiler_version_satisfies": - self._setup.compiler_version_constraints.add(constraint_spec.compiler) - args = f'"{constraint_spec.compiler.name}", "{constraint_spec.compiler.versions}"' - head_str = f"propagate({node_variable}, node_compiler_version_satisfies({args}))" + if clause.args[0] == "node_version_satisfies": + self._setup.version_constraints.add( + (constraint_spec.name, constraint_spec.versions) + ) + args = f'"{constraint_spec.name}", "{constraint_spec.versions}"' + head_str = f"propagate({node_variable}, node_version_satisfies({args}))" rule = f"{head_str} :-\n{body_str}.\n\n" self.rules.append(rule) self.reset() + def default_flags(self, spec: "spack.spec.Spec"): + if not spec.external or "flags" not in spec.extra_attributes: + self.reset() + return + + when_spec = spack.spec.Spec(f"^[deptypes=build] {spec}") + body_str, node_variable = self.rule_body_from(when_spec) + + node_placeholder = "XXX" + flags = 
spec.extra_attributes["flags"] + root_spec_str = f"{node_placeholder}" + for flag_type, default_values in flags.items(): + root_spec_str = f"{root_spec_str} {flag_type}='{default_values}'" + root_spec = spack.spec.Spec(root_spec_str) + head_clauses = self._setup.spec_clauses( + root_spec, body=False, context=SourceContext(source="compiler") + ) + self.rules.append(f"% Default compiler flags for {spec}\n") + for clause in head_clauses: + if clause.args[0] == "node": + continue + head_str = str(clause).replace(f'"{node_placeholder}"', f"{node_variable}") + rule = f"{head_str} :-\n{body_str}.\n\n" + self.rules.append(rule) + + self.reset() + def consume_facts(self): """Consume the facts collected by this object, and emits rules and facts for the runtimes. @@ -3534,17 +3423,14 @@ def reorder_flags(self): e.g. for `y cflags="-z -a"` "-z" and "-a" should never have any intervening flags inserted, and should always appear in that order. """ - # reverse compilers so we get highest priority compilers that share a spec - compilers = dict( - (c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG)) - ) - cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse()) + cmd_specs = {s.name: s for spec in self._command_line_specs for s in spec.traverse()} for spec in self._specs.values(): # if bootstrapping, compiler is not in config and has no flags - flagmap_from_compiler = {} - if spec.compiler in compilers: - flagmap_from_compiler = compilers[spec.compiler].flags + flagmap_from_compiler = { + flag_type: [x for x in values if x.source == "compiler"] + for flag_type, values in spec.compiler_flags.items() + } for flag_type in spec.compiler_flags.valid_compiler_flags(): node = SpecBuilder.make_node(pkg=spec.name) @@ -3765,6 +3651,14 @@ def build_specs(self, function_tuples): for root in roots.values(): root._finalize_concretization() + # Unify hashes (this is to avoid duplicates of runtimes and compilers) + unifier = ConcreteSpecsByHash() + keys = list(self._specs) + for key in keys: + current_spec = self._specs[key] + unifier.add(current_spec) + self._specs[key] = unifier[current_spec.dag_hash()] + self._resolve_automatic_splices() for s in self._specs.values(): @@ -3888,10 +3782,10 @@ def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool: if not WITH_RUNTIME: return True - if spec.compiler.name == "gcc" and not spec.dependencies("gcc-runtime"): + if "gcc" in spec and "gcc-runtime" not in spec: return False - if spec.compiler.name == "oneapi" and not spec.dependencies("intel-oneapi-runtime"): + if "intel-oneapi-compilers" in spec and "intel-oneapi-runtime" not in spec: return False return True @@ -4144,6 +4038,14 @@ def _check_input_and_extract_concrete_specs(specs): continue if s.concrete: reusable.append(s) + + try: + s.package_class + except spack.repo.UnknownPackageError: + raise UnsatisfiableSpecError( + f"cannot concretize '{root}', since '{s.name}' does not exist" + ) + spack.spec.Spec.ensure_valid_variants(s) return reusable diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 10b65413a58..cba0040a080 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -38,8 +38,6 @@ internal_error("Only nodes can have node_os"). :- attr("node_target", PackageNode, _), not attr("node", PackageNode), internal_error("Only nodes can have node_target"). 
-:- attr("node_compiler_version", PackageNode, _, _), not attr("node", PackageNode), - internal_error("Only nodes can have node_compiler_version"). :- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode), internal_error("variant_value true for a non-node"). :- attr("node_flag", PackageNode, _), not attr("node", PackageNode), @@ -146,6 +144,15 @@ unification_set(SetID, VirtualNode) max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1, internal_error("virtual node skipped id number"). +% Prefer to assign lower ID to virtuals associated with a lower penalty provider +:- not unification_set("root", node(X, Virtual)), + not unification_set("root", node(Y, Virtual)), + X < Y, + provider_weight(_, node(X, Virtual), WeightX), + provider_weight(_, node(Y, Virtual), WeightY), + WeightY < WeightX. + + %----------------------------------------------------------------------------- % Map literal input specs to facts that drive the solve %----------------------------------------------------------------------------- @@ -216,14 +223,6 @@ error(100, multiple_values_error, Attribute, Package) attr_single_value(Attribute), 2 { attr(Attribute, node(ID, Package), Value) }. -%----------------------------------------------------------------------------- -% Languages used -%----------------------------------------------------------------------------- - -attr("language", node(X, Package), Language) :- - condition_holds(ConditionID, node(X, Package)), - pkg_fact(Package,language(ConditionID, Language)). - %----------------------------------------------------------------------------- % Version semantics %----------------------------------------------------------------------------- @@ -390,6 +389,7 @@ trigger_condition_holds(ID, RequestorNode) :- attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name); attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1)); % Special cases + attr("depends_on", node(X, A1), node(Y, A2), A3) : condition_requirement(ID, "depends_on", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A2)); not cannot_hold(ID, PackageNode). condition_holds(ConditionID, node(X, Package)) @@ -455,6 +455,28 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua imposed_constraint(ID, "depends_on", A1, A2, A3), internal_error("Build deps must land in exactly one duplicate"). +% The rule below accounts for expressions like: +% +% root ^dep %compiler +% +% where "compiler" is a dependency of "dep", but is enforced by a condition imposed by "root" +1 { attr("depends_on", node(min_dupe_id, A1), node(0..Y-1, A2), A3) : max_dupes(A2, Y) } 1 + :- impose(ID, RootNode), + unification_set("root", RootNode), + unification_set("root", node(min_dupe_id, A1)), + imposed_constraint(ID, "depends_on", A1, A2, A3), + internal_error("Build deps must land in exactly one duplicate"). + +% From cli we can have literal expressions like: +% +% root %gcc@12.0 ^dep %gcc@11.2 +% +% Adding a "build_requirement" is a way to discriminate between the incompatible +% version constraints on "gcc" in the "imposed_constraint". 
+attr("node_version_satisfies", node(X, BuildDependency), Constraint) :- + attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)), + attr("depends_on", ParentNode, node(X, BuildDependency), "build"). + % Reconstruct virtual dependencies for reused specs attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual) :- impose(ID, node(X, A1)), @@ -494,9 +516,12 @@ virtual_condition_holds(node(Y, A2), Virtual) %----------------------------------------------------------------------------- % Concrete specs %----------------------------------------------------------------------------- + % if a package is assigned a hash, it's concrete. concrete(PackageNode) :- attr("hash", PackageNode, _), attr("node", PackageNode). +:- concrete(PackageNode), depends_on(PackageNode, DependencyNode), not concrete(DependencyNode). + %----------------------------------------------------------------------------- % Dependency semantics %----------------------------------------------------------------------------- @@ -518,11 +543,30 @@ attr("track_dependencies", Node) :- build(Node), not external(Node). % this ensures a user can't say `zlib ^libiconv` (neither of which have any % dependencies) and get a two-node unconnected graph needed(PackageNode) :- attr("root", PackageNode). -needed(DependencyNode) :- needed(PackageNode), depends_on(PackageNode, DependencyNode). +needed(ChildNode) :- edge_needed(ParentNode, ChildNode). + +edge_needed(ParentNode, node(X, Child)) :- depends_on(ParentNode, node(X, Child)), runtime(Child). +edge_needed(ParentNode, ChildNode) :- depends_on(ParentNode, ChildNode) , concrete(ParentNode). + +edge_needed(ParentNode, node(X, Child)) :- + depends_on(ParentNode, node(X, Child)), + build(ParentNode), + attr("dependency_holds", ParentNode, Child, _). + +edge_needed(ParentNode, ChildNode) :- + depends_on(ParentNode, ChildNode), + build(ParentNode), + node_depends_on_virtual(ParentNode, Virtual), + provider(ChildNode, node(_, Virtual)). + error(10, "'{0}' is not a valid dependency for any package in the DAG", Package) :- attr("node", node(ID, Package)), not needed(node(ID, Package)). +:- depends_on(ParentNode, ChildNode), + not edge_needed(ParentNode, ChildNode), + build(ParentNode). + #defined dependency_type/2. %----------------------------------------------------------------------------- @@ -549,14 +593,18 @@ possible_provider_weight(ProviderNode, VirtualNode, 0, "Set on the command line" % Enforces all virtuals to be provided, if multiple of them are provided together error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but provides only '{1}'", Package, Virtual1, Virtual2) -:- condition_holds(ID, node(X, Package)), +:- % This package provides 2 or more virtuals together + condition_holds(ID, node(X, Package)), pkg_fact(Package, provided_together(ID, SetID, Virtual1)), pkg_fact(Package, provided_together(ID, SetID, Virtual2)), Virtual1 != Virtual2, - attr("virtual_on_incoming_edges", node(X, Package), Virtual1), - not attr("virtual_on_incoming_edges", node(X, Package), Virtual2), - attr("virtual_node", node(_, Virtual1)), - attr("virtual_node", node(_, Virtual2)). + % One node depends on those virtuals AND on this package + node_depends_on_virtual(ClientNode, Virtual1), + node_depends_on_virtual(ClientNode, Virtual2), + depends_on(ClientNode, node(X, Package)), + % But this package is a provider of only one of them + provider(node(X, Package), node(_, Virtual1)), + not provider(node(X, Package), node(_, Virtual2)). 
 % if a package depends on a virtual, it's not external and we have a
 % provider for that virtual then it depends on the provider
@@ -1136,9 +1184,10 @@ error(100, "Cannot propagate the variant '{0}' from the package: {1} because pac
 propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), SourceNode) :-
   propagate(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), _),
   not attr("node_flag_set", node(PackageID, Package), node_flag(FlagType, _, _, "literal")),
+  % FIXME (compiler as nodes): do we need to match the compiler?
   % Same compiler as propagation source
-  node_compiler(node(PackageID, Package), CompilerID),
-  node_compiler(SourceNode, CompilerID),
+  % node_compiler(node(PackageID, Package), CompilerID),
+  % node_compiler(SourceNode, CompilerID),
   attr("propagate", SourceNode, node_flag(FlagType, Flag, FlagGroup, Source), _),
   node(PackageID, Package) != SourceNode,
   not runtime(Package).
@@ -1155,12 +1204,17 @@ error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Sour
 % Compiler constraints
 %----

-attr("node_compiler_version_satisfies", node(ID, Package), Compiler, Version) :-
-  propagate(node(ID, Package), node_compiler_version_satisfies(Compiler, Version)),
-  node_compiler(node(ID, Package), CompilerID),
-  compiler_name(CompilerID, Compiler),
-  not runtime(Package),
-  not external(Package).
+% If a node is built, impose constraints on the compiler coming from dependents
+attr("node_version_satisfies", node(Y, Compiler), VersionRange) :-
+  propagate(node(X, Package), node_version_satisfies(Compiler, VersionRange)),
+  attr("depends_on", node(X, Package), node(Y, Compiler), "build"),
+  not external(node(X, Package)),
+  not runtime(Package).
+
+attr("node_version_satisfies", node(X, Runtime), VersionRange) :-
+  attr("node", node(X, Runtime)),
+  attr("compatible_runtime", PackageNode, Runtime, VersionRange),
+  concrete(PackageNode).

 %-----------------------------------------------------------------------------
 % Runtimes
@@ -1169,11 +1223,19 @@ attr("node_compiler_version_satisfies", node(ID, Package), Compiler, Version) :-
 % Check whether the DAG has any built package
 has_built_packages() :- build(X), not external(X).

-% If we build packages, the runtime nodes must use an available compiler
-1 { node_compiler(PackageNode, CompilerID) : build(PackageNode), not external(PackageNode) } :-
-  has_built_packages(),
-  runtime(RuntimePackage),
-  node_compiler(node(_, RuntimePackage), CompilerID).
+% If any package is built, "gcc-runtime" must be built too (it cannot be reused)
+:- concrete(node(X, "gcc-runtime")), has_built_packages().
+
+% FIXME (compiler as nodes): is this version constraint always required and better than the callback?
+% "gcc-runtime" and the "gcc" it depends on must be at the same version
+% attr("version", node(X, "gcc"), Version) :-
+%   attr("version", node(X, "gcc-runtime"), Version),
+%   attr("depends_on", node(X, "gcc-runtime"), node(Y, "gcc"), "build").
+
+% The "gcc" linked to "gcc-runtime" must be used by at least one other package
+:- attr("depends_on", node(X, "gcc-runtime"), node(Y, "gcc"), "build"),
+   not 2 { attr("depends_on", PackageNode, node(Y, "gcc"), "build") : attr("node", PackageNode) }.
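+% For illustration: in a DAG built from "zlib %gcc", both the "zlib" node and
+% its "gcc-runtime" node hold a build edge to the same node(Y, "gcc"), so the
+% cardinality check above (at least two dependents) is satisfied.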
+
 %-----------------------------------------------------------------------------
 % Platform semantics
 %-----------------------------------------------------------------------------
@@ -1246,7 +1308,7 @@ attr("node_target_satisfies", PackageNode, Constraint)
 % If a node has a target, all of its dependencies must be compatible with that target
 error(100, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
-  :- depends_on(node(X, Package), node(Y, Dependency)),
+  :- attr("depends_on", node(X, Package), node(Y, Dependency), Type), Type != "build",
      attr("node_target", node(X, Package), Target),
      not node_target_compatible(node(Y, Dependency), Target).
@@ -1258,29 +1320,23 @@ node_target_compatible(PackageNode, Target)
     target_compatible(Target, MyTarget).

 #defined target_satisfies/2.

+compiler(Compiler) :- compiler_supports_target(Compiler, _, _).
 % can't use targets on node if the compiler for the node doesn't support them
 error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
  :- attr("node_target", node(X, Package), Target),
-    node_compiler(node(X, Package), CompilerID),
-    not compiler_supports_target(CompilerID, Target),
-    compiler_name(CompilerID, Compiler),
-    compiler_version(CompilerID, Version),
+    attr("depends_on", node(X, Package), node(Y, Compiler), "build"),
+    attr("version", node(Y, Compiler), Version),
+    compiler(Compiler),
+    not compiler_supports_target(Compiler, Version, Target),
     build(node(X, Package)).

-#defined compiler_supports_target/2.
-#defined compiler_available/1.
+#defined compiler_supports_target/3.

 % if a target is set explicitly, respect it
 attr("node_target", PackageNode, Target)
  :- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).

-% each node has the weight of its assigned target
-target_weight(Target, 0)
-  :- attr("node", PackageNode),
-     attr("node_target", PackageNode, Target),
-     attr("node_target_set", PackageNode, Target).
-
 node_target_weight(PackageNode, MinWeight)
  :- attr("node", PackageNode),
     attr("node_target", PackageNode, Target),
@@ -1305,150 +1361,12 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
     attr("node_target", node(X, Package), Target),
     not target(Target).

-%-----------------------------------------------------------------------------
-% Compiler semantics
-%-----------------------------------------------------------------------------
-% There must be only one compiler set per built node.
-{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
-  attr("node", PackageNode),
-  build(PackageNode).
-
-% Infer the compiler that matches a reused node
-node_compiler(PackageNode, CompilerID)
-  :- attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion),
-     attr("node", PackageNode),
-     compiler_name(CompilerID, CompilerName),
-     compiler_version(CompilerID, CompilerVersion),
-     concrete(PackageNode).
-
-% Expand the internal attribute into "attr("node_compiler_version")
-attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
-  :- node_compiler(PackageNode, CompilerID),
-     compiler_name(CompilerID, CompilerName),
-     compiler_version(CompilerID, CompilerVersion),
-     compiler_available(CompilerID),
-     build(PackageNode).
-
-attr("node_compiler", PackageNode, CompilerName)
-  :- attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion).
-
-error(100, "No valid compiler version found for '{0}'", Package)
-  :- attr("node", node(X, Package)),
-     not node_compiler(node(X, Package), _).
- -% We can't have a compiler be enforced and select the version from another compiler -error(100, "Cannot select a single compiler for package {0}", Package) - :- attr("node", node(X, Package)), - 2 { attr("node_compiler_version", node(X, Package), C, V) }. - -% If the compiler of a node cannot be satisfied, raise -error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler) - :- attr("node", node(X, Package)), - attr("node_compiler_version_satisfies", node(X, Package), Compiler, ":"), - not compiler_version_satisfies(Compiler, ":", _). - -% If the compiler of a node must satisfy a constraint, then its version -% must be chosen among the ones that satisfy said constraint -error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint) - :- attr("node", node(X, Package)), - attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint), - not compiler_version_satisfies(Compiler, Constraint, _). - -error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint) - :- attr("node", node(X, Package)), - attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint), - not compiler_version_satisfies(Compiler, Constraint, ID), - node_compiler(node(X, Package), ID). - -% If the node is associated with a compiler and the compiler satisfy a constraint, then -% the compiler associated with the node satisfy the same constraint -attr("node_compiler_version_satisfies", PackageNode, Compiler, Constraint) - :- node_compiler(PackageNode, CompilerID), - compiler_name(CompilerID, Compiler), - compiler_version_satisfies(Compiler, Constraint, CompilerID). - -#defined compiler_version_satisfies/3. - -% If the compiler version was set from the command line, -% respect it verbatim -error(100, "Cannot set the required compiler: {2}%{0}@{1}", Compiler, Version, Package) - :- attr("node_compiler_version_set", node(X, Package), Compiler, Version), - not attr("node_compiler_version", node(X, Package), Compiler, Version). - -error(100, "Cannot set the required compiler: {1}%{0}", Compiler, Package) - :- attr("node_compiler_set", node(X, Package), Compiler), - not attr("node_compiler_version", node(X, Package), Compiler, _). - -% Cannot select a compiler if it is not supported on the OS -% Compilers that are explicitly marked as allowed -% are excluded from this check -error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS) - :- attr("node_os", node(X, Package), OS), - node_compiler(node(X, Package), CompilerID), - compiler_name(CompilerID, Compiler), - compiler_version(CompilerID, Version), - compiler_os(CompilerID, CompilerOS), - not os_compatible(CompilerOS, OS), - build(node(X, Package)). - -% If a package and one of its dependencies don't have the -% same compiler there's a mismatch. -compiler_match(PackageNode, DependencyNode) - :- depends_on(PackageNode, DependencyNode), - node_compiler(PackageNode, CompilerID), - node_compiler(DependencyNode, CompilerID). - -compiler_mismatch(PackageNode, DependencyNode) - :- depends_on(PackageNode, DependencyNode), - not attr("node_compiler_set", DependencyNode, _), - not compiler_match(PackageNode, DependencyNode). - -compiler_mismatch_required(PackageNode, DependencyNode) - :- depends_on(PackageNode, DependencyNode), - attr("node_compiler_set", DependencyNode, _), - not compiler_match(PackageNode, DependencyNode). - -#defined compiler_os/3. 
-
-% compilers weighted by preference according to packages.yaml
-node_compiler_weight(node(ID, Package), Weight)
-  :- node_compiler(node(ID, Package), CompilerID),
-     compiler_name(CompilerID, Compiler),
-     compiler_version(CompilerID, V),
-     compiler_weight(CompilerID, Weight).
-
-node_compiler_weight(node(ID, Package), 100)
-  :- node_compiler(node(ID, Package), CompilerID),
-     compiler_name(CompilerID, Compiler),
-     compiler_version(CompilerID, V),
-     not compiler_weight(CompilerID, _).
-
-% For the time being, be strict and reuse only if the compiler match one we have on the system
-error(100, "Compiler {1}@{2} requested for {0} cannot be found.", Package, Compiler, Version)
-  :- attr("node_compiler_version", node(ID, Package), Compiler, Version),
-     not node_compiler(node(ID, Package), _).
-
-#defined node_compiler_preference/4.
-#defined compiler_weight/3.
-
 %-----------------------------------------------------------------------------
 % Compiler flags
 %-----------------------------------------------------------------------------

-% compiler flags from compilers.yaml are put on nodes if compiler matches
-attr("node_flag", PackageNode, node_flag(FlagType, Flag, FlagGroup, CompilerID))
-  :- compiler_flag(CompilerID, FlagType, Flag, FlagGroup),
-     node_compiler(PackageNode, CompilerID),
-     flag_type(FlagType),
-     compiler_id(CompilerID),
-     compiler_name(CompilerID, CompilerName),
-     compiler_version(CompilerID, Version).
-
 attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, NodeFlag).

-#defined compiler_flag/4.
-
-
 %-----------------------------------------------------------------------------
 % Installed Packages
 %-----------------------------------------------------------------------------
@@ -1665,6 +1583,17 @@ opt_criterion(50, "number of non-default variants (non-roots)").
     build_priority(PackageNode, Priority)
 }.

+% Minimize the ids assigned to duplicate virtual nodes, i.e. use as much as
+% possible the first duplicate of each virtual
+opt_criterion(48, "number of duplicate virtuals needed").
+#minimize{ 0@248: #true }.
+#minimize{ 0@48: #true }.
+#minimize{
+    Weight@48+Priority,ProviderNode,Virtual
+    : provider(ProviderNode, node(Weight, Virtual)),
+    build_priority(ProviderNode, Priority)
+}.
+
 % Minimize the weights of the providers, i.e. use as much as
 % possible the most preferred providers
 opt_criterion(45, "preferred providers (non-roots)").
@@ -1677,27 +1606,6 @@ opt_criterion(45, "preferred providers (non-roots)").
     build_priority(ProviderNode, Priority)
 }.

-% Try to minimize the number of compiler mismatches in the DAG.
-opt_criterion(40, "compiler mismatches that are not required").
-#minimize{ 0@240: #true }.
-#minimize{ 0@40: #true }.
-#minimize{
-    1@40+Priority,PackageNode,node(ID, Dependency)
-    : compiler_mismatch(PackageNode, node(ID, Dependency)),
-    build_priority(node(ID, Dependency), Priority),
-    not runtime(Dependency)
-}.
-
-opt_criterion(39, "compiler mismatches that are required").
-#minimize{ 0@239: #true }.
-#minimize{ 0@39: #true }.
-#minimize{
-    1@39+Priority,PackageNode,node(ID, Dependency)
-    : compiler_mismatch_required(PackageNode, node(ID, Dependency)),
-    build_priority(node(ID, Dependency), Priority),
-    not runtime(Dependency)
-}.
-
 opt_criterion(30, "non-preferred OS's").
 #minimize{ 0@230: #true }.
 #minimize{ 0@30: #true }.
@@ -1730,17 +1638,6 @@ opt_criterion(20, "default values of variants not being used (non-roots)").
     build_priority(PackageNode, Priority)
 }.

-% Try to use preferred compilers
-opt_criterion(15, "non-preferred compilers").
-#minimize{ 0@215: #true }. -#minimize{ 0@15: #true }. -#minimize{ - Weight@15+Priority,node(X, Package) - : node_compiler_weight(node(X, Package), Weight), - build_priority(node(X, Package), Priority), - not runtime(Package) -}. - % Minimize the number of mismatches for targets in the DAG, try % to select the preferred target. opt_criterion(10, "target mismatches"). @@ -1764,20 +1661,6 @@ opt_criterion(5, "non-preferred targets"). }. -% Minimize the number of compiler mismatches for runtimes -opt_criterion(4, "compiler mismatches (runtimes)"). -#minimize{ 0@204: #true }. -#minimize{ 0@4: #true }. -#minimize{ - 1@4,PackageNode,node(ID, Dependency) - : compiler_mismatch(PackageNode, node(ID, Dependency)), runtime(Dependency) -}. -#minimize{ - 1@4,PackageNode,node(ID, Dependency) - : compiler_mismatch_required(PackageNode, node(ID, Dependency)), runtime(Dependency) -}. - - % Choose more recent versions for runtimes opt_criterion(3, "version badness (runtimes)"). #minimize{ 0@203: #true }. diff --git a/lib/spack/spack/solver/counter.py b/lib/spack/spack/solver/counter.py index 4e3150cb4a0..0176d720c52 100644 --- a/lib/spack/spack/solver/counter.py +++ b/lib/spack/spack/solver/counter.py @@ -125,22 +125,28 @@ def _compute_cache_values(self): self._possible_dependencies = set(self._link_run) | set(self._total_build) def possible_packages_facts(self, gen, fn): - build_tools = spack.repo.PATH.packages_with_tags("build-tools") + build_tools = set() + for current_tag in ("build-tools", "compiler"): + build_tools.update(spack.repo.PATH.packages_with_tags(current_tag)) + gen.h2("Packages with at most a single node") for package_name in sorted(self.possible_dependencies() - build_tools): gen.fact(fn.max_dupes(package_name, 1)) gen.newline() - gen.h2("Packages with at multiple possible nodes (build-tools)") + gen.h2("Packages with multiple possible nodes (build-tools)") for package_name in sorted(self.possible_dependencies() & build_tools): gen.fact(fn.max_dupes(package_name, 2)) gen.fact(fn.multiple_unification_sets(package_name)) gen.newline() gen.h2("Maximum number of nodes (virtual packages)") - for package_name in sorted(self.possible_virtuals()): + for package_name in sorted(self._link_run_virtuals): gen.fact(fn.max_dupes(package_name, 1)) gen.newline() + for package_name in sorted(self.possible_virtuals() - self._link_run_virtuals): + gen.fact(fn.max_dupes(package_name, 2)) + gen.newline() gen.h2("Possible package in link-run subDAG") for name in sorted(self._link_run): diff --git a/lib/spack/spack/solver/heuristic.lp b/lib/spack/spack/solver/heuristic.lp index c793276a452..76ba0b1fcd4 100644 --- a/lib/spack/spack/solver/heuristic.lp +++ b/lib/spack/spack/solver/heuristic.lp @@ -14,6 +14,8 @@ #heuristic attr("virtual_node", node(X, Virtual)). [60, init] #heuristic attr("virtual_node", node(X, Virtual)). [-1, sign] #heuristic attr("virtual_node", node(0, Virtual)) : node_depends_on_virtual(PackageNode, Virtual). [1@2, sign] +#heuristic attr("virtual_node", node(0, "c")). [1@3, sign] +#heuristic attr("virtual_node", node(0, "cxx")). [1@3, sign] #heuristic attr("depends_on", ParentNode, ChildNode, Type). [150, init] #heuristic attr("depends_on", ParentNode, ChildNode, Type). [4, factor] @@ -37,6 +39,3 @@ % Use default targets #heuristic attr("node_target", node(PackageID, Package), Target). [-1, sign] #heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, 0), attr("node", node(PackageID, Package)). 
 [1@2, sign]
-
-% Use the default compilers
-#heuristic node_compiler(node(PackageID, Package), ID) : compiler_weight(ID, 0), compiler_id(ID), attr("node", node(PackageID, Package)). [30, init]
diff --git a/lib/spack/spack/solver/libc.py b/lib/spack/spack/solver/libc.py
new file mode 100644
index 00000000000..59669a9a2ee
--- /dev/null
+++ b/lib/spack/spack/solver/libc.py
@@ -0,0 +1,119 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import contextlib
+import os
+import shutil
+import tempfile
+import typing
+from typing import Optional
+
+import llnl.util.tty as tty
+
+import spack.util.libc
+
+if typing.TYPE_CHECKING:
+    import spack.spec
+
+
+class CompilerPropertyDetector:
+
+    #: Cache of compiler verbose output, keyed by the dag hash of the compiler spec
+    _CACHE = {}
+
+    def __init__(self, compiler_spec: "spack.spec.Spec"):
+        assert compiler_spec.external, "only external compiler specs are allowed, so far"
+        assert compiler_spec.concrete, "only concrete compiler specs are allowed, so far"
+        self.spec = compiler_spec
+
+    @contextlib.contextmanager
+    def compiler_environment(self):
+        """Sets the environment to run this compiler"""
+        import spack.schema.environment
+        import spack.util.module_cmd
+
+        # Avoid modifying os.environ if possible.
+        environment = self.spec.extra_attributes.get("environment", {})
+        modules = self.spec.external_modules or []
+        if not self.spec.external_modules and not environment:
+            yield
+            return
+
+        # store environment to replace later
+        backup_env = os.environ.copy()
+
+        try:
+            # load modules and set env variables
+            for module in modules:
+                spack.util.module_cmd.load_module(module)
+
+            # apply other compiler environment changes
+            spack.schema.environment.parse(environment).apply_modifications()
+
+            yield
+        finally:
+            # Restore environment regardless of whether inner code succeeded
+            os.environ.clear()
+            os.environ.update(backup_env)
+
+    def _compile_dummy_c_source(self) -> Optional[str]:
+        import spack.util.executable
+
+        assert self.spec.external, "only external compiler specs are allowed, so far"
+        compiler_pkg = self.spec.package
+        cc = compiler_pkg.cc if compiler_pkg.cc else compiler_pkg.cxx
+        if not cc:  # or not self.spec.verbose_flag:
+            return None
+
+        # Create the temporary directory before the try block, so the cleanup in
+        # the finally clause cannot see an unbound name if mkdtemp were to fail
+        tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
+        try:
+            fout = os.path.join(tmpdir, "output")
+            fin = os.path.join(tmpdir, "main.c")
+
+            with open(fin, "w") as csource:
+                csource.write(
+                    "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
+                )
+            cc_exe = spack.util.executable.Executable(cc)
+
+            # FIXME (compiler as nodes): this operation should be encapsulated somewhere else
+            compiler_flags = self.spec.extra_attributes.get("flags", {})
+            for flag_type in [
+                "cflags" if cc == compiler_pkg.cc else "cxxflags",
+                "cppflags",
+                "ldflags",
+            ]:
+                current_flags = compiler_flags.get(flag_type, "").strip()
+                if current_flags:
+                    cc_exe.add_default_arg(*current_flags.split(" "))
+
+            with self.compiler_environment():
+                return cc_exe("-v", fin, "-o", fout, output=str, error=str)
+        except spack.util.executable.ProcessError as pe:
+            tty.debug(f"ProcessError: Command exited with non-zero status: {pe.long_message}")
+            return None
+        finally:
+            shutil.rmtree(tmpdir, ignore_errors=True)
+
+    def compiler_verbose_output(self) -> Optional[str]:
+        key = self.spec.dag_hash()
+        if key not in self._CACHE:
+            self._CACHE[key] = self._compile_dummy_c_source()
+        return self._CACHE[key]
+
+    def default_libc(self) -> Optional["spack.spec.Spec"]:
+ """Determine libc targeted by the compiler from link line""" + output = self.compiler_verbose_output() + + if not output: + return None + + dynamic_linker = spack.util.libc.parse_dynamic_linker(output) + + if not dynamic_linker: + return None + + return spack.util.libc.libc_from_dynamic_linker(dynamic_linker) diff --git a/lib/spack/spack/solver/libc_compatibility.lp b/lib/spack/spack/solver/libc_compatibility.lp index 1b0f3a9cf98..1f3089abc61 100644 --- a/lib/spack/spack/solver/libc_compatibility.lp +++ b/lib/spack/spack/solver/libc_compatibility.lp @@ -8,25 +8,29 @@ % These rules are used on Linux %============================================================================= -% A package cannot be reused if the libc is not compatible with it + +% A package cannot be reused if it needs a libc that is not compatible with the current one error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage) :- provider(node(X, LibcPackage), node(0, "libc")), attr("version", node(X, LibcPackage), LibcVersion), - attr("hash", node(R, ReusedPackage), Hash), - % Libc packages can be reused without the "compatible_libc" attribute - ReusedPackage != LibcPackage, + concrete(node(R, ReusedPackage)), + attr("needs_libc", node(R, ReusedPackage)), not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion). -% A libc is needed in the DAG -:- has_built_packages(), not provider(_, node(0, "libc")). +% In case we don't need a provider for libc, ensure there's at least one compatible libc on the host +error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage) + :- not provider(_, node(0, "libc")), + concrete(node(R, ReusedPackage)), + attr("needs_libc", node(R, ReusedPackage)), + not attr("compatible_libc", node(R, ReusedPackage), _, _). % Non-libc reused specs must be host libc compatible. In case we build packages, we get a % host compatible libc provider from other rules. If nothing is built, there is no libc provider, % since it's pruned from reusable specs, meaning we have to explicitly impose reused specs are host % compatible. -:- attr("hash", node(R, ReusedPackage), Hash), - not provider(node(R, ReusedPackage), node(0, "libc")), - not attr("compatible_libc", node(R, ReusedPackage), _, _). +%:- attr("hash", node(R, ReusedPackage), Hash), +% not provider(node(R, ReusedPackage), node(0, "libc")), +% not attr("compatible_libc", node(R, ReusedPackage), _, _). % The libc provider must be one that a compiler can target :- has_built_packages(), @@ -34,9 +38,3 @@ error(100, "Cannot reuse {0} since we cannot determine libc compatibility", Reus attr("node", node(X, LibcPackage)), attr("version", node(X, LibcPackage), LibcVersion), not host_libc(LibcPackage, LibcVersion). - -% A built node must depend on libc -:- build(PackageNode), - provider(LibcNode, node(0, "libc")), - not external(PackageNode), - not depends_on(PackageNode, LibcNode). 
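
# A usage sketch for the new CompilerPropertyDetector above (the spec and the
# detected libc are hypothetical; in the solver the input is an external,
# concrete compiler spec coming from packages.yaml):
#
#   import spack.spec
#   from spack.solver.libc import CompilerPropertyDetector
#
#   gcc = spack.spec.Spec("gcc@12.3.0")      # external + concrete in practice
#   detector = CompilerPropertyDetector(gcc)
#   detector.compiler_verbose_output()       # cached "cc -v" output, or None
#   detector.default_libc()                  # e.g. Spec("glibc@2.35"), or None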
diff --git a/lib/spack/spack/solver/requirements.py b/lib/spack/spack/solver/requirements.py index 0fd67a7250a..dd73cc2bf04 100644 --- a/lib/spack/spack/solver/requirements.py +++ b/lib/spack/spack/solver/requirements.py @@ -9,6 +9,7 @@ import spack.config import spack.error import spack.package_base +import spack.repo import spack.spec from spack.config import get_mark_from_yaml_data @@ -194,20 +195,30 @@ def reject_requirement_constraint( self, pkg_name: str, *, constraint: spack.spec.Spec, kind: RequirementKind ) -> bool: """Returns True if a requirement constraint should be rejected""" - if kind == RequirementKind.DEFAULT: - # Requirements under all: are applied only if they are satisfiable considering only - # package rules, so e.g. variants must exist etc. Otherwise, they are rejected. - try: - s = spack.spec.Spec(pkg_name) - s.constrain(constraint) - s.validate_or_raise() - except spack.error.SpackError as e: - tty.debug( - f"[SETUP] Rejecting the default '{constraint}' requirement " - f"on '{pkg_name}': {str(e)}", - level=2, - ) - return True + # If it's a specific package requirement, it's never rejected + if kind != RequirementKind.DEFAULT: + return False + + # Reject default requirements for runtimes and compilers + if pkg_name in spack.repo.PATH.packages_with_tags("runtime"): + return True + + if pkg_name in spack.repo.PATH.packages_with_tags("compiler"): + return True + + # Requirements under all: are applied only if they are satisfiable considering only + # package rules, so e.g. variants must exist etc. Otherwise, they are rejected. + try: + s = spack.spec.Spec(pkg_name) + s.constrain(constraint) + s.validate_or_raise() + except spack.error.SpackError as e: + tty.debug( + f"[SETUP] Rejecting the default '{constraint}' requirement " + f"on '{pkg_name}': {str(e)}", + level=2, + ) + return True return False diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index baf8c44a999..758a85845f3 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -609,6 +609,17 @@ def __repr__(self): def __contains__(self, string): return string in str(self) or string in self.target + def complete_with_defaults(self) -> None: + default_architecture = spack.spec.ArchSpec.default_arch() + if not self.platform: + self.platform = default_architecture.platform + + if not self.os: + self.os = default_architecture.os + + if not self.target: + self.target = default_architecture.target + class CompilerSpec: """The CompilerSpec field represents the compiler or range of compiler @@ -2801,12 +2812,6 @@ def inject_patches_variant(root): @staticmethod def ensure_external_path_if_external(external_spec): if external_spec.external_modules and not external_spec.external_path: - compiler = spack.compilers.compiler_for_spec( - external_spec.compiler, external_spec.architecture - ) - for mod in compiler.modules: - md.load_module(mod) - # Get the path from the module the package can override the default # (this is mostly needed for Cray) pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name) @@ -3004,9 +3009,14 @@ def validate_or_raise(self): spack.repo.PATH.get_pkg_class(spec.fullname) # validate compiler in addition to the package name. 
-        if spec.compiler:
-            if not spack.compilers.supported(spec.compiler):
-                raise UnsupportedCompilerError(spec.compiler.name)
+        if spec.dependencies(deptype="build"):
+            pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
+            # FIXME (compiler as nodes): raise if we use %gcc on pkgs that do not depend on C
+            pkg_dependencies = pkg_cls.dependency_names()
+            if not any(x in pkg_dependencies for x in ("c", "cxx", "fortran")):
+                raise UnsupportedCompilerError(
+                    f"{spec.fullname} does not depend on 'c', 'cxx', or 'fortran'"
+                )

         # Ensure correctness of variants (if the spec is not virtual)
         if not spec.virtual:
diff --git a/lib/spack/spack/spec_parser.py b/lib/spack/spack/spec_parser.py
index bd7778d51df..d3e429ee305 100644
--- a/lib/spack/spack/spec_parser.py
+++ b/lib/spack/spack/spec_parser.py
@@ -335,6 +335,17 @@ def add_flag(name: str, value: str, propagate: bool):
                 SpecTokens.COMPILER_AND_VERSION
             ):
                 build_dependency = spack.spec.Spec(self.ctx.current_token.value[1:])
+                name_conversion = {
+                    "clang": "llvm",
+                    "oneapi": "intel-oneapi-compilers",
+                    "rocmcc": "llvm-amdgpu",
+                    "intel": "intel-oneapi-compilers-classic",
+                    "arm": "acfl",
+                }
+
+                if build_dependency.name in name_conversion:
+                    build_dependency.name = name_conversion[build_dependency.name]
+
                 initial_spec._add_dependency(
                     build_dependency, depflag=spack.deptypes.BUILD, virtuals=()
                 )
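
# A sketch of the parser behavior added above (spec strings are illustrative):
#
#   import spack.spec
#
#   s = spack.spec.Spec("hdf5 %clang@16")
#   # "%clang" is now recorded as a build dependency on the "llvm" package
#   # instead of populating the old compiler field:
#   [d.name for d in s.dependencies(deptype="build")]   # -> ["llvm"]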