Deprecate config:install_missing_compilers (#46237)

The option config:install_missing_compilers is currently buggy,
and has been for a while. Remove it, since it won't be needed
when compilers are treated as dependencies.

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
Massimiliano Culpo authored 2024-09-10 20:02:05 +02:00, committed by GitHub
parent decefe0234
commit ffdfa498bf
11 changed files with 19 additions and 391 deletions


@@ -115,12 +115,6 @@ config:
   suppress_gpg_warnings: false

-  # If set to true, Spack will attempt to build any compiler on the spec
-  # that is not already available. If set to False, Spack will only use
-  # compilers already configured in compilers.yaml
-  install_missing_compilers: false
-
   # If set to true, Spack will always check checksums after downloading
   # archives. If false, Spack skips the checksum step.
   checksum: true
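For context, the removed option only took effect through Spack's Python config API; a minimal sketch of how it was consulted, reusing the same spack.config.get call that appears in the removed Concretizer.__init__ later in this diff (the wrapper function here is illustrative only, not part of the change):

import spack.config


def compiler_existence_check_enabled() -> bool:
    # When install_missing_compilers was true, Spack skipped the check that a
    # matching compiler is already configured and queued a compiler build instead.
    return not spack.config.get("config:install_missing_compilers", False)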


@@ -181,10 +181,6 @@ Spec-related modules
 :mod:`spack.parser`
   Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

-:mod:`spack.concretize`
-  Contains :class:`~spack.concretize.Concretizer` implementation,
-  which allows site administrators to change Spack's :ref:`concretization-policies`.
-
 :mod:`spack.version`
   Implements a simple :class:`~spack.version.Version` class with simple
   comparison semantics. Also implements :class:`~spack.version.VersionRange`


@@ -663,11 +663,7 @@ build the package.
 When including a bootstrapping phase as in the example above, the result is that
 the bootstrapped compiler packages will be pushed to the binary mirror (and the
-local artifacts mirror) before the actual release specs are built. In this case,
-the jobs corresponding to subsequent release specs are configured to
-``install_missing_compilers``, so that if spack is asked to install a package
-with a compiler it doesn't know about, it can be quickly installed from the
-binary mirror first.
+local artifacts mirror) before the actual release specs are built.

 Since bootstrapping compilers is optional, those items can be left out of the
 environment/stack file, and in that case no bootstrapping will be done (only the


@@ -19,35 +19,23 @@
 import spack.tengine
 import spack.util.path

-class Concretizer:
-    """(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""
-
-    #: Controls whether we check that compiler versions actually exist
-    #: during concretization. Used for testing and for mirror creation
-    check_for_compiler_existence = None
-
-    def __init__(self):
-        if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not spack.config.get(
-                "config:install_missing_compilers", False
-            )
+CHECK_COMPILER_EXISTENCE = True

 @contextmanager
 def disable_compiler_existence_check():
-    saved = Concretizer.check_for_compiler_existence
-    Concretizer.check_for_compiler_existence = False
+    global CHECK_COMPILER_EXISTENCE
+    CHECK_COMPILER_EXISTENCE, saved = False, CHECK_COMPILER_EXISTENCE
     yield
-    Concretizer.check_for_compiler_existence = saved
+    CHECK_COMPILER_EXISTENCE = saved

 @contextmanager
 def enable_compiler_existence_check():
-    saved = Concretizer.check_for_compiler_existence
-    Concretizer.check_for_compiler_existence = True
+    global CHECK_COMPILER_EXISTENCE
+    CHECK_COMPILER_EXISTENCE, saved = True, CHECK_COMPILER_EXISTENCE
     yield
-    Concretizer.check_for_compiler_existence = saved
+    CHECK_COMPILER_EXISTENCE = saved

 def find_spec(spec, condition, default=None):
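The hunk above replaces the deprecated Concretizer class with a module-level CHECK_COMPILER_EXISTENCE flag toggled by the two context managers. A minimal usage sketch; the wrapper function and the way it is invoked are illustrative assumptions, while disable_compiler_existence_check and Spec.concretized both appear in this diff:

import spack.concretize
import spack.spec


def concretize_ignoring_compiler_existence(spec_str: str):
    # Temporarily sets spack.concretize.CHECK_COMPILER_EXISTENCE to False and
    # restores the previous value when the block exits.
    with spack.concretize.disable_compiler_existence_check():
        return spack.spec.Spec(spec_str).concretized()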


@@ -276,52 +276,6 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
     dump_packages(pkg.spec, packages_dir)

-def _packages_needed_to_bootstrap_compiler(
-    compiler: "spack.spec.CompilerSpec", architecture: "spack.spec.ArchSpec", pkgs: list
-) -> List[Tuple["spack.package_base.PackageBase", bool]]:
-    """
-    Return a list of packages required to bootstrap `pkg`s compiler
-
-    Checks Spack's compiler configuration for a compiler that
-    matches the package spec.
-
-    Args:
-        compiler: the compiler to bootstrap
-        architecture: the architecture for which to bootstrap the compiler
-        pkgs: the packages that may need their compiler installed
-
-    Return:
-        list of tuples of packages and a boolean, for concretized compiler-related
-        packages that need to be installed and bool values specify whether the
-        package is the bootstrap compiler (``True``) or one of its dependencies
-        (``False``). The list will be empty if there are no compilers.
-    """
-    tty.debug(f"Bootstrapping {compiler} compiler")
-    compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
-    if compilers:
-        return []
-
-    dep = spack.compilers.pkg_spec_for_compiler(compiler)
-
-    # Set the architecture for the compiler package in a way that allows the
-    # concretizer to back off if needed for the older bootstrapping compiler
-    dep.constrain(f"platform={str(architecture.platform)}")
-    dep.constrain(f"os={str(architecture.os)}")
-    dep.constrain(f"target={architecture.target.microarchitecture.family.name}:")
-    # concrete CompilerSpec has less info than concrete Spec
-    # concretize as Spec to add that information
-    dep.concretize()
-
-    # mark compiler as depended-on by the packages that use it
-    for pkg in pkgs:
-        dep._dependents.add(
-            spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
-        )
-    packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
-
-    packages.append((dep.package, True))
-    return packages

 def _hms(seconds: int) -> str:
     """
     Convert seconds to hours, minutes, seconds
@@ -967,26 +921,6 @@ def __init__(
             if package_id(d) != self.pkg_id
         )

-        # Handle bootstrapped compiler
-        #
-        # The bootstrapped compiler is not a dependency in the spec, but it is
-        # a dependency of the build task. Here we add it to self.dependencies
-        compiler_spec = self.pkg.spec.compiler
-        arch_spec = self.pkg.spec.architecture
-        strict = spack.concretize.Concretizer().check_for_compiler_existence
-        if (
-            not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
-            and not strict
-        ):
-            # The compiler is in the queue, identify it as dependency
-            dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
-            dep.constrain(f"platform={str(arch_spec.platform)}")
-            dep.constrain(f"os={str(arch_spec.os)}")
-            dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
-            dep.concretize()
-            dep_id = package_id(dep)
-            self.dependencies.add(dep_id)

         # List of uninstalled dependencies, which is used to establish
         # the priority of the build task.
         #
@@ -1165,53 +1099,6 @@ def __str__(self) -> str:
         installed = f"installed ({len(self.installed)}) = {self.installed}"
         return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}"

-    def _add_bootstrap_compilers(
-        self,
-        compiler: "spack.spec.CompilerSpec",
-        architecture: "spack.spec.ArchSpec",
-        pkgs: List["spack.package_base.PackageBase"],
-        request: BuildRequest,
-        all_deps,
-    ) -> None:
-        """
-        Add bootstrap compilers and dependencies to the build queue.
-
-        Args:
-            compiler: the compiler to bootstrap
-            architecture: the architecture for which to bootstrap the compiler
-            pkgs: the package list with possible compiler dependencies
-            request: the associated install request
-            all_deps (defaultdict(set)): dictionary of all dependencies and
-                associated dependents
-        """
-        packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
-        for comp_pkg, is_compiler in packages:
-            pkgid = package_id(comp_pkg.spec)
-            if pkgid not in self.build_tasks:
-                self._add_init_task(comp_pkg, request, is_compiler, all_deps)
-            elif is_compiler:
-                # ensure it's queued as a compiler
-                self._modify_existing_task(pkgid, "compiler", True)
-
-    def _modify_existing_task(self, pkgid: str, attr, value) -> None:
-        """
-        Update a task in-place to modify its behavior.
-
-        Currently used to update the ``compiler`` field on tasks
-        that were originally created as a dependency of a compiler,
-        but are compilers in their own right.
-
-        For example, ``intel-oneapi-compilers-classic`` depends on
-        ``intel-oneapi-compilers``, which can cause the latter to be
-        queued first as a non-compiler, and only later as a compiler.
-        """
-        for i, tup in enumerate(self.build_pq):
-            key, task = tup
-            if task.pkg_id == pkgid:
-                tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2)
-                setattr(task, attr, value)
-                self.build_pq[i] = (key, task)

     def _add_init_task(
         self,
         pkg: "spack.package_base.PackageBase",
@@ -1541,42 +1428,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
             tty.warn(f"Installation request refused: {str(err)}")
             return

-        install_compilers = spack.config.get("config:install_missing_compilers", False)
         install_deps = request.install_args.get("install_deps")

-        # Bootstrap compilers first
-        if install_deps and install_compilers:
-            packages_per_compiler: Dict[
-                "spack.spec.CompilerSpec",
-                Dict["spack.spec.ArchSpec", List["spack.package_base.PackageBase"]],
-            ] = {}
-
-            for dep in request.traverse_dependencies():
-                dep_pkg = dep.package
-                compiler = dep_pkg.spec.compiler
-                arch = dep_pkg.spec.architecture
-                if compiler not in packages_per_compiler:
-                    packages_per_compiler[compiler] = {}
-
-                if arch not in packages_per_compiler[compiler]:
-                    packages_per_compiler[compiler][arch] = []
-
-                packages_per_compiler[compiler][arch].append(dep_pkg)
-
-            compiler = request.pkg.spec.compiler
-            arch = request.pkg.spec.architecture
-
-            if compiler not in packages_per_compiler:
-                packages_per_compiler[compiler] = {}
-
-            if arch not in packages_per_compiler[compiler]:
-                packages_per_compiler[compiler][arch] = []
-
-            packages_per_compiler[compiler][arch].append(request.pkg)
-
-            for compiler, archs in packages_per_compiler.items():
-                for arch, packages in archs.items():
-                    self._add_bootstrap_compilers(compiler, arch, packages, request, all_deps)

         if install_deps:
             for dep in request.traverse_dependencies():
@@ -1608,10 +1460,6 @@ def _add_tasks(self, request: BuildRequest, all_deps):
         fail_fast = bool(request.install_args.get("fail_fast"))
         self.fail_fast = self.fail_fast or fail_fast

-    def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
-        compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
-        spack.compilers.find_compilers([compiler_search_prefix])
-
     def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
         """
         Perform the installation of the requested spec and/or dependency
@@ -1639,8 +1487,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
         if use_cache:
             if _install_from_cache(pkg, explicit, unsigned):
                 self._update_installed(task)
-                if task.compiler:
-                    self._add_compiler_package_to_config(pkg)
                 return
             elif cache_only:
                 raise InstallError("No binary found when cache-only was specified", pkg=pkg)
@@ -1670,9 +1516,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
             # the database, so that we don't need to re-read from file.
             spack.store.STORE.db.add(pkg.spec, explicit=explicit)

-            # If a compiler, ensure it is added to the configuration
-            if task.compiler:
-                self._add_compiler_package_to_config(pkg)
         except spack.build_environment.StopPhase as e:
             # A StopPhase exception means that do_install was asked to
             # stop early from clients, and is not an error at this point
@@ -2073,10 +1916,6 @@ def install(self) -> None:
                     path = spack.util.path.debug_padded_filter(pkg.prefix)
                     _print_installed_pkg(path)

-                    # It's an already installed compiler, add it to the config
-                    if task.compiler:
-                        self._add_compiler_package_to_config(pkg)
                 else:
                     # At this point we've failed to get a write or a read
                     # lock, which means another process has taken a write


@@ -75,7 +75,6 @@
                 "verify_ssl": {"type": "boolean"},
                 "ssl_certs": {"type": "string"},
                 "suppress_gpg_warnings": {"type": "boolean"},
-                "install_missing_compilers": {"type": "boolean"},
                 "debug": {"type": "boolean"},
                 "checksum": {"type": "boolean"},
                 "deprecated": {"type": "boolean"},
@@ -102,7 +101,14 @@
                 "message": "Spack supports only clingo as a concretizer from v0.23. "
                 "The config:concretizer config option is ignored.",
                 "error": False,
-            }
+            },
+            {
+                "names": ["install_missing_compilers"],
+                "message": "The config:install_missing_compilers option has been deprecated in "
+                "Spack v0.23, and is currently ignored. It will be removed from config in "
+                "Spack v0.25.",
+                "error": False,
+            },
         ],
     }
 }
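The schema change above adds a deprecation entry so that a config file which still sets the option produces a warning rather than a hard validation error. A self-contained sketch of how such an entry translates into a warning; the helper name and the plain-dict representation are assumptions for illustration, while the entry itself is copied from the hunk (the real enforcement lives in Spack's schema validation):

import warnings

# The entry added in the hunk above, reproduced as plain data.
DEPRECATIONS = [
    {
        "names": ["install_missing_compilers"],
        "message": "The config:install_missing_compilers option has been deprecated in "
        "Spack v0.23, and is currently ignored. It will be removed from config in "
        "Spack v0.25.",
        "error": False,
    }
]


def check_deprecated(config_section: dict) -> None:
    """Warn (or raise) for deprecated keys found in a config: section."""
    for entry in DEPRECATIONS:
        for name in entry["names"]:
            if name in config_section:
                if entry["error"]:
                    raise ValueError(entry["message"])
                warnings.warn(entry["message"], DeprecationWarning)


# Example: a section that still sets the option triggers the warning.
check_deprecated({"install_missing_compilers": True})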


@@ -2731,10 +2731,6 @@ def define_runtime_constraints(self):
                 continue

             current_libc = compiler.compiler_obj.default_libc
-            # If this is a compiler yet to be built (config:install_missing_compilers:true)
-            # infer libc from the Python process
-            if not current_libc and compiler.compiler_obj.cc is None:
-                current_libc = spack.util.libc.libc_from_current_python_process()

             if using_libc_compatibility() and current_libc:
                 recorder("*").depends_on(
@@ -3156,7 +3152,7 @@ def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerPar
         Args:
             input_specs: specs to be concretized
         """
-        strict = spack.concretize.Concretizer().check_for_compiler_existence
+        strict = spack.concretize.CHECK_COMPILER_EXISTENCE
         default_os = str(spack.platforms.host().default_os)
         default_target = str(archspec.cpu.host().family)
         for s in traverse.traverse_nodes(input_specs):


@@ -1319,7 +1319,7 @@ node_compiler_weight(node(ID, Package), 100)
     not compiler_weight(CompilerID, _).

 % For the time being, be strict and reuse only if the compiler match one we have on the system
-error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
+error(100, "Compiler {1}@{2} requested for {0} cannot be found.", Package, Compiler, Version)
   :- attr("node_compiler_version", node(ID, Package), Compiler, Version),
      not node_compiler(node(ID, Package), _).


@@ -19,7 +19,6 @@
 import spack.cmd.common.arguments
 import spack.cmd.install
-import spack.compilers as compilers
 import spack.config
 import spack.environment as ev
 import spack.hash_types as ht
@@ -29,7 +28,7 @@
 from spack.error import SpackError
 from spack.main import SpackCommand
 from spack.parser import SpecSyntaxError
-from spack.spec import CompilerSpec, Spec
+from spack.spec import Spec

 install = SpackCommand("install")
 env = SpackCommand("env")
@@ -916,68 +915,6 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
         assert "foo: No such file or directory" in content

-@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
-def test_compiler_bootstrap(
-    install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch
-):
-    monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
-    spack.config.set("config:install_missing_compilers", True)
-    assert CompilerSpec("gcc@=12.0") not in compilers.all_compiler_specs()
-
-    # Test succeeds if it does not raise an error
-    install("pkg-a%gcc@=12.0")
-
-@pytest.mark.not_on_windows("Binary mirrors not supported on windows")
-def test_compiler_bootstrap_from_binary_mirror(
-    install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch, tmpdir
-):
-    """
-    Make sure installing compiler from buildcache registers compiler
-    """
-    # Create a temp mirror directory for buildcache usage
-    mirror_dir = tmpdir.join("mirror_dir")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
-
-    # Install a compiler, because we want to put it in a buildcache
-    install("gcc@=10.2.0")
-
-    # Put installed compiler in the buildcache
-    buildcache("push", "-u", "-f", mirror_dir.strpath, "gcc@10.2.0")
-
-    # Now uninstall the compiler
-    uninstall("-y", "gcc@10.2.0")
-
-    monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
-    spack.config.set("config:install_missing_compilers", True)
-    assert CompilerSpec("gcc@=10.2.0") not in compilers.all_compiler_specs()
-
-    # Configure the mirror where we put that buildcache w/ the compiler
-    mirror("add", "test-mirror", mirror_url)
-
-    # Now make sure that when the compiler is installed from binary mirror,
-    # it also gets configured as a compiler. Test succeeds if it does not
-    # raise an error
-    install("--no-check-signature", "--cache-only", "--only", "dependencies", "pkg-b%gcc@=10.2.0")
-    install("--no-cache", "--only", "package", "pkg-b%gcc@10.2.0")
-
-@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
-@pytest.mark.regression("16221")
-def test_compiler_bootstrap_already_installed(
-    install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch
-):
-    monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
-    spack.config.set("config:install_missing_compilers", True)
-
-    assert CompilerSpec("gcc@=12.0") not in compilers.all_compiler_specs()
-
-    # Test succeeds if it does not raise an error
-    install("gcc@=12.0")
-    install("pkg-a%gcc@=12.0")

 def test_install_fails_no_args(tmpdir):
     # ensure no spack.yaml in directory
     with tmpdir.as_cwd():


@@ -2375,26 +2375,6 @@ def test_externals_with_platform_explicitly_set(self, tmp_path):
         s = Spec("mpich").concretized()
         assert s.external

-    @pytest.mark.regression("43875")
-    def test_concretize_missing_compiler(self, mutable_config, monkeypatch):
-        """Tests that Spack can concretize a spec with a missing compiler when the
-        option is active.
-        """
-
-        def _default_libc(self):
-            if self.cc is None:
-                return None
-            return Spec("glibc@=2.28")
-
-        monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
-        monkeypatch.setattr(spack.compiler.Compiler, "default_libc", property(_default_libc))
-        monkeypatch.setattr(
-            spack.util.libc, "libc_from_current_python_process", lambda: Spec("glibc@=2.28")
-        )
-        mutable_config.set("config:install_missing_compilers", True)
-
-        s = Spec("pkg-a %gcc@=13.2.0").concretized()
-        assert s.satisfies("%gcc@13.2.0")

     @pytest.mark.regression("43267")
     def test_spec_with_build_dep_from_json(self, tmp_path):
         """Tests that we can correctly concretize a spec, when we express its dependency as a


@@ -12,8 +12,6 @@
 import py
 import pytest

-import archspec.cpu
-
 import llnl.util.filesystem as fs
 import llnl.util.lock as ulk
 import llnl.util.tty as tty
@@ -435,76 +433,6 @@ def test_fake_install(install_mockery):
     assert os.path.isdir(pkg.prefix.lib)

-def test_packages_needed_to_bootstrap_compiler_none(install_mockery):
-    spec = spack.spec.Spec("trivial-install-test-package")
-    spec.concretize()
-    assert spec.concrete
-
-    packages = inst._packages_needed_to_bootstrap_compiler(
-        spec.compiler, spec.architecture, [spec.package]
-    )
-    assert not packages
-
-@pytest.mark.xfail(reason="fails when assuming Spec.package can only be called on concrete specs")
-def test_packages_needed_to_bootstrap_compiler_packages(install_mockery, monkeypatch):
-    spec = spack.spec.Spec("trivial-install-test-package")
-    spec.concretize()
-
-    def _conc_spec(compiler):
-        return spack.spec.Spec("pkg-a").concretized()
-
-    # Ensure we can get past functions that are precluding obtaining
-    # packages.
-    monkeypatch.setattr(spack.compilers, "compilers_for_spec", _none)
-    monkeypatch.setattr(spack.compilers, "pkg_spec_for_compiler", _conc_spec)
-    monkeypatch.setattr(spack.spec.Spec, "concretize", _noop)
-
-    packages = inst._packages_needed_to_bootstrap_compiler(
-        spec.compiler, spec.architecture, [spec.package]
-    )
-    assert packages
-
-def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, monkeypatch):
-    spec = spack.spec.Spec("trivial-install-test-package").concretized()
-    installer = inst.PackageInstaller([spec.package], {})
-
-    # Add a task to the queue
-    installer._add_init_task(spec.package, installer.build_requests[0], False, {})
-
-    # monkeypatch to make the list of compilers be what we test
-    def fake_package_list(compiler, architecture, pkgs):
-        return [(spec.package, True)]
-
-    monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", fake_package_list)
-
-    installer._add_bootstrap_compilers("fake", "fake", "fake", None, {})
-
-    # Check that the only task is now a compiler task
-    assert len(installer.build_pq) == 1
-    assert installer.build_pq[0][1].compiler
-
-@pytest.mark.skipif(
-    str(archspec.cpu.host().family) != "x86_64",
-    reason="OneAPI compiler is not supported on other architectures",
-)
-def test_bootstrapping_compilers_with_different_names_from_spec(
-    install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host
-):
-    """Tests that, when we bootstrap '%oneapi' we can translate it to the
-    'intel-oneapi-compilers' package.
-    """
-    with spack.config.override("config:install_missing_compilers", True):
-        with spack.concretize.disable_compiler_existence_check():
-            spec = spack.spec.Spec("trivial-install-test-package%oneapi@=22.2.0").concretized()
-            spec.package.do_install()
-
-            assert (
-                spack.spec.CompilerSpec("oneapi@=22.2.0") in spack.compilers.all_compiler_specs()
-            )

 def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
     """Test happy path for dump_packages with dependencies."""
@@ -696,26 +624,6 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
     assert inst.package_id(dep) in installer.installed

-def test_add_bootstrap_compilers(install_mockery, monkeypatch):
-    from collections import defaultdict
-
-    def _pkgs(compiler, architecture, pkgs):
-        spec = spack.spec.Spec("mpi").concretized()
-        return [(spec.package, True)]
-
-    installer = create_installer(["trivial-install-test-package"], {})
-    request = installer.build_requests[0]
-    all_deps = defaultdict(set)
-
-    monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", _pkgs)
-    installer._add_bootstrap_compilers("fake", "fake", [request.pkg], request, all_deps)
-
-    ids = list(installer.build_tasks)
-    assert len(ids) == 1
-    task = installer.build_tasks[ids[0]]
-    assert task.compiler

 def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
     """Test of _prepare_for_install's early return for installed task path."""
     installer = create_installer(["dependent-install"], {})
@@ -729,18 +637,6 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
     installer._prepare_for_install(task)

-def test_installer_init_requests(install_mockery):
-    """Test of installer initial requests."""
-    spec_name = "dependent-install"
-    with spack.config.override("config:install_missing_compilers", True):
-        installer = create_installer([spec_name], {})
-
-        # There is only one explicit request in this case
-        assert len(installer.build_requests) == 1
-        request = installer.build_requests[0]
-        assert request.pkg.name == spec_name

 def test_install_task_use_cache(install_mockery, monkeypatch):
     installer = create_installer(["trivial-install-test-package"], {})
     request = installer.build_requests[0]