Deprecate make, gmake and ninja globals in package API (#48450)

Instead, `depends_on("gmake", type="build")` should be used, which makes the global available through `setup_dependent_package`.
This commit is contained in:
Massimiliano Culpo 2025-01-11 22:43:22 +01:00 committed by GitHub
parent a66ab9cc6c
commit 8ab6f33eb6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
16 changed files with 158 additions and 48 deletions

View File

@@ -44,7 +44,19 @@
from enum import Flag, auto from enum import Flag, auto
from itertools import chain from itertools import chain
from multiprocessing.connection import Connection from multiprocessing.connection import Connection
from typing import Callable, Dict, List, Optional, Set, Tuple from typing import (
Callable,
Dict,
List,
Optional,
Sequence,
Set,
TextIO,
Tuple,
Type,
Union,
overload,
)
import archspec.cpu import archspec.cpu
@@ -146,48 +158,128 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):
class MakeExecutable(Executable): class MakeExecutable(Executable):
"""Special callable executable object for make so the user can specify """Special callable executable object for make so the user can specify parallelism options
parallelism options on a per-invocation basis. Specifying on a per-invocation basis.
'parallel' to the call will override whatever the package's
global setting is, so you can either default to true or false and
override particular calls. Specifying 'jobs_env' to a particular
call will name an environment variable which will be set to the
parallelism level (without affecting the normal invocation with
-j).
""" """
def __init__(self, name, jobs, **kwargs): def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
supports_jobserver = kwargs.pop("supports_jobserver", True) super().__init__(name)
super().__init__(name, **kwargs)
self.supports_jobserver = supports_jobserver self.supports_jobserver = supports_jobserver
self.jobs = jobs self.jobs = jobs
def __call__(self, *args, **kwargs): @overload
"""parallel, and jobs_env from kwargs are swallowed and used here; def __call__(
remaining arguments are passed through to the superclass. self,
""" *args: str,
parallel = kwargs.pop("parallel", True) parallel: bool = ...,
jobs_env = kwargs.pop("jobs_env", None) jobs_env: Optional[str] = ...,
jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False) jobs_env_supports_jobserver: bool = ...,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Optional[TextIO], str] = ...,
error: Union[Optional[TextIO], str] = ...,
_dump_env: Optional[Dict[str, str]] = ...,
) -> None: ...
@overload
def __call__(
self,
*args: str,
parallel: bool = ...,
jobs_env: Optional[str] = ...,
jobs_env_supports_jobserver: bool = ...,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Type[str], Callable] = ...,
error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
_dump_env: Optional[Dict[str, str]] = ...,
) -> str: ...
@overload
def __call__(
self,
*args: str,
parallel: bool = ...,
jobs_env: Optional[str] = ...,
jobs_env_supports_jobserver: bool = ...,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
error: Union[Type[str], Callable] = ...,
_dump_env: Optional[Dict[str, str]] = ...,
) -> str: ...
def __call__(
self,
*args: str,
parallel: bool = True,
jobs_env: Optional[str] = None,
jobs_env_supports_jobserver: bool = False,
**kwargs,
) -> Optional[str]:
"""Runs this "make" executable in a subprocess.
Args:
parallel: if False, parallelism is disabled
jobs_env: environment variable that will be set to the current level of parallelism
jobs_env_supports_jobserver: whether the jobs env supports a job server
For all the other **kwargs, refer to the base class.
"""
jobs = get_effective_jobs( jobs = get_effective_jobs(
self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
) )
if jobs is not None: if jobs is not None:
args = ("-j{0}".format(jobs),) + args args = (f"-j{jobs}",) + args
if jobs_env: if jobs_env:
# Caller wants us to set an environment variable to # Caller wants us to set an environment variable to control the parallelism
# control the parallelism.
jobs_env_jobs = get_effective_jobs( jobs_env_jobs = get_effective_jobs(
self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
) )
if jobs_env_jobs is not None: if jobs_env_jobs is not None:
kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)} extra_env = kwargs.setdefault("extra_env", {})
extra_env.update({jobs_env: str(jobs_env_jobs)})
return super().__call__(*args, **kwargs) return super().__call__(*args, **kwargs)
class UndeclaredDependencyError(spack.error.SpackError):
"""Raised if a dependency is invoking an executable through a module global, without
declaring a dependency on it.
"""
class DeprecatedExecutable:
def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
self.pkg = pkg
self.exe = exe
self.exe_pkg = exe_pkg
def __call__(self, *args, **kwargs):
raise UndeclaredDependencyError(
f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
)
def add_default_env(self, key: str, value: str):
self.__call__()
def clean_environment(): def clean_environment():
# Stuff in here sanitizes the build environment to eliminate # Stuff in here sanitizes the build environment to eliminate
# anything the user has set that may interfere. We apply it immediately # anything the user has set that may interfere. We apply it immediately
@@ -621,10 +713,9 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# TODO: make these build deps that can be installed if not found. module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
module.make = MakeExecutable("make", jobs) module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
module.gmake = MakeExecutable("gmake", jobs) module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools # TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their # for now there is no entrypoint for builders to define these on their
# own # own

View File

@@ -27,6 +27,7 @@ class QMakePackage(spack.package_base.PackageBase):
build_system("qmake") build_system("qmake")
depends_on("qmake", type="build", when="build_system=qmake") depends_on("qmake", type="build", when="build_system=qmake")
depends_on("gmake", type="build")
@spack.builder.builder("qmake") @spack.builder.builder("qmake")

View File

@@ -1819,12 +1819,6 @@ def _has_make_target(self, target):
Returns: Returns:
bool: True if 'target' is found, else False bool: True if 'target' is found, else False
""" """
# Prevent altering LC_ALL for 'make' outside this function
make = copy.deepcopy(self.module.make)
# Use English locale for missing target message comparison
make.add_default_env("LC_ALL", "C")
# Check if we have a Makefile # Check if we have a Makefile
for makefile in ["GNUmakefile", "Makefile", "makefile"]: for makefile in ["GNUmakefile", "Makefile", "makefile"]:
if os.path.exists(makefile): if os.path.exists(makefile):
@@ -1833,6 +1827,12 @@ def _has_make_target(self, target):
tty.debug("No Makefile found in the build directory") tty.debug("No Makefile found in the build directory")
return False return False
# Prevent altering LC_ALL for 'make' outside this function
make = copy.deepcopy(self.module.make)
# Use English locale for missing target message comparison
make.add_default_env("LC_ALL", "C")
# Check if 'target' is a valid target. # Check if 'target' is a valid target.
# #
# `make -n target` performs a "dry run". It prints the commands that # `make -n target` performs a "dry run". It prints the commands that

View File

@@ -20,7 +20,7 @@
import spack.paths import spack.paths
import spack.platforms import spack.platforms
import spack.platforms.test import spack.platforms.test
from spack.build_environment import ChildError, setup_package from spack.build_environment import ChildError, MakeExecutable, setup_package
from spack.installer import PackageInstaller from spack.installer import PackageInstaller
from spack.spec import Spec from spack.spec import Spec
from spack.util.executable import which from spack.util.executable import which
@@ -29,10 +29,12 @@
@pytest.fixture() @pytest.fixture()
def concretize_and_setup(default_mock_concretization): def concretize_and_setup(default_mock_concretization, monkeypatch):
def _func(spec_str): def _func(spec_str):
s = default_mock_concretization(spec_str) s = default_mock_concretization(spec_str)
setup_package(s.package, False) setup_package(s.package, False)
monkeypatch.setattr(s.package.module, "make", MakeExecutable("make", jobs=1))
monkeypatch.setattr(s.package.module, "ninja", MakeExecutable("ninja", jobs=1))
return s return s
return _func return _func

View File

@@ -29,31 +29,31 @@ def make_executable(tmp_path, working_env):
def test_make_normal(): def test_make_normal():
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
assert make(output=str).strip() == "-j8" assert make(output=str).strip() == "-j8"
assert make("install", output=str).strip() == "-j8 install" assert make("install", output=str).strip() == "-j8 install"
def test_make_explicit(): def test_make_explicit():
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
assert make(parallel=True, output=str).strip() == "-j8" assert make(parallel=True, output=str).strip() == "-j8"
assert make("install", parallel=True, output=str).strip() == "-j8 install" assert make("install", parallel=True, output=str).strip() == "-j8 install"
def test_make_one_job(): def test_make_one_job():
make = MakeExecutable("make", 1) make = MakeExecutable("make", jobs=1)
assert make(output=str).strip() == "-j1" assert make(output=str).strip() == "-j1"
assert make("install", output=str).strip() == "-j1 install" assert make("install", output=str).strip() == "-j1 install"
def test_make_parallel_false(): def test_make_parallel_false():
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
assert make(parallel=False, output=str).strip() == "-j1" assert make(parallel=False, output=str).strip() == "-j1"
assert make("install", parallel=False, output=str).strip() == "-j1 install" assert make("install", parallel=False, output=str).strip() == "-j1 install"
def test_make_parallel_disabled(monkeypatch): def test_make_parallel_disabled(monkeypatch):
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true") monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
assert make(output=str).strip() == "-j1" assert make(output=str).strip() == "-j1"
@@ -74,7 +74,7 @@ def test_make_parallel_disabled(monkeypatch):
def test_make_parallel_precedence(monkeypatch): def test_make_parallel_precedence(monkeypatch):
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
# These should work # These should work
monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true") monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
@@ -96,21 +96,21 @@ def test_make_parallel_precedence(monkeypatch):
def test_make_jobs_env(): def test_make_jobs_env():
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
dump_env = {} dump_env = {}
assert make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip() == "-j8" assert make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip() == "-j8"
assert dump_env["MAKE_PARALLELISM"] == "8" assert dump_env["MAKE_PARALLELISM"] == "8"
def test_make_jobserver(monkeypatch): def test_make_jobserver(monkeypatch):
make = MakeExecutable("make", 8) make = MakeExecutable("make", jobs=8)
monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y") monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
assert make(output=str).strip() == "" assert make(output=str).strip() == ""
assert make(parallel=False, output=str).strip() == "-j1" assert make(parallel=False, output=str).strip() == "-j1"
def test_make_jobserver_not_supported(monkeypatch): def test_make_jobserver_not_supported(monkeypatch):
make = MakeExecutable("make", 8, supports_jobserver=False) make = MakeExecutable("make", jobs=8, supports_jobserver=False)
monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y") monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
# Currently fallback on default job count, Maybe it should force -j1 ? # Currently fallback on default job count, Maybe it should force -j1 ?
assert make(output=str).strip() == "-j8" assert make(output=str).strip() == "-j8"

View File

@@ -16,3 +16,8 @@ class Gmake(Package):
def do_stage(self): def do_stage(self):
mkdirp(self.stage.source_path) mkdirp(self.stage.source_path)
def setup_dependent_package(self, module, dspec):
module.make = MakeExecutable(
"make", jobs=determine_number_of_jobs(parallel=dspec.package.parallel)
)

View File

@@ -108,7 +108,9 @@ def pgo_train(self):
# Run spack solve --fresh hdf5 with instrumented clingo. # Run spack solve --fresh hdf5 with instrumented clingo.
python_runtime_env = EnvironmentModifications() python_runtime_env = EnvironmentModifications()
python_runtime_env.extend( python_runtime_env.extend(
spack.user_environment.environment_modifications_for_specs(self.spec) spack.user_environment.environment_modifications_for_specs(
self.spec, set_package_py_globals=False
)
) )
python_runtime_env.unset("SPACK_ENV") python_runtime_env.unset("SPACK_ENV")
python_runtime_env.unset("SPACK_PYTHON") python_runtime_env.unset("SPACK_PYTHON")

View File

@@ -28,6 +28,7 @@ class Fltk(Package):
depends_on("c", type="build") # generated depends_on("c", type="build") # generated
depends_on("cxx", type="build") # generated depends_on("cxx", type="build") # generated
depends_on("gmake", type="build")
depends_on("libx11") depends_on("libx11")

View File

@@ -96,5 +96,6 @@ def install(self, spec, prefix):
def setup_dependent_package(self, module, dspec): def setup_dependent_package(self, module, dspec):
module.make = MakeExecutable( module.make = MakeExecutable(
self.spec.prefix.bin.make, determine_number_of_jobs(parallel=dspec.package.parallel) self.spec.prefix.bin.make,
jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
) )

View File

@@ -158,6 +158,7 @@ class Mfem(Package, CudaPackage, ROCmPackage):
) )
depends_on("cxx", type="build") # generated depends_on("cxx", type="build") # generated
depends_on("gmake", type="build")
variant("static", default=True, description="Build static library") variant("static", default=True, description="Build static library")
variant("shared", default=False, description="Build shared library") variant("shared", default=False, description="Build shared library")

View File

@@ -97,6 +97,6 @@ def setup_dependent_package(self, module, dspec):
module.ninja = MakeExecutable( module.ninja = MakeExecutable(
which_string(name, path=[self.spec.prefix.bin], required=True), which_string(name, path=[self.spec.prefix.bin], required=True),
determine_number_of_jobs(parallel=dspec.package.parallel), jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
supports_jobserver=True, # This fork supports jobserver supports_jobserver=True, # This fork supports jobserver
) )

View File

@@ -104,6 +104,6 @@ def setup_dependent_package(self, module, dspec):
module.ninja = MakeExecutable( module.ninja = MakeExecutable(
which_string(name, path=[self.spec.prefix.bin], required=True), which_string(name, path=[self.spec.prefix.bin], required=True),
determine_number_of_jobs(parallel=dspec.package.parallel), jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
supports_jobserver=self.spec.version == ver("kitware"), supports_jobserver=self.spec.version == ver("kitware"),
) )

View File

@@ -208,6 +208,8 @@ class Qt(Package):
depends_on("assimp@5.0.0:5", when="@5.5:+opengl") depends_on("assimp@5.0.0:5", when="@5.5:+opengl")
depends_on("sqlite+column_metadata", when="+sql", type=("build", "run")) depends_on("sqlite+column_metadata", when="+sql", type=("build", "run"))
depends_on("inputproto", when="@:5.8") depends_on("inputproto", when="@:5.8")
depends_on("gmake", type="build")
for plat in ["linux", "freebsd"]: for plat in ["linux", "freebsd"]:
with when(f"platform={plat} +gui"): with when(f"platform={plat} +gui"):
depends_on("fontconfig") depends_on("fontconfig")

View File

@@ -190,6 +190,7 @@ class RocmOpenmpExtras(Package):
depends_on("c", type="build") # generated depends_on("c", type="build") # generated
depends_on("cxx", type="build") # generated depends_on("cxx", type="build") # generated
depends_on("fortran", type="build") # generated depends_on("fortran", type="build") # generated
depends_on("gmake", type="build")
variant("asan", default=False, description="Build with address-sanitizer enabled or disabled") variant("asan", default=False, description="Build with address-sanitizer enabled or disabled")

View File

@@ -116,6 +116,7 @@ class Slepc(Package, CudaPackage, ROCmPackage):
# NOTE: make sure PETSc and SLEPc use the same python. # NOTE: make sure PETSc and SLEPc use the same python.
depends_on("python@2.6:2.8,3.4:", type="build") depends_on("python@2.6:2.8,3.4:", type="build")
depends_on("gmake", type="build")
# Cannot mix release and development versions of SLEPc and PETSc: # Cannot mix release and development versions of SLEPc and PETSc:
depends_on("petsc@main", when="@main") depends_on("petsc@main", when="@main")

View File

@@ -66,6 +66,8 @@ class SuiteSparse(Package):
# Support for TBB has been removed in version 5.11 # Support for TBB has been removed in version 5.11
variant("tbb", default=False, description="Build with Intel TBB", when="@4.5.3:5.10") variant("tbb", default=False, description="Build with Intel TBB", when="@4.5.3:5.10")
depends_on("gmake", type="build")
depends_on("blas") depends_on("blas")
depends_on("lapack") depends_on("lapack")
depends_on("cuda", when="+cuda") depends_on("cuda", when="+cuda")