untangle spack.config / spack.util.cpus & spack.spec (#46427)

This commit is contained in:
Harmen Stoppels 2024-09-17 17:06:00 +02:00 committed by GitHub
parent 623c5a4d24
commit 3ded2fc9c5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 66 additions and 74 deletions

View File

@ -77,7 +77,6 @@
from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications,
@ -559,7 +558,7 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.

View File

@ -11,9 +11,9 @@
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.config import determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError

View File

@ -58,12 +58,10 @@
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view
import spack.spec
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.error import SpecSyntaxError
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
@ -1710,25 +1708,37 @@ def get_mark_from_yaml_data(obj):
return mark
def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
"""Parse a spec from YAML and add file/line info to errors, if it's available.
Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
add file/line information for debugging using file/line annotations from the string.
Arguments:
string: a string representing a ``Spec`` from config YAML.
def determine_number_of_jobs(
*,
parallel: bool = False,
max_cpus: int = cpus_available(),
config: Optional[Configuration] = None,
) -> int:
"""
Packages that require sequential builds need 1 job. Otherwise we use the
number of jobs set on the command line. If not set, then we use the config
defaults (which is usually set through the builtin config scope), but we
cap to the number of CPUs available to avoid oversubscription.
Parameters:
parallel: true when package supports parallel builds
max_cpus: maximum number of CPUs to use (defaults to cpus_available())
config: configuration object (defaults to global config)
"""
if not parallel:
return 1
cfg = config or CONFIG
# Command line overrides all
try:
spec = spack.spec.Spec(string)
return spec
except SpecSyntaxError as e:
mark = get_mark_from_yaml_data(string)
if mark:
msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
raise SpecSyntaxError(msg) from e
raise e
command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
if command_line is not None:
return command_line
except ValueError:
pass
return min(max_cpus, cfg.get("config:build_jobs", 16))
class ConfigSectionError(spack.error.ConfigError):

View File

@ -1648,7 +1648,7 @@ def _concretize_separately(self, tests=False):
# Solve the environment in parallel on Linux
start = time.time()
num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
# TODO: support parallel concretization on macOS and Windows
msg = "Starting concretization"

View File

@ -75,6 +75,7 @@
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
from spack.builder import run_after, run_before
from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import *
from spack.install_test import (
@ -99,7 +100,6 @@
on_package_attributes,
)
from spack.spec import InvalidSpecDetected, Spec
from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
from spack.variant import (

View File

@ -31,7 +31,6 @@
import spack.compilers
import spack.concretize
import spack.config
import spack.config as sc
import spack.deptypes as dt
import spack.environment as ev
import spack.error
@ -49,6 +48,8 @@
import spack.version as vn
import spack.version.git_ref_lookup
from spack import traverse
from spack.config import get_mark_from_yaml_data
from spack.error import SpecSyntaxError
from .core import (
AspFunction,
@ -2923,6 +2924,26 @@ def value(self) -> str:
return "".join(self.asp_problem)
def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
    """Parse a ``Spec`` from a YAML-sourced string, annotating syntax errors.

    When parsing fails and the string carries file/line annotations from the
    YAML loader, those are prepended to the error message so the user can
    locate the offending entry in the config file.

    Arguments:
        string: a string representing a ``Spec`` from config YAML.
    """
    try:
        return spack.spec.Spec(string)
    except SpecSyntaxError as e:
        mark = get_mark_from_yaml_data(string)
        if not mark:
            # No location info available; re-raise the original error as-is.
            raise e
        annotated = f"{mark.name}:{mark.line + 1}: {str(e)}"
        raise SpecSyntaxError(annotated) from e
class RequirementParser:
"""Parses requirements from package.py files and configuration, and returns rules."""
@ -3008,11 +3029,11 @@ def rules_from_conflict(self, pkg: "spack.package_base.PackageBase") -> List[Req
def _parse_prefer_conflict_item(self, item):
# The item is either a string or an object with at least a "spec" attribute
if isinstance(item, str):
spec = sc.parse_spec_from_yaml_string(item)
spec = parse_spec_from_yaml_string(item)
condition = spack.spec.Spec()
message = None
else:
spec = sc.parse_spec_from_yaml_string(item["spec"])
spec = parse_spec_from_yaml_string(item["spec"])
condition = spack.spec.Spec(item.get("when"))
message = item.get("message")
return spec, condition, message
@ -3053,10 +3074,10 @@ def _rules_from_requirements(
# validate specs from YAML first, and fail with line numbers if parsing fails.
constraints = [
sc.parse_spec_from_yaml_string(constraint) for constraint in constraints
parse_spec_from_yaml_string(constraint) for constraint in constraints
]
when_str = requirement.get("when")
when = sc.parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
constraints = [
x

View File

@ -22,7 +22,6 @@
from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix
from spack.context import Context
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@ -483,7 +482,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
assert s["pkg-a"].package.module.make_jobs == 1
spack.build_environment.set_package_py_globals(s["pkg-b"].package, context=Context.BUILD)
assert s["pkg-b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
assert s["pkg-b"].package.module.make_jobs == spack.config.determine_number_of_jobs(
parallel=s["pkg-b"].package.parallel
)
@ -516,7 +515,7 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
def test_build_jobs_sequential_is_sequential():
assert (
determine_number_of_jobs(
spack.config.determine_number_of_jobs(
parallel=False,
max_cpus=8,
config=spack.config.Configuration(
@ -530,7 +529,7 @@ def test_build_jobs_sequential_is_sequential():
def test_build_jobs_command_line_overrides():
assert (
determine_number_of_jobs(
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=1,
config=spack.config.Configuration(
@ -541,7 +540,7 @@ def test_build_jobs_command_line_overrides():
== 10
)
assert (
determine_number_of_jobs(
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=100,
config=spack.config.Configuration(
@ -555,7 +554,7 @@ def test_build_jobs_command_line_overrides():
def test_build_jobs_defaults():
assert (
determine_number_of_jobs(
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
@ -565,7 +564,7 @@ def test_build_jobs_defaults():
== 1
)
assert (
determine_number_of_jobs(
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(

View File

@ -5,9 +5,6 @@
import multiprocessing
import os
from typing import Optional
import spack.config
def cpus_available():
@ -21,36 +18,3 @@ def cpus_available():
return len(os.sched_getaffinity(0)) # novermin
except Exception:
return multiprocessing.cpu_count()
def determine_number_of_jobs(
    *,
    parallel: bool = False,
    max_cpus: int = cpus_available(),
    config: Optional["spack.config.Configuration"] = None,
) -> int:
    """Return how many build jobs a package should use.

    A package that only supports sequential builds always gets 1 job.
    Otherwise a job count given on the command line wins; failing that,
    the ``config:build_jobs`` default applies, capped at ``max_cpus`` so
    the machine is not oversubscribed.

    Parameters:
        parallel: true when package supports parallel builds
        max_cpus: maximum number of CPUs to use (defaults to cpus_available())
        config: configuration object (defaults to global config)
    """
    if not parallel:
        return 1

    cfg = config or spack.config.CONFIG

    # A value set in the command-line scope overrides everything else.
    try:
        from_cli = cfg.get("config:build_jobs", default=None, scope="command_line")
        if from_cli is not None:
            return from_cli
    except ValueError:
        # No command-line scope exists in this configuration; fall through.
        pass

    return min(max_cpus, cfg.get("config:build_jobs", 16))

View File

@ -9,7 +9,7 @@
import traceback
from typing import Optional
from spack.util.cpus import determine_number_of_jobs
from spack.config import determine_number_of_jobs
class ErrorFromWorker:

View File

@ -8,7 +8,6 @@
from llnl.util import tty
from spack.package import *
from spack.util.cpus import determine_number_of_jobs
class Amdlibm(SConsPackage):