spack test run: add a --timeout argument (#49839)

* TestSuite: add type hints
* spack test run: add a --timeout argument
* pipelines: allow 2 minutes to run tests
* Fix docstrings, increase maximum pipeline time for tests to 5 mins.
* Use SIGTERM first, SIGKILL shortly after
* Add unit-tests for "start_build_process"

---------

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
Massimiliano Culpo 2025-04-05 00:48:48 +02:00 committed by GitHub
parent 87bbcefba9
commit 7e6a216d33
10 changed files with 228 additions and 113 deletions
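
The new flag is exposed on both spack test run and spack ci rebuild; as a rough usage sketch (the timeout values below are only illustrative, not defaults):

    spack test run --timeout 60 <installed-spec>
    spack ci rebuild --tests --timeout 300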

View File

@@ -36,9 +36,11 @@
 import multiprocessing
 import os
 import re
+import signal
 import sys
 import traceback
 import types
+import warnings
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
@@ -1216,15 +1218,45 @@ def _setup_pkg_and_run(
         input_pipe.close()


-def start_build_process(pkg, function, kwargs):
+class BuildProcess:
+    def __init__(self, *, target, args) -> None:
+        self.p = multiprocessing.Process(target=target, args=args)
+
+    def start(self) -> None:
+        self.p.start()
+
+    def is_alive(self) -> bool:
+        return self.p.is_alive()
+
+    def join(self, *, timeout: Optional[int] = None):
+        self.p.join(timeout=timeout)
+
+    def terminate(self):
+        # Opportunity for graceful termination
+        self.p.terminate()
+        self.p.join(timeout=1)
+
+        # If the process didn't gracefully terminate, forcefully kill
+        if self.p.is_alive():
+            # TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
+            assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
+            os.kill(self.p.pid, signal.SIGKILL)
+            self.p.join()
+
+    @property
+    def exitcode(self):
+        return self.p.exitcode
+
+
+def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
     """Create a child process to do part of a spack build.

     Args:
         pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
-        function (typing.Callable): argless function to run in the child
-            process.
+        function (typing.Callable): argless function to run in the child process.
+        timeout: maximum time allowed to finish the execution of function

     Usage::
@@ -1252,14 +1284,14 @@ def child_fun():
     # Forward sys.stdin when appropriate, to allow toggling verbosity
     if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
         input_fd = Connection(os.dup(sys.stdin.fileno()))
-    mflags = os.environ.get("MAKEFLAGS", False)
-    if mflags:
+    mflags = os.environ.get("MAKEFLAGS")
+    if mflags is not None:
         m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
         if m:
             jobserver_fd1 = Connection(int(m.group(1)))
             jobserver_fd2 = Connection(int(m.group(2)))

-    p = multiprocessing.Process(
+    p = BuildProcess(
         target=_setup_pkg_and_run,
         args=(
             serialized_pkg,
@@ -1293,14 +1325,17 @@ def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
         return f"{typ} {abs(p.exitcode)}"

+    p.join(timeout=timeout)
+    if p.is_alive():
+        warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
+        p.terminate()
+        p.join()
+
     try:
         child_result = read_pipe.recv()
     except EOFError:
-        p.join()
         raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
-    p.join()

     # If returns a StopPhase, raise it
     if isinstance(child_result, spack.error.StopPhase):
         # do not print
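
From a caller's point of view, the only visible change is the new keyword-only argument. A minimal sketch, assuming a package object pkg and an argless callable fn are already in hand:

    # if fn is still running after 300s, the child is terminated
    # (SIGTERM first, SIGKILL shortly after) and an InstallError is likely raised
    result = spack.build_environment.start_build_process(pkg, fn, {}, timeout=300)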

View File

@@ -14,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set, Union
+from typing import Callable, Dict, List, Optional, Set, Union
 from urllib.request import Request

 import llnl.path
@@ -1294,35 +1294,34 @@ def display_broken_spec_messages(base_url, hashes):
         tty.msg(msg)


-def run_standalone_tests(**kwargs):
+def run_standalone_tests(
+    *,
+    cdash: Optional[CDashHandler] = None,
+    fail_fast: bool = False,
+    log_file: Optional[str] = None,
+    job_spec: Optional[spack.spec.Spec] = None,
+    repro_dir: Optional[str] = None,
+    timeout: Optional[int] = None,
+):
     """Run stand-alone tests on the current spec.

-    Arguments:
-        kwargs (dict): dictionary of arguments used to run the tests
-
-    List of recognized keys:
-
-    * "cdash" (CDashHandler): (optional) cdash handler instance
-    * "fail_fast" (bool): (optional) terminate tests after the first failure
-    * "log_file" (str): (optional) test log file name if NOT CDash reporting
-    * "job_spec" (Spec): spec that was built
-    * "repro_dir" (str): reproduction directory
+    Args:
+        cdash: cdash handler instance
+        fail_fast: terminate tests after the first failure
+        log_file: test log file name if NOT CDash reporting
+        job_spec: spec that was built
+        repro_dir: reproduction directory
+        timeout: maximum time (in seconds) that tests are allowed to run
     """
-    cdash = kwargs.get("cdash")
-    fail_fast = kwargs.get("fail_fast")
-    log_file = kwargs.get("log_file")
     if cdash and log_file:
         tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None

     # Error out but do NOT terminate if there are missing required arguments.
-    job_spec = kwargs.get("job_spec")
     if not job_spec:
         tty.error("Job spec is required to run stand-alone tests")
         return

-    repro_dir = kwargs.get("repro_dir")
     if not repro_dir:
         tty.error("Reproduction directory is required for stand-alone tests")
         return
@@ -1331,6 +1330,9 @@ def run_standalone_tests(**kwargs):
     if fail_fast:
         test_args.append("--fail-fast")

+    if timeout is not None:
+        test_args.extend(["--timeout", str(timeout)])
+
     if cdash:
         test_args.extend(cdash.args())
     else:

View File

@@ -160,6 +160,12 @@ def setup_parser(subparser):
         default=False,
         help="stop stand-alone tests after the first failure",
     )
+    rebuild.add_argument(
+        "--timeout",
+        type=int,
+        default=None,
+        help="maximum time (in seconds) that tests are allowed to run",
+    )
     rebuild.set_defaults(func=ci_rebuild)
     spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
@@ -521,6 +527,7 @@ def ci_rebuild(args):
                 fail_fast=args.fail_fast,
                 log_file=log_file,
                 repro_dir=repro_dir,
+                timeout=args.timeout,
             )
         except Exception as err:

View File

@@ -65,6 +65,12 @@ def setup_parser(subparser):
     run_parser.add_argument(
         "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
     )
+    run_parser.add_argument(
+        "--timeout",
+        type=int,
+        default=None,
+        help="maximum time (in seconds) that tests are allowed to run",
+    )

     cd_group = run_parser.add_mutually_exclusive_group()
     arguments.add_common_arguments(cd_group, ["clean", "dirty"])
@@ -176,7 +182,7 @@ def test_run(args):
     for spec in specs:
         matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
-            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
+            tty.warn(f"No {explicit_str}installed packages match spec {spec}")

             # TODO: Need to write out a log message and/or CDASH Testing
             # output that package not installed IF continue to process
@@ -192,7 +198,7 @@ def test_run(args):
     #   test_stage_dir
     test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
     test_suite.ensure_stage()
-    tty.msg("Spack test %s" % test_suite.name)
+    tty.msg(f"Spack test {test_suite.name}")

     # Set up reporter
     setattr(args, "package", [s.format() for s in test_suite.specs])
@@ -204,6 +210,7 @@ def test_run(args):
         dirty=args.dirty,
         fail_first=args.fail_first,
         externals=args.externals,
+        timeout=args.timeout,
     )

View File

@@ -12,7 +12,7 @@
 import shutil
 import sys
 from collections import Counter, OrderedDict
-from typing import Callable, List, Optional, Tuple, Type, TypeVar, Union
+from typing import Callable, Iterable, List, Optional, Tuple, Type, TypeVar, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -391,7 +391,7 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
         if self.test_failures:
             raise TestFailure(self.test_failures)

-    def stand_alone_tests(self, kwargs):
+    def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
         """Run the package's stand-alone tests.

         Args:
@@ -399,7 +399,9 @@ def stand_alone_tests(self, kwargs):
         """
         import spack.build_environment  # avoid circular dependency

-        spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
+        spack.build_environment.start_build_process(
+            self.pkg, test_process, kwargs, timeout=timeout
+        )

     def parts(self) -> int:
         """The total number of (checked) test parts."""
@@ -847,7 +849,7 @@ def write_test_summary(counts: "Counter"):
 class TestSuite:
     """The class that manages specs for ``spack test run`` execution."""

-    def __init__(self, specs, alias=None):
+    def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None:
         # copy so that different test suites have different package objects
         # even if they contain the same spec
         self.specs = [spec.copy() for spec in specs]
@@ -855,42 +857,43 @@ def __init__(self, specs, alias=None):
         self.current_base_spec = None  # spec currently running do_test

         self.alias = alias
-        self._hash = None
-        self._stage = None
+        self._hash: Optional[str] = None
+        self._stage: Optional[Prefix] = None

         self.counts: "Counter" = Counter()

     @property
-    def name(self):
+    def name(self) -> str:
         """The name (alias or, if none, hash) of the test suite."""
         return self.alias if self.alias else self.content_hash

     @property
-    def content_hash(self):
+    def content_hash(self) -> str:
         """The hash used to uniquely identify the test suite."""
         if not self._hash:
             json_text = sjson.dump(self.to_dict())
+            assert json_text is not None, f"{__name__} unexpected value for 'json_text'"
             sha = hashlib.sha1(json_text.encode("utf-8"))
             b32_hash = base64.b32encode(sha.digest()).lower()
             b32_hash = b32_hash.decode("utf-8")
             self._hash = b32_hash
         return self._hash

-    def __call__(self, *args, **kwargs):
+    def __call__(
+        self,
+        *,
+        remove_directory: bool = True,
+        dirty: bool = False,
+        fail_first: bool = False,
+        externals: bool = False,
+        timeout: Optional[int] = None,
+    ):
         self.write_reproducibility_data()

-        remove_directory = kwargs.get("remove_directory", True)
-        dirty = kwargs.get("dirty", False)
-        fail_first = kwargs.get("fail_first", False)
-        externals = kwargs.get("externals", False)
-
         for spec in self.specs:
             try:
                 if spec.package.test_suite:
                     raise TestSuiteSpecError(
-                        "Package {} cannot be run in two test suites at once".format(
-                            spec.package.name
-                        )
+                        f"Package {spec.package.name} cannot be run in two test suites at once"
                     )

                 # Set up the test suite to know which test is running
@@ -905,7 +908,7 @@ def __call__(self, *args, **kwargs):
                 fs.mkdirp(test_dir)

                 # run the package tests
-                spec.package.do_test(dirty=dirty, externals=externals)
+                spec.package.do_test(dirty=dirty, externals=externals, timeout=timeout)

                 # Clean up on success
                 if remove_directory:
@@ -956,15 +959,12 @@ def __call__(self, *args, **kwargs):
         if failures:
             raise TestSuiteFailure(failures)

-    def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
-        """Determine the overall test results status for the spec.
+    def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus:
+        """Returns the overall test results status for the spec.

         Args:
             spec: instance of the spec under test
             externals: ``True`` if externals are to be tested, else ``False``
-
-        Returns:
-            the spec's test status if available or ``None``
         """
         tests_status_file = self.tested_file_for_spec(spec)
         if not os.path.exists(tests_status_file):
@@ -981,109 +981,84 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
             value = (f.read()).strip("\n")
         return TestStatus(int(value)) if value else TestStatus.NO_TESTS

-    def ensure_stage(self):
+    def ensure_stage(self) -> None:
         """Ensure the test suite stage directory exists."""
         if not os.path.exists(self.stage):
             fs.mkdirp(self.stage)

     @property
-    def stage(self):
-        """The root test suite stage directory.
-
-        Returns:
-            str: the spec's test stage directory path
-        """
+    def stage(self) -> Prefix:
+        """The root test suite stage directory"""
         if not self._stage:
             self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
         return self._stage

     @stage.setter
-    def stage(self, value):
+    def stage(self, value: Union[Prefix, str]) -> None:
         """Set the value of a non-default stage directory."""
         self._stage = value if isinstance(value, Prefix) else Prefix(value)

     @property
-    def results_file(self):
+    def results_file(self) -> Prefix:
         """The path to the results summary file."""
         return self.stage.join(results_filename)

     @classmethod
-    def test_pkg_id(cls, spec):
+    def test_pkg_id(cls, spec: Spec) -> str:
         """The standard install test package identifier.

         Args:
             spec: instance of the spec under test
-
-        Returns:
-            str: the install test package identifier
         """
         return spec.format_path("{name}-{version}-{hash:7}")

     @classmethod
-    def test_log_name(cls, spec):
+    def test_log_name(cls, spec: Spec) -> str:
         """The standard log filename for a spec.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's log filename
+            spec: instance of the spec under test
         """
-        return "%s-test-out.txt" % cls.test_pkg_id(spec)
+        return f"{cls.test_pkg_id(spec)}-test-out.txt"

-    def log_file_for_spec(self, spec):
+    def log_file_for_spec(self, spec: Spec) -> Prefix:
         """The test log file path for the provided spec.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the path to the spec's log file
+            spec: instance of the spec under test
         """
         return self.stage.join(self.test_log_name(spec))

-    def test_dir_for_spec(self, spec):
+    def test_dir_for_spec(self, spec: Spec) -> Prefix:
         """The path to the test stage directory for the provided spec.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test stage directory path
+            spec: instance of the spec under test
         """
         return Prefix(self.stage.join(self.test_pkg_id(spec)))

     @classmethod
-    def tested_file_name(cls, spec):
+    def tested_file_name(cls, spec: Spec) -> str:
         """The standard test status filename for the spec.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test status filename
+            spec: instance of the spec under test
         """
         return "%s-tested.txt" % cls.test_pkg_id(spec)

-    def tested_file_for_spec(self, spec):
+    def tested_file_for_spec(self, spec: Spec) -> str:
         """The test status file path for the spec.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test status file path
+            spec: instance of the spec under test
         """
         return fs.join_path(self.stage, self.tested_file_name(spec))

     @property
-    def current_test_cache_dir(self):
+    def current_test_cache_dir(self) -> str:
         """Path to the test stage directory where the current spec's cached
         build-time files were automatically copied.

-        Returns:
-            str: path to the current spec's staged, cached build-time files.
-
         Raises:
             TestSuiteSpecError: If there is no spec being tested
         """
@@ -1095,13 +1070,10 @@ def current_test_cache_dir(self):
         return self.test_dir_for_spec(base_spec).cache.join(test_spec.name)

     @property
-    def current_test_data_dir(self):
+    def current_test_data_dir(self) -> str:
         """Path to the test stage directory where the current spec's custom
         package (data) files were automatically copied.

-        Returns:
-            str: path to the current spec's staged, custom package (data) files
-
         Raises:
             TestSuiteSpecError: If there is no spec being tested
         """
@@ -1112,17 +1084,17 @@ def current_test_data_dir(self):
         base_spec = self.current_base_spec
         return self.test_dir_for_spec(base_spec).data.join(test_spec.name)

-    def write_test_result(self, spec, result):
+    def write_test_result(self, spec: Spec, result: TestStatus) -> None:
         """Write the spec's test result to the test suite results file.

         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-            result (str): result from the spec's test execution (e.g, PASSED)
+            spec: instance of the spec under test
+            result: result from the spec's test execution (e.g, PASSED)
         """
         msg = f"{self.test_pkg_id(spec)} {result}"
         _add_msg_to_file(self.results_file, msg)

-    def write_reproducibility_data(self):
+    def write_reproducibility_data(self) -> None:
         for spec in self.specs:
             repo_cache_path = self.stage.repo.join(spec.name)
             spack.repo.PATH.dump_provenance(spec, repo_cache_path)
@@ -1167,12 +1139,12 @@ def from_dict(d):
         return TestSuite(specs, alias)

     @staticmethod
-    def from_file(filename):
+    def from_file(filename: str) -> "TestSuite":
         """Instantiate a TestSuite using the specs and optional alias
         provided in the given file.

         Args:
-            filename (str): The path to the JSON file containing the test
+            filename: The path to the JSON file containing the test
                 suite specs and optional alias.

         Raises:

View File

@@ -1821,7 +1821,7 @@ def _resource_stage(self, resource):
         resource_stage_folder = "-".join(pieces)
         return resource_stage_folder

-    def do_test(self, dirty=False, externals=False):
+    def do_test(self, *, dirty=False, externals=False, timeout: Optional[int] = None):
         if self.test_requires_compiler and not any(
             lang in self.spec for lang in ("c", "cxx", "fortran")
         ):
@@ -1839,7 +1839,7 @@ def do_test(self, dirty=False, externals=False):
             "verbose": tty.is_verbose(),
         }

-        self.tester.stand_alone_tests(kwargs)
+        self.tester.stand_alone_tests(kwargs, timeout=timeout)

     def unit_test_check(self):
         """Hook for unit tests to assert things about package internals.

View File

@@ -1,9 +1,12 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
+import multiprocessing
 import os
 import posixpath
 import sys
+from typing import Dict, Optional, Tuple

 import pytest
@@ -828,3 +831,88 @@ def test_extra_rpaths_is_set(
         assert os.environ["SPACK_COMPILER_EXTRA_RPATHS"] == expected_rpaths
     else:
         assert "SPACK_COMPILER_EXTRA_RPATHS" not in os.environ
+
+
+class _TestProcess:
+    calls: Dict[str, int] = collections.defaultdict(int)
+    terminated = False
+    runtime = 0
+
+    def __init__(self, *, target, args):
+        self.alive = None
+        self.exitcode = 0
+        self._reset()
+
+    def start(self):
+        self.calls["start"] += 1
+        self.alive = True
+
+    def is_alive(self):
+        self.calls["is_alive"] += 1
+        return self.alive
+
+    def join(self, timeout: Optional[int] = None):
+        self.calls["join"] += 1
+        if timeout is not None and timeout > self.runtime:
+            self.alive = False
+
+    def terminate(self):
+        self.calls["terminate"] += 1
+        self._set_terminated()
+        self.alive = False
+
+    @classmethod
+    def _set_terminated(cls):
+        cls.terminated = True
+
+    @classmethod
+    def _reset(cls):
+        cls.calls.clear()
+        cls.terminated = False
+
+
+class _TestPipe:
+    def close(self):
+        pass
+
+    def recv(self):
+        if _TestProcess.terminated is True:
+            return 1
+        return 0
+
+
+def _pipe_fn(*, duplex: bool = False) -> Tuple[_TestPipe, _TestPipe]:
+    return _TestPipe(), _TestPipe()
+
+
+@pytest.fixture()
+def mock_build_process(monkeypatch):
+    monkeypatch.setattr(spack.build_environment, "BuildProcess", _TestProcess)
+    monkeypatch.setattr(multiprocessing, "Pipe", _pipe_fn)
+
+    def _factory(*, runtime: int):
+        _TestProcess.runtime = runtime
+
+    return _factory
+
+
+@pytest.mark.parametrize(
+    "runtime,timeout,expected_result,expected_calls",
+    [
+        # execution time < timeout
+        (2, 5, 0, {"start": 1, "join": 1, "is_alive": 1}),
+        # execution time > timeout
+        (5, 2, 1, {"start": 1, "join": 2, "is_alive": 1, "terminate": 1}),
+    ],
+)
+def test_build_process_timeout(
+    mock_build_process, runtime, timeout, expected_result, expected_calls
+):
+    """Tests that we make the correct function calls in different timeout scenarios."""
+    mock_build_process(runtime=runtime)
+    result = spack.build_environment.start_build_process(
+        pkg=None, function=None, kwargs={}, timeout=timeout
+    )
+    assert result == expected_result
+    assert _TestProcess.calls == expected_calls

View File

@@ -22,7 +22,7 @@ ci:
   script::
     - - if [ -n "$SPACK_EXTRA_MIRROR" ]; then spack mirror add local "${SPACK_EXTRA_MIRROR}/${SPACK_CI_STACK_NAME}"; fi
      - spack config blame mirrors
-    - - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+    - - spack --color=always --backtrace ci rebuild -j ${SPACK_BUILD_JOBS} --tests --timeout 300 > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
   after_script:
     - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

View File

@@ -700,7 +700,7 @@ _spack_ci_rebuild_index() {
 }

 _spack_ci_rebuild() {
-    SPACK_COMPREPLY="-h --help -t --tests --fail-fast -j --jobs"
+    SPACK_COMPREPLY="-h --help -t --tests --fail-fast --timeout -j --jobs"
 }

 _spack_ci_reproduce_build() {
@@ -1903,7 +1903,7 @@ _spack_test() {
 _spack_test_run() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals -x --explicit --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty"
+        SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals -x --explicit --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --timeout --clean --dirty"
     else
        _installed_packages
    fi

View File

@@ -989,13 +989,15 @@ complete -c spack -n '__fish_spack_using_command ci rebuild-index' -s h -l help
 complete -c spack -n '__fish_spack_using_command ci rebuild-index' -s h -l help -d 'show this help message and exit'

 # spack ci rebuild
-set -g __fish_spack_optspecs_spack_ci_rebuild h/help t/tests fail-fast j/jobs=
+set -g __fish_spack_optspecs_spack_ci_rebuild h/help t/tests fail-fast timeout= j/jobs=
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s h -l help -d 'show this help message and exit'
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s t -l tests -f -a tests
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s t -l tests -d 'run stand-alone tests after the build'
 complete -c spack -n '__fish_spack_using_command ci rebuild' -l fail-fast -f -a fail_fast
 complete -c spack -n '__fish_spack_using_command ci rebuild' -l fail-fast -d 'stop stand-alone tests after the first failure'
+complete -c spack -n '__fish_spack_using_command ci rebuild' -l timeout -r -f -a timeout
+complete -c spack -n '__fish_spack_using_command ci rebuild' -l timeout -r -d 'maximum time (in seconds) that tests are allowed to run'
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s j -l jobs -r -f -a jobs
 complete -c spack -n '__fish_spack_using_command ci rebuild' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
@@ -2950,7 +2952,7 @@ complete -c spack -n '__fish_spack_using_command test' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command test' -s h -l help -d 'show this help message and exit'

 # spack test run
-set -g __fish_spack_optspecs_spack_test_run h/help alias= fail-fast fail-first externals x/explicit keep-stage log-format= log-file= cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= help-cdash clean dirty
+set -g __fish_spack_optspecs_spack_test_run h/help alias= fail-fast fail-first externals x/explicit keep-stage log-format= log-file= cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= help-cdash timeout= clean dirty
 complete -c spack -n '__fish_spack_using_command_pos_remainder 0 test run' -f -a '(__fish_spack_installed_specs)'
 complete -c spack -n '__fish_spack_using_command test run' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command test run' -s h -l help -d 'show this help message and exit'
@@ -2977,6 +2979,8 @@ complete -c spack -n '__fish_spack_using_command test run' -l cdash-track -r -f
 complete -c spack -n '__fish_spack_using_command test run' -l cdash-buildstamp -r -f -a cdash_buildstamp
 complete -c spack -n '__fish_spack_using_command test run' -l help-cdash -f -a help_cdash
 complete -c spack -n '__fish_spack_using_command test run' -l help-cdash -d 'show usage instructions for CDash reporting'
+complete -c spack -n '__fish_spack_using_command test run' -l timeout -r -f -a timeout
+complete -c spack -n '__fish_spack_using_command test run' -l timeout -r -d 'maximum time (in seconds) that tests are allowed to run'
 complete -c spack -n '__fish_spack_using_command test run' -l clean -f -a dirty
 complete -c spack -n '__fish_spack_using_command test run' -l clean -d 'unset harmful variables in the build environment (default)'
 complete -c spack -n '__fish_spack_using_command test run' -l dirty -f -a dirty