Reduce verbosity of error messages when concretizing environments (#26843)

With this commit, stacktraces of subprocesses are shown only if debug mode is active (a brief behavior sketch follows the commit metadata below).
Massimiliano Culpo 2021-10-20 13:30:07 +02:00 committed by GitHub
parent 26b58701bc
commit 56209cb114
2 changed files with 25 additions and 17 deletions

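In short, worker errors are still collected and re-raised in the parent process, but the full per-process stacktrace is only included when debug output is requested. A minimal behavior sketch, assuming parallel_map accepts a plain callable (as the _concretize_task call below suggests); the _fail task is hypothetical:

    import spack.util.parallel

    def _fail(pkg_name):
        # Hypothetical task used only to trigger an error in a worker.
        raise ValueError('cannot concretize {0}'.format(pkg_name))

    try:
        # debug=False (the default): the RuntimeError carries only str(exc)
        # from each failed worker, without any stacktrace.
        spack.util.parallel.parallel_map(_fail, ['pkg-a', 'pkg-b'], debug=False)
    except RuntimeError as e:
        print(e)

    try:
        # debug=True: each entry is the full '[PID=...] <stacktrace>' text.
        spack.util.parallel.parallel_map(_fail, ['pkg-a', 'pkg-b'], debug=True)
    except RuntimeError as e:
        print(e)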

@@ -1161,7 +1161,8 @@ def _concretize_separately(self, tests=False):
         tty.msg(msg)
 
         concretized_root_specs = spack.util.parallel.parallel_map(
-            _concretize_task, arguments, max_processes=max_processes
+            _concretize_task, arguments, max_processes=max_processes,
+            debug=tty.is_debug()
         )
 
         finish = time.time()

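On the caller side, tty.is_debug() reflects whether Spack was started in debug mode (spack -d / --debug), so the extra verbosity is opt-in. A small sketch of that toggle, assuming the usual llnl.util.tty helpers:

    import llnl.util.tty as tty

    print(tty.is_debug())  # False in a normal run
    tty.set_debug(True)    # roughly what `spack -d` arranges for the process
    print(tty.is_debug())  # True, so parallel_map(..., debug=tty.is_debug())
                           # will embed full worker stacktraces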

@@ -10,8 +10,6 @@
 import sys
 import traceback
 
-import six
-
 from .cpus import cpus_available
@@ -28,11 +26,16 @@ def __init__(self, exc_cls, exc, tb):
             exc: exception raised from the worker process
         """
         self.pid = os.getpid()
-        self.error_message = ''.join(traceback.format_exception(exc_cls, exc, tb))
+        self.error_message = str(exc)
+        self.stacktrace_message = ''.join(traceback.format_exception(exc_cls, exc, tb))
+
+    @property
+    def stacktrace(self):
+        msg = "[PID={0.pid}] {0.stacktrace_message}"
+        return msg.format(self)
 
     def __str__(self):
-        msg = "[PID={0.pid}] {0.error_message}"
-        return msg.format(self)
+        return self.error_message
 
 
 class Task(object):
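For reference, the split stored above is just the difference between str(exc) and a formatted traceback; a standalone sketch in plain Python:

    import sys
    import traceback

    try:
        {}['missing']
    except Exception:
        exc_cls, exc, tb = sys.exc_info()
        short = str(exc)                                              # "'missing'"
        full = ''.join(traceback.format_exception(exc_cls, exc, tb))  # multi-line trace
    # ErrorFromWorker now keeps both: __str__ returns the short message,
    # while the stacktrace property prepends '[PID=...]' to the full text.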
@@ -53,29 +56,31 @@ def __call__(self, *args, **kwargs):
         return value
 
 
-def raise_if_errors(*results):
+def raise_if_errors(*results, **kwargs):
     """Analyze results from worker Processes to search for ErrorFromWorker
     objects. If found print all of them and raise an exception.
 
     Args:
         *results: results from worker processes
+        debug: if True show complete stacktraces
 
     Raise:
         RuntimeError: if ErrorFromWorker objects are in the results
     """
-    err_stream = six.StringIO()  # sys.stderr
+    debug = kwargs.get('debug', False)  # This can be a keyword only arg in Python 3
     errors = [x for x in results if isinstance(x, ErrorFromWorker)]
     if not errors:
         return
 
-    # Report the errors and then raise
-    for error in errors:
-        print(error, file=err_stream)
-
-    print('[PARENT PROCESS]:', file=err_stream)
-    traceback.print_stack(file=err_stream)
-    error_msg = 'errors occurred in worker processes:\n{0}'
-    raise RuntimeError(error_msg.format(err_stream.getvalue()))
+    msg = '\n'.join([
+        error.stacktrace if debug else str(error) for error in errors
+    ])
+
+    error_fmt = '{0}'
+    if len(errors) > 1 and not debug:
+        error_fmt = 'errors occurred during concretization of the environment:\n{0}'
+
+    raise RuntimeError(error_fmt.format(msg))
 
 
 @contextlib.contextmanager
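The aggregation above can be exercised on its own; a rough sketch, assuming ErrorFromWorker and raise_if_errors are importable from spack.util.parallel, with a hypothetical _capture helper standing in for a worker:

    import sys

    from spack.util.parallel import ErrorFromWorker, raise_if_errors

    def _capture(exc):
        # Hypothetical helper: mimic a worker that trapped an exception.
        try:
            raise exc
        except Exception:
            return ErrorFromWorker(*sys.exc_info())

    errors = [_capture(ValueError('bad spec')), _capture(KeyError('missing dep'))]

    # Without debug the RuntimeError lists only the two short messages,
    # under the 'errors occurred during concretization...' header because
    # len(errors) > 1; with debug=True it would contain full stacktraces.
    try:
        raise_if_errors(*errors, debug=False)
    except RuntimeError as e:
        print(e)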
@@ -108,13 +113,15 @@ def num_processes(max_processes=None):
     return min(cpus_available(), max_processes)
 
 
-def parallel_map(func, arguments, max_processes=None):
+def parallel_map(func, arguments, max_processes=None, debug=False):
     """Map a task object to the list of arguments, return the list of results.
 
     Args:
         func (Task): user defined task object
         arguments (list): list of arguments for the task
         max_processes (int or None): maximum number of processes allowed
+        debug (bool): if False, raise an exception containing just the error messages
+            from workers, if True an exception with complete stacktraces
 
     Raises:
         RuntimeError: if any error occurred in the worker processes
@@ -125,5 +132,5 @@ def parallel_map(func, arguments, max_processes=None):
             results = p.map(task_wrapper, arguments)
     else:
         results = list(map(task_wrapper, arguments))
-    raise_if_errors(*results)
+    raise_if_errors(*results, debug=debug)
     return results
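As a usage note, the new keyword defaults to False, so existing callers keep the quieter behavior unless they opt in; a minimal sketch with a trivial, hypothetical task:

    import spack.util.parallel

    def _square(x):
        return x * x

    # Unchanged call sites behave as before; debug only matters on failure.
    results = spack.util.parallel.parallel_map(_square, [1, 2, 3], max_processes=2)
    print(results)  # [1, 4, 9]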