find_compilers has been parallelized using a ThreadPool.

There should be no issue with the GIL: the functions that are invoked in parallel each spawn a subprocess to start a system call and then wait for its I/O, so they spend most of their time blocked with the GIL released.
This commit is contained in:
parent  db6ef96e87
commit  5330509bfe
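The claim about the GIL can be illustrated with a minimal, self-contained sketch (the compiler names and the run helper below are illustrative, not part of the commit): each detection command blocks in a subprocess call, and CPython releases the GIL while a thread waits on I/O, so a ThreadPool gives real concurrency here.

import multiprocessing.pool
import subprocess


def run(cmd):
    # Blocks while the child process runs and its output is read; the GIL is
    # released during the wait, so the other pool threads keep making progress.
    return subprocess.run(cmd, capture_output=True, text=True).stdout


commands = [['gcc', '--version'], ['clang', '--version'], ['gfortran', '--version']]

with multiprocessing.pool.ThreadPool() as tp:
    outputs = tp.map(run, commands)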
@@ -36,18 +36,20 @@ def invoke(f):
     return f()
 
 
-def execute(command_list, executor=map):
+def execute(command_list, map_fn=map, transformation=invoke):
     """Execute a list of packaged commands and return their result.
 
     Args:
         command_list: list of commands to be executed
-        executor: object that execute each command. Must have the
-            same semantic as ``map``.
+        map_fn: object that execute each command. Must have the
+            same semantic as ``map``
+        transformation: callable invoked on each item to construct
+            the output list
 
     Returns:
         List of results
     """
-    return executor(invoke, command_list)
+    return map_fn(transformation, command_list)
 
 
 class Barrier:
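With the new signature, both the mapping strategy and the per-item transformation are pluggable: by default execute(commands) behaves like map(invoke, commands), while passing a pool's map as map_fn runs the packaged commands in parallel. A usage sketch under that assumption (the commands below are toy zero-argument callables, and llnl.util.multiproc.execute is assumed to behave exactly as in the hunk above):

import functools
import multiprocessing.pool

import llnl.util.multiproc

# "Packaged commands" are zero-argument callables built ahead of time.
commands = [functools.partial(pow, 2, n) for n in range(8)]

# Serial: equivalent to map(invoke, commands).
serial = list(llnl.util.multiproc.execute(commands))

# Parallel: a ThreadPool's map drives the same commands from worker threads.
with multiprocessing.pool.ThreadPool() as tp:
    parallel = llnl.util.multiproc.execute(commands, map_fn=tp.map)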
@@ -60,7 +60,6 @@
 import inspect
 import itertools
 
-import llnl.util.multiproc
 import llnl.util.tty as tty
 from llnl.util.lang import memoized, list_modules, key_ordering
 
@@ -255,17 +254,6 @@ def search_compiler_commands(self, *path_hints):
         )
         return commands
 
-    def find_compilers(self, *path_hints):
-        """
-        Return a list of compilers found in the supplied paths.
-        This invokes the find() method for each Compiler class,
-        and appends the compilers detected to a list.
-        """
-        commands = self.search_compiler_commands(*path_hints)
-        compilers = llnl.util.multiproc.execute(commands)
-        compilers = spack.compiler.discard_invalid(compilers)
-        return spack.compiler.make_compiler_list(compilers)
-
     def to_dict(self):
         return {
             'name': self.name,
@@ -375,7 +375,8 @@ def detect_version_command(
     return None
 
 
-def discard_invalid(compilers):
+def _discard_invalid(compilers):
     """Removes invalid compilers from the list"""
     # Remove search with no results
    compilers = filter(None, compilers)
@@ -389,6 +390,8 @@ def has_known_version(compiler_entry):
 
 
 def make_compiler_list(compilers):
+    compilers = _discard_invalid(compilers)
+
     # Group by (os, compiler type, version), (prefix, suffix), language
     def sort_key_fn(item):
         key, _ = item
@@ -396,6 +399,10 @@ def sort_key_fn(item):
             (key.prefix, key.suffix), key.language
 
     compilers_s = sorted(compilers, key=sort_key_fn)
+    # This dictionary is needed because a class (NOT an instance of it)
+    # doesn't have __lt__ or other similar functions defined. Therefore
+    # we sort on its string representation and need to maintain the map
+    # to the class here
     cmp_cls_d = {str(key.cmp_cls): key.cmp_cls for key, _ in compilers_s}
 
     compilers_d = {}
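The comment added above is easy to verify in isolation: type objects define no ordering, so sorting tuples that contain compiler classes raises TypeError, and the workaround is to sort on str(cls) while keeping a dictionary from that string back to the class. A small standalone illustration (the Gcc/Clang classes here are stand-ins, not Spack's):

import itertools


class Gcc:
    pass


class Clang:
    pass


# sorted([(Gcc, '10.2'), (Clang, '11.0')]) would raise TypeError, because
# '<' is not supported between type objects.
records = [(Gcc, '10.2.0'), (Clang, '11.0.0'), (Gcc, '9.3.0')]

# Sort on the string representation of the class instead, and keep a map
# from that string back to the class, mirroring cmp_cls_d above.
records_s = sorted(records, key=lambda rec: str(rec[0]))
cls_by_str = {str(cls): cls for cls, _ in records_s}

for cls_str, group in itertools.groupby(records_s, key=lambda rec: str(rec[0])):
    print(cls_by_str[cls_str].__name__, [version for _, version in group])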
@@ -7,8 +7,10 @@
 system and configuring Spack to use multiple compilers.
 """
 import itertools
+import multiprocessing.pool
 import os
 
 
+import llnl.util.multiproc
 from llnl.util.lang import list_modules
 
@@ -193,10 +195,10 @@ def find_compilers(*paths):
     search_commands = itertools.chain.from_iterable(
         o.search_compiler_commands(*paths) for o in all_os_classes()
     )
-    # TODO: activate multiprocessing
-    # with multiprocessing.Pool(processes=None) as p:
-    compilers = llnl.util.multiproc.execute(search_commands, executor=map)
-    compilers = spack.compiler.discard_invalid(compilers)
+
+    with multiprocessing.pool.ThreadPool() as tp:
+        compilers = llnl.util.multiproc.execute(search_commands, map_fn=tp.map)
 
     return spack.compiler.make_compiler_list(compilers)
 
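Taken together, the new find_compilers reduces to this pattern: build an iterable of packaged detection commands, hand them to execute through a ThreadPool's map, then filter and group the results. A hedged end-to-end sketch (the detect helper and the executable list are invented stand-ins for search_compiler_commands and the Compiler classes):

import itertools
import multiprocessing.pool
import subprocess


def detect(exe):
    # Stand-in for a packaged detection command: a zero-argument callable
    # that shells out and returns the captured version text, or None.
    def _command():
        try:
            return subprocess.run([exe, '--version'],
                                  capture_output=True, text=True).stdout
        except OSError:
            return None
    return _command


search_commands = itertools.chain.from_iterable(
    [detect(exe)] for exe in ('gcc', 'clang', 'gfortran')
)

# Threads suffice: every command blocks in a subprocess, releasing the GIL.
with multiprocessing.pool.ThreadPool() as tp:
    results = tp.map(lambda command: command(), search_commands)

# The counterpart of _discard_invalid: drop searches that found nothing.
found = list(filter(None, results))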