Compiler search uses a pool of workers (#10190)
- spack.compilers.find_compilers now uses a multiprocessing.pool.ThreadPool to execute system commands for the detection of compiler versions.
- A few memoized functions have been introduced to avoid poking the filesystem multiple times for the same results.
- Performance is much improved, and Spack no longer fork-bombs the system when doing a `compiler find`.
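For illustration only, the sketch below shows the general pattern the message describes: probing candidate compilers with a bounded `multiprocessing.pool.ThreadPool` and memoizing filesystem checks so each path is only inspected once. The helper names (`candidate_paths`, `detect_version`, `is_executable`) and the `--version` invocation are assumptions made for the example, not Spack's actual API.

```python
# Minimal sketch (not Spack's implementation): detect compiler versions
# concurrently with a thread pool and memoize filesystem probes.
import os
import subprocess
from functools import lru_cache
from multiprocessing.pool import ThreadPool


@lru_cache(maxsize=None)
def is_executable(path):
    """Memoized so each candidate path is only stat'd once."""
    return os.path.isfile(path) and os.access(path, os.X_OK)


def detect_version(compiler_path):
    """Run the compiler once and return (path, first line of version output)."""
    if not is_executable(compiler_path):
        return None
    try:
        out = subprocess.run([compiler_path, '--version'],
                             capture_output=True, text=True, timeout=30)
    except (OSError, subprocess.TimeoutExpired):
        return None
    first_line = out.stdout.splitlines()[0] if out.stdout else ''
    return (compiler_path, first_line)


def find_compiler_versions(candidate_paths, max_workers=16):
    """Probe all candidates with a fixed-size thread pool instead of
    forking one process per candidate."""
    workers = min(max_workers, max(1, len(candidate_paths)))
    with ThreadPool(processes=workers) as pool:
        results = pool.map(detect_version, candidate_paths)
    return [r for r in results if r is not None]
```

With a pool, the number of concurrent subprocesses is capped by the pool size, which is what avoids the fork-bomb behavior mentioned in the commit message.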
committed by Todd Gamblin
parent 9c1c50fb76
commit 6d56d45454
@@ -8,25 +8,9 @@
 than multiprocessing.Pool.apply() can. For example, apply() will fail
 to pickle functions if they're passed indirectly as parameters.
 """
-from multiprocessing import Process, Pipe, Semaphore, Value
+from multiprocessing import Semaphore, Value

-__all__ = ['spawn', 'parmap', 'Barrier']
-
-
-def spawn(f):
-    def fun(pipe, x):
-        pipe.send(f(x))
-        pipe.close()
-    return fun
-
-
-def parmap(f, elements):
-    pipe = [Pipe() for x in elements]
-    proc = [Process(target=spawn(f), args=(c, x))
-            for x, (p, c) in zip(elements, pipe)]
-    [p.start() for p in proc]
-    [p.join() for p in proc]
-    return [p.recv() for (p, c) in pipe]
+__all__ = ['Barrier']


 class Barrier:
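For context on the deleted helpers: spawn/parmap launched one child process and one Pipe per element, so probing many compiler candidates at once could overwhelm the system. The standalone sketch below (not code from this commit) contrasts that pattern with a bounded ThreadPool.map; like the removed closure-based helper, the per-element version relies on a fork-based start method.

```python
# Illustrative contrast (not part of this commit): one process per element
# versus a bounded thread pool.
from multiprocessing import Pipe, Process
from multiprocessing.pool import ThreadPool


def parmap_process_per_element(f, elements):
    """Old pattern: a Process and a Pipe for every element.
    Needs a fork-based start method, since the worker closure over ``f``
    is not picklable (the same limitation as the removed helper)."""
    pipes = [Pipe() for _ in elements]

    def worker(conn, x):
        conn.send(f(x))
        conn.close()

    procs = [Process(target=worker, args=(child, x))
             for x, (parent, child) in zip(elements, pipes)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    return [parent.recv() for parent, _child in pipes]


def parmap_thread_pool(f, elements, max_workers=16):
    """New pattern: concurrency is capped by the pool size."""
    with ThreadPool(processes=max_workers) as pool:
        return pool.map(f, elements)


if __name__ == '__main__':
    print(parmap_thread_pool(lambda x: x * x, range(8)))
```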