Preserve jobserver file descriptors into build environment (#30302)

This ensures that multiple `spack` instances invoked from `make` respect the POSIX jobserver's limit on the total number of jobs across packages, rather than each instance parallelizing independently (see the jobserver sketch below).

Co-authored-by: Harmen Stoppels <harmenstoppels@gmail.com>
commit d3a0ac1c0a (parent 3137e7c61b)
Author: Tom Scogland
Date:   2022-05-05 21:09:58 -07:00, committed via GitHub
3 changed files with 33 additions and 14 deletions
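
For background (not part of the diff): GNU make advertises its jobserver in `MAKEFLAGS`, and cooperating processes coordinate by exchanging single-byte tokens over an inherited pipe. A minimal sketch of that protocol, with hypothetical helper names:

    import os

    def acquire_token(read_fd):
        # Blocks until another job slot is free, then claims it.
        return os.read(read_fd, 1)

    def release_token(write_fd, token):
        # Returns the slot so other jobserver clients can run a job.
        os.write(write_fd, token)

    # A client that inherits the pipe ends from make wraps each parallel
    # job in acquire/release, so the total number of concurrent jobs across
    # every cooperating process stays within make's -j limit.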

lib/spack/spack/build_environment.py

@@ -111,6 +111,20 @@
 dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
 
 
+def should_set_parallel_jobs(jobserver_support=False):
+    """Returns true in general, except when:
+    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
+    - jobserver_support is enabled, and a jobserver was found.
+    """
+    if (
+        jobserver_support and
+        'MAKEFLAGS' in os.environ and
+        '--jobserver' in os.environ['MAKEFLAGS']
+    ):
+        return False
+    return not env_flag(SPACK_NO_PARALLEL_MAKE)
+
+
 class MakeExecutable(Executable):
     """Special callable executable object for make so the user can specify
     parallelism options on a per-invocation basis.  Specifying
@@ -120,9 +134,6 @@ class MakeExecutable(Executable):
     call will name an environment variable which will be set to the
     parallelism level (without affecting the normal invocation with
     -j).
-
-    Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
-    everything.
     """
 
     def __init__(self, name, jobs):
@@ -133,9 +144,8 @@ def __call__(self, *args, **kwargs):
         """parallel, and jobs_env from kwargs are swallowed and used here;
         remaining arguments are passed through to the superclass.
         """
-
-        disable = env_flag(SPACK_NO_PARALLEL_MAKE)
-        parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)
+        parallel = should_set_parallel_jobs(jobserver_support=True) and \
+            kwargs.pop('parallel', self.jobs > 1)
 
         if parallel:
             args = ('-j{0}'.format(self.jobs),) + args
@@ -181,7 +191,7 @@ def clean_environment():
     env.unset('PYTHONPATH')
 
     # Affects GNU make, can e.g. indirectly inhibit enabling parallel build
-    env.unset('MAKEFLAGS')
+    # env.unset('MAKEFLAGS')
 
     # Avoid that libraries of build dependencies get hijacked.
     env.unset('LD_PRELOAD')
@@ -1028,7 +1038,7 @@ def get_cmake_prefix_path(pkg):
 
 def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
-                       input_multiprocess_fd):
+                       input_multiprocess_fd, jsfd1, jsfd2):
 
     context = kwargs.get('context', 'build')
@@ -1135,6 +1145,8 @@ def child_fun():
     """
     parent_pipe, child_pipe = multiprocessing.Pipe()
     input_multiprocess_fd = None
+    jobserver_fd1 = None
+    jobserver_fd2 = None
 
     serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)
@@ -1144,11 +1156,17 @@ def child_fun():
                                                             'fileno'):
         input_fd = os.dup(sys.stdin.fileno())
         input_multiprocess_fd = MultiProcessFd(input_fd)
+    mflags = os.environ.get('MAKEFLAGS', False)
+    if mflags:
+        m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
+        if m:
+            jobserver_fd1 = MultiProcessFd(int(m.group(1)))
+            jobserver_fd2 = MultiProcessFd(int(m.group(2)))
 
     p = multiprocessing.Process(
         target=_setup_pkg_and_run,
         args=(serialized_pkg, function, kwargs, child_pipe,
-              input_multiprocess_fd))
+              input_multiprocess_fd, jobserver_fd1, jobserver_fd2))
     p.start()
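
To illustrate the `MAKEFLAGS` parse in the last hunk, a standalone sketch (the example value is made up; `--jobserver-[^=]*` matches both the older `--jobserver-fds` and the newer `--jobserver-auth` spelling):

    import re

    mflags = '-j8 --jobserver-auth=7,8'  # hypothetical example value
    m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
    if m:
        # The two captured numbers are the read and write ends of the
        # jobserver pipe that the parent make left open for its children.
        read_fd, write_fd = int(m.group(1)), int(m.group(2))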

lib/spack/spack/util/executable.py

@@ -202,7 +202,8 @@ def streamify(arg, mode):
                 stdin=istream,
                 stderr=estream,
                 stdout=ostream,
-                env=env)
+                env=env,
+                close_fds=False,)
 
             out, err = proc.communicate()
             result = None
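
`close_fds=False` is what lets the jobserver pipe survive into the build processes Spack spawns: with Python 3's default of `close_fds=True`, inherited descriptors beyond stdin/stdout/stderr are closed in the child. A minimal sketch of the difference, assuming a POSIX system (the pipe stands in for the jobserver):

    import os
    import subprocess

    r, w = os.pipe()              # stand-in for the jobserver pipe
    os.set_inheritable(r, True)   # the fd must also be marked inheritable
    os.write(w, b'x')

    # With close_fds=False the child sees fd r and can read the token;
    # with the default close_fds=True, fd r would not exist in the child.
    subprocess.run(
        ['python3', '-c',
         'import os, sys; print(os.read(int(sys.argv[1]), 1))', str(r)],
        close_fds=False,
    )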

var/spack/repos/builtin/packages/cmake/package.py

@@ -281,10 +281,10 @@ def bootstrap_args(self):
             self.generator = make
 
             if not sys.platform == 'win32':
-                args.extend(
-                    ['--prefix={0}'.format(self.prefix),
-                     '--parallel={0}'.format(make_jobs)]
-                )
+                args.append('--prefix={0}'.format(self.prefix))
+
+                if spack.build_environment.should_set_parallel_jobs(jobserver_support=True):
+                    args.append('--parallel={0}'.format(make_jobs))
 
                 if '+ownlibs' in spec:
                     # Build and link to the CMake-provided third-party libraries