Compiler search uses a pool of workers (#10190)

- spack.compilers.find_compilers now uses a multiprocess.pool.ThreadPool to execute
  system commands for the detection of compiler versions.

- A few memoized functions have been introduced to avoid poking the filesystem multiple
  times for the same results.

- Performance is much improved, and Spack no longer fork-bombs the system when doing a `compiler find`.
This commit is contained in:
Massimiliano Culpo 2019-06-07 18:57:26 +02:00 committed by Todd Gamblin
parent 9c1c50fb76
commit 6d56d45454
17 changed files with 501 additions and 300 deletions

View File

@ -21,7 +21,7 @@
import six
from llnl.util import tty
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, memoized
from spack.util.executable import Executable
__all__ = [
@ -1351,3 +1351,63 @@ def find_libraries(libraries, root, shared=True, recursive=False):
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries]
return LibraryList(find(root, libraries, recursive))
@memoized
def can_access_dir(path):
    """Check whether ``path`` is a directory the current user can read
    and traverse.

    Args:
        path: filesystem path to be tested

    Returns:
        True if ``path`` is a readable, searchable directory, else False
    """
    is_directory = os.path.isdir(path)
    return is_directory and os.access(path, os.R_OK | os.X_OK)
@memoized
def files_in(*search_paths):
    """Return all the files contained in the directories passed as arguments.

    Caller must ensure that each path in ``search_paths`` is a directory;
    directories that cannot be accessed are silently skipped.

    Args:
        *search_paths: directories to be searched

    Returns:
        List of (filename, full_path) tuples, one per regular file found.
    """
    result = []
    for directory in search_paths:
        if not can_access_dir(directory):
            continue
        for entry in os.listdir(directory):
            full_path = os.path.join(directory, entry)
            if os.path.isfile(full_path):
                result.append((entry, full_path))
    return result
def search_paths_for_executables(*path_hints):
    """Expand a list of path hints into candidate executable search paths.

    Args:
        *path_hints (list of paths): list of paths taken into
            consideration for a search

    Returns:
        A list containing every existing directory in ``path_hints``,
        each immediately followed by its ``bin`` subdirectory when that
        subdirectory exists as well.
    """
    candidates = []
    for hint in path_hints:
        if not os.path.isdir(hint):
            # Non-existing hints contribute nothing to the search space.
            continue
        candidates.append(hint)
        bin_subdir = os.path.join(hint, 'bin')
        if os.path.isdir(bin_subdir):
            candidates.append(bin_subdir)
    return candidates

View File

@ -8,25 +8,9 @@
than multiprocessing.Pool.apply() can. For example, apply() will fail
to pickle functions if they're passed indirectly as parameters.
"""
from multiprocessing import Process, Pipe, Semaphore, Value
from multiprocessing import Semaphore, Value
__all__ = ['spawn', 'parmap', 'Barrier']
def spawn(f):
def fun(pipe, x):
pipe.send(f(x))
pipe.close()
return fun
def parmap(f, elements):
pipe = [Pipe() for x in elements]
proc = [Process(target=spawn(f), args=(c, x))
for x, (p, c) in zip(elements, pipe)]
[p.start() for p in proc]
[p.join() for p in proc]
return [p.recv() for (p, c) in pipe]
__all__ = ['Barrier']
class Barrier:

View File

@ -3,7 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from datetime import datetime
from __future__ import unicode_literals
import fcntl
import os
import struct
@ -11,6 +12,8 @@
import termios
import textwrap
import traceback
import six
from datetime import datetime
from six import StringIO
from six.moves import input
@ -155,7 +158,7 @@ def msg(message, *args, **kwargs):
cwrite("@*b{%s==>} %s%s" % (
st_text, get_timestamp(), cescape(message)))
for arg in args:
print(indent + str(arg))
print(indent + six.text_type(arg))
def info(message, *args, **kwargs):
@ -172,17 +175,17 @@ def info(message, *args, **kwargs):
if _stacktrace:
st_text = process_stacktrace(st_countback)
cprint("@%s{%s==>} %s%s" % (
format, st_text, get_timestamp(), cescape(str(message))),
stream=stream)
format, st_text, get_timestamp(), cescape(six.text_type(message))
), stream=stream)
for arg in args:
if wrap:
lines = textwrap.wrap(
str(arg), initial_indent=indent, subsequent_indent=indent,
break_long_words=break_long_words)
six.text_type(arg), initial_indent=indent,
subsequent_indent=indent, break_long_words=break_long_words)
for line in lines:
stream.write(line + '\n')
else:
stream.write(indent + str(arg) + '\n')
stream.write(indent + six.text_type(arg) + '\n')
def verbose(message, *args, **kwargs):
@ -204,7 +207,7 @@ def error(message, *args, **kwargs):
kwargs.setdefault('format', '*r')
kwargs.setdefault('stream', sys.stderr)
info("Error: " + str(message), *args, **kwargs)
info("Error: " + six.text_type(message), *args, **kwargs)
def warn(message, *args, **kwargs):
@ -213,7 +216,7 @@ def warn(message, *args, **kwargs):
kwargs.setdefault('format', '*Y')
kwargs.setdefault('stream', sys.stderr)
info("Warning: " + str(message), *args, **kwargs)
info("Warning: " + six.text_type(message), *args, **kwargs)
def die(message, *args, **kwargs):
@ -237,7 +240,7 @@ def get_number(prompt, **kwargs):
while number is None:
msg(prompt, newline=False)
ans = input()
if ans == str(abort):
if ans == six.text_type(abort):
return None
if ans:
@ -303,7 +306,7 @@ def hline(label=None, **kwargs):
cols -= 2
cols = min(max_width, cols)
label = str(label)
label = six.text_type(label)
prefix = char * 2 + " "
suffix = " " + (cols - len(prefix) - clen(label)) * char

View File

@ -6,7 +6,7 @@
"""
Routines for printing columnar output. See ``colify()`` for more information.
"""
from __future__ import division
from __future__ import division, unicode_literals
import os
import sys

View File

@ -59,10 +59,14 @@
To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
from __future__ import unicode_literals
import re
import sys
from contextlib import contextmanager
import six
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
@ -244,7 +248,7 @@ def cescape(string):
Returns:
(str): the string with color codes escaped
"""
string = str(string)
string = six.text_type(string)
string = string.replace('@', '@@')
string = string.replace('}', '}}')
return string

View File

@ -5,6 +5,8 @@
"""Utility classes for logging the output of blocks of code.
"""
from __future__ import unicode_literals
import multiprocessing
import os
import re

View File

@ -56,18 +56,15 @@
attributes front_os and back_os. The operating system as described earlier,
will be responsible for compiler detection.
"""
import os
import inspect
import platform as py_platform
import llnl.util.multiproc as mp
import llnl.util.tty as tty
from llnl.util.lang import memoized, list_modules, key_ordering
import spack.compiler
import spack.paths
import spack.error as serr
from spack.util.naming import mod_to_class
from spack.util.environment import get_path
from spack.util.spack_yaml import syaml_dict
@ -229,100 +226,13 @@ def __repr__(self):
return self.__str__()
def _cmp_key(self):
return (self.name, self.version)
def find_compilers(self, *paths):
"""
Return a list of compilers found in the supplied paths.
This invokes the find() method for each Compiler class,
and appends the compilers detected to a list.
"""
if not paths:
paths = get_path('PATH')
# Make sure path elements exist, and include /bin directories
# under prefixes.
filtered_path = []
for p in paths:
# Eliminate symlinks and just take the real directories.
p = os.path.realpath(p)
if not os.path.isdir(p):
continue
filtered_path.append(p)
# Check for a bin directory, add it if it exists
bin = os.path.join(p, 'bin')
if os.path.isdir(bin):
filtered_path.append(os.path.realpath(bin))
# Once the paths are cleaned up, do a search for each type of
# compiler. We can spawn a bunch of parallel searches to reduce
# the overhead of spelunking all these directories.
# NOTE: we import spack.compilers here to avoid init order cycles
import spack.compilers
types = spack.compilers.all_compiler_types()
compiler_lists = mp.parmap(
lambda cmp_cls: self.find_compiler(cmp_cls, *filtered_path),
types)
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = [comp for cl in compiler_lists for comp in cl]
return clist
def find_compiler(self, cmp_cls, *path):
"""Try to find the given type of compiler in the user's
environment. For each set of compilers found, this returns
compiler objects with the cc, cxx, f77, fc paths and the
version filled in.
This will search for compilers with the names in cc_names,
cxx_names, etc. and it will group them if they have common
prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
be grouped with g++-mp-4.7 and gfortran-mp-4.7.
"""
dicts = mp.parmap(
lambda t: cmp_cls._find_matches_in_path(*t),
[(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
(cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
(cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
(cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
all_keys = set()
for d in dicts:
all_keys.update(d)
compilers = {}
for k in all_keys:
ver, pre, suf = k
# Skip compilers with unknown version.
if ver == 'unknown':
continue
paths = tuple(pn[k] if k in pn else None for pn in dicts)
spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
if ver in compilers:
prev = compilers[ver]
# prefer the one with more compilers.
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
newcount = len([p for p in paths if p is not None])
prevcount = len([p for p in prev_paths if p is not None])
# Don't add if it's not an improvement over prev compiler.
if newcount <= prevcount:
continue
compilers[ver] = cmp_cls(spec, self, py_platform.machine(), paths)
return list(compilers.values())
return self.name, self.version
def to_dict(self):
d = {}
d['name'] = self.name
d['version'] = self.version
return d
return {
'name': self.name,
'version': self.version
}
@key_ordering

View File

@ -79,7 +79,7 @@ def compiler_find(args):
# Just let compiler_find do the
# entire process and return an empty config from all_compilers
# Default for any other process is init_config=True
compilers = [c for c in spack.compilers.find_compilers(*paths)]
compilers = [c for c in spack.compilers.find_compilers(paths)]
new_compilers = []
for c in compilers:
arch_spec = ArchSpec(None, c.operating_system, c.target)

View File

@ -7,15 +7,12 @@
import re
import itertools
import llnl.util.lang
import llnl.util.tty as tty
import llnl.util.multiproc as mp
import llnl.util.filesystem
import spack.error
import spack.spec
import spack.architecture
from spack.util.executable import Executable, ProcessError
from spack.util.environment import get_path
import spack.util.executable
__all__ = ['Compiler']
@ -35,7 +32,7 @@ def get_compiler_version_output(compiler_path, version_arg):
compiler_path (path): path of the compiler to be invoked
version_arg (str): the argument used to extract version information
"""
compiler = Executable(compiler_path)
compiler = spack.util.executable.Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
return output
@ -250,52 +247,19 @@ def fc_version(cls, fc):
return cls.default_version(fc)
@classmethod
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
"""Finds compilers in the paths supplied.
Looks for all combinations of ``compiler_names`` with the
``prefixes`` and ``suffixes`` defined for this compiler
class. If any compilers match the compiler_names,
prefixes, or suffixes, uses ``detect_version`` to figure
out what version the compiler is.
This returns a dict with compilers grouped by (prefix,
suffix, version) tuples. This can be further organized by
find().
"""
if not path:
path = get_path('PATH')
def search_regexps(cls, language):
# Compile all the regular expressions used for files beforehand.
# This searches for any combination of <prefix><name><suffix>
# defined for the compiler
compiler_names = getattr(cls, '{0}_names'.format(language))
prefixes = [''] + cls.prefixes
suffixes = [''] + cls.suffixes
checks = []
for directory in path:
if not (os.path.isdir(directory) and
os.access(directory, os.R_OK | os.X_OK)):
continue
files = os.listdir(directory)
for exe in files:
full_path = os.path.join(directory, exe)
prod = itertools.product(prefixes, compiler_names, suffixes)
for pre, name, suf in prod:
regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)
match = re.match(regex, exe)
if match:
key = (full_path,) + match.groups() + (detect_version,)
checks.append(key)
successful = [k for k in mp.parmap(_get_versioned_tuple, checks)
if k is not None]
# The 'successful' list is ordered like the input paths.
# Reverse it here so that the dict creation (last insert wins)
# does not spoil the intended precedence.
successful.reverse()
return dict(((v, p, s), path) for v, p, s, path in successful)
regexp_fmt = r'^({0}){1}({2})$'
return [
re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
for prefix, name, suffix in
itertools.product(prefixes, compiler_names, suffixes)
]
def setup_custom_environment(self, pkg, env):
"""Set any environment variables necessary to use the compiler."""
@ -313,28 +277,6 @@ def __str__(self):
str(self.operating_system)))))
def _get_versioned_tuple(compiler_check_tuple):
    """Run one compiler-version check and normalize the result.

    Args:
        compiler_check_tuple: a (full_path, prefix, suffix, detect_version)
            tuple, where ``detect_version`` is a callable invoked with the
            compiler's path.

    Returns:
        A (version, prefix, suffix, full_path) tuple on success, or None
        when the version could not be determined (failures are only
        logged at debug level, never raised).
    """
    full_path, prefix, suffix, detect_version = compiler_check_tuple
    try:
        version = detect_version(full_path)
        # Treat a falsy or whitespace-only version as a failed detection.
        if (not version) or (not str(version).strip()):
            tty.debug(
                "Couldn't get version for compiler %s" % full_path)
            return None
        return (version, prefix, suffix, full_path)
    except ProcessError as e:
        # The candidate executable ran but exited with an error.
        tty.debug(
            "Couldn't get version for compiler %s" % full_path, e)
        return None
    except Exception as e:
        # Catching "Exception" here is fine because it just
        # means something went wrong running a candidate executable.
        tty.debug("Error while executing candidate compiler %s"
                  % full_path,
                  "%s: %s" % (e.__class__.__name__, e))
        return None
class CompilerAccessError(spack.error.SpackError):
def __init__(self, path):

View File

@ -6,9 +6,17 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import collections
import itertools
import multiprocessing.pool
import os
from llnl.util.lang import list_modules
import platform as py_platform
import six
import llnl.util.lang
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.paths
import spack.error
@ -16,6 +24,7 @@
import spack.config
import spack.architecture
import spack.util.imp as simp
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
_imported_compilers_module = 'spack.compilers'
@ -176,18 +185,55 @@ def all_compiler_specs(scope=None, init_config=True):
for s in all_compilers_config(scope, init_config)]
def find_compilers(*paths):
"""Return a list of compilers found in the supplied paths.
This invokes the find_compilers() method for each operating
system associated with the host platform, and appends
the compilers detected to a list.
def find_compilers(path_hints=None):
"""Returns the list of compilers found in the paths given as arguments.
Args:
path_hints (list or None): list of path hints where to look for.
A sensible default based on the ``PATH`` environment variable
will be used if the value is None
Returns:
List of compilers found
"""
# Find compilers for each operating system class
oss = all_os_classes()
compiler_lists = []
for o in oss:
compiler_lists.extend(o.find_compilers(*paths))
return compiler_lists
if path_hints is None:
path_hints = get_path('PATH')
default_paths = fs.search_paths_for_executables(*path_hints)
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, 'compiler_search_paths', default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
def valid_version(item):
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non ascii
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False
def remove_errors(item):
value, _ = item
return value
return make_compiler_list(
map(remove_errors, filter(valid_version, detected_versions))
)
def supported_compilers():
@ -196,8 +242,8 @@ def supported_compilers():
See available_compilers() to get a list of all the available
versions of supported compilers.
"""
return sorted(
name for name in list_modules(spack.paths.compilers_path))
return sorted(name for name in
llnl.util.lang.list_modules(spack.paths.compilers_path))
@_auto_compiler_spec
@ -358,6 +404,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
return cfg_file_to_duplicates
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
assert(supported(compiler_name))
@ -390,6 +437,192 @@ def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple(
'CompilerID', ['os', 'compiler_name', 'version']
)
#: Variations on a matched compiler name
NameVariation = collections.namedtuple('NameVariation', ['prefix', 'suffix'])
#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#: as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
'DetectVersionArgs', ['id', 'variation', 'language', 'path']
)
def arguments_to_detect_version_fn(operating_system, paths):
    """Returns a list of DetectVersionArgs tuples to be used in a
    corresponding function to detect compiler versions.

    The ``operating_system`` instance can customize the behavior of this
    function by providing a method called with the same name.

    Args:
        operating_system (OperatingSystem): the operating system on which
            we are looking for compilers
        paths: paths to search for compilers

    Returns:
        List of DetectVersionArgs tuples. Each item in the list will be later
        mapped to the corresponding function call to detect the version of the
        compilers in this OS.
    """
    def _default(search_paths):
        # Default implementation: cross every file found in the search
        # paths with every compiler-name regexp and keep the matches.
        command_arguments = []
        files_to_be_tested = fs.files_in(*search_paths)
        for compiler_name in spack.compilers.supported_compilers():

            compiler_cls = class_for_compiler_name(compiler_name)

            for language in ('cc', 'cxx', 'f77', 'fc'):

                # Select only the files matching a regexp
                for (file, full_path), regexp in itertools.product(
                        files_to_be_tested,
                        compiler_cls.search_regexps(language)
                ):
                    match = regexp.match(file)
                    if match:
                        # Version is left as None here; it is filled in
                        # later by detect_version.
                        compiler_id = CompilerID(
                            operating_system, compiler_name, None
                        )
                        detect_version_args = DetectVersionArgs(
                            id=compiler_id,
                            variation=NameVariation(*match.groups()),
                            language=language, path=full_path
                        )
                        command_arguments.append(detect_version_args)

        # Reverse it here so that the dict creation (last insert wins)
        # does not spoil the intended precedence.
        return reversed(command_arguments)

    # Allow the OS object to override the default search entirely.
    fn = getattr(
        operating_system, 'arguments_to_detect_version_fn', _default
    )
    return fn(paths)
def detect_version(detect_version_args):
    """Computes the version of a compiler and adds it to the information
    passed as input.

    As this function is meant to be executed by worker processes it won't
    raise any exception but instead will return a (value, error) tuple that
    needs to be checked by the code dispatching the calls.

    Args:
        detect_version_args (DetectVersionArgs): information on the
            compiler for which we should detect the version.

    Returns:
        A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
        version of the compiler was computed correctly and the first argument
        of the tuple will contain it. Otherwise ``error`` is a string
        containing an explanation on why the version couldn't be computed.
    """
    def _default(fn_args):
        # Generic detection: invoke the compiler executable and let the
        # compiler class parse the version out of its output.
        compiler_id = fn_args.id
        language = fn_args.language
        compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
        path = fn_args.path

        # Get compiler names and the callback to detect their versions
        callback = getattr(compiler_cls, '{0}_version'.format(language))

        try:
            version = callback(path)
            # An empty, whitespace-only or 'unknown' version counts as a
            # failed detection.
            if version and six.text_type(version).strip() \
                    and version != 'unknown':
                value = fn_args._replace(
                    id=compiler_id._replace(version=version)
                )
                return value, None

            error = "Couldn't get version for compiler {0}".format(path)
        except spack.util.executable.ProcessError as e:
            error = "Couldn't get version for compiler {0}\n".format(path) + \
                six.text_type(e)
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            error = "Error while executing candidate compiler {0}" \
                "\n{1}: {2}".format(path, e.__class__.__name__,
                                    six.text_type(e))
        return None, error

    operating_system = detect_version_args.id.os
    # An operating system may override the default detection strategy
    # (e.g. CNL queries the module system instead of running executables).
    fn = getattr(operating_system, 'detect_version', _default)
    return fn(detect_version_args)
def make_compiler_list(detected_versions):
    """Process a list of detected versions and turn them into a list of
    compiler specs.

    Args:
        detected_versions (list): list of DetectVersionArgs containing a
            valid version

    Returns:
        list of Compiler objects
    """
    # We don't sort on the path of the compiler
    def sort_fn(item):
        return item.id, item.variation, item.language

    compilers_s = sorted(detected_versions, key=sort_fn)

    # Gather items in a dictionary by the id, name variation and language
    compilers_d = {}
    for sort_key, group in itertools.groupby(compilers_s, key=sort_fn):
        compiler_id, name_variation, language = sort_key
        by_compiler_id = compilers_d.setdefault(compiler_id, {})
        by_name_variation = by_compiler_id.setdefault(name_variation, {})
        by_name_variation[language] = next(x.path for x in group)

    # For each unique compiler id select the name variation with most entries
    # i.e. the one that supports most languages
    compilers = []

    def _default(cmp_id, paths):
        # Build a single compiler object for the (os, name, version) id,
        # picking the detected path for each supported language.
        operating_system, compiler_name, version = cmp_id
        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
        spec = spack.spec.CompilerSpec(compiler_cls.name, version)
        paths = [paths.get(language, None)
                 for language in ('cc', 'cxx', 'f77', 'fc')]
        compiler = compiler_cls(
            spec, operating_system, py_platform.machine(), paths
        )
        return [compiler]

    for compiler_id, by_compiler_id in compilers_d.items():
        # NOTE(review): on a tie in language count this compares the
        # NameVariation tuples themselves to break it — confirm every
        # variation's fields are orderable (e.g. not None) in that case.
        _, selected_name_variation = max(
            (len(by_compiler_id[variation]), variation)
            for variation in by_compiler_id
        )

        # Add it to the list of compilers
        selected = by_compiler_id[selected_name_variation]
        operating_system, _, _ = compiler_id
        # The OS may provide its own factory (e.g. CNL's make_compilers).
        make_compilers = getattr(operating_system, 'make_compilers', _default)
        compilers.extend(make_compilers(compiler_id, selected))

    return compilers
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):

View File

@ -6,7 +6,6 @@
import re
import llnl.util.tty as tty
import llnl.util.multiproc as mp
from spack.architecture import OperatingSystem
from spack.util.module_cmd import module
@ -24,6 +23,7 @@ def __init__(self):
name = 'cnl'
version = self._detect_crayos_version()
super(Cnl, self).__init__(name, version)
self.modulecmd = module
def __str__(self):
return self.name + str(self.version)
@ -35,38 +35,54 @@ def _detect_crayos_version(self):
latest_version = max(major_versions)
return latest_version
def find_compilers(self, *paths):
# function-local so that cnl doesn't depend on spack.config
def arguments_to_detect_version_fn(self, paths):
import spack.compilers
types = spack.compilers.all_compiler_types()
compiler_lists = mp.parmap(
lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
command_arguments = []
for compiler_name in spack.compilers.supported_compilers():
cmp_cls = spack.compilers.class_for_compiler_name(compiler_name)
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = [comp for cl in compiler_lists for comp in cl]
return clist
# If the compiler doesn't have a corresponding
# Programming Environment, skip to the next
if cmp_cls.PrgEnv is None:
continue
def find_compiler(self, cmp_cls, *paths):
# function-local so that cnl doesn't depend on spack.config
import spack.spec
compilers = []
if cmp_cls.PrgEnv:
if not cmp_cls.PrgEnv_compiler:
if cmp_cls.PrgEnv_compiler is None:
tty.die('Must supply PrgEnv_compiler with PrgEnv')
output = module('avail', cmp_cls.PrgEnv_compiler)
version_regex = r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler
matches = re.findall(version_regex, output)
for name, version in matches:
v = version
comp = cmp_cls(
spack.spec.CompilerSpec(name + '@' + v),
self, "any",
['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])
compiler_id = spack.compilers.CompilerID(self, compiler_name, None)
detect_version_args = spack.compilers.DetectVersionArgs(
id=compiler_id, variation=(None, None),
language='cc', path='cc'
)
command_arguments.append(detect_version_args)
return command_arguments
compilers.append(comp)
def detect_version(self, detect_version_args):
    """Override of the generic compiler-version detection for Cray CNL.

    Instead of running a compiler executable, query the module system
    for the available Programming Environment modules of the compiler.

    Args:
        detect_version_args (DetectVersionArgs): id of the compiler whose
            versions should be detected.

    Returns:
        A ``(DetectVersionArgs, error)`` tuple; ``error`` is always None
        since no external executable that could fail is launched here.
    """
    import spack.compilers
    modulecmd = self.modulecmd
    compiler_name = detect_version_args.id.compiler_name
    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    output = modulecmd('avail', compiler_cls.PrgEnv_compiler)
    # Every "<PrgEnv_compiler>/<version>" entry in the module listing is
    # a candidate version.
    version_regex = r'(%s)/([\d\.]+[\d])' % compiler_cls.PrgEnv_compiler
    matches = re.findall(version_regex, output)
    # All versions found are packed into a single tuple; make_compilers
    # expands them into one compiler object per version.
    version = tuple(version for _, version in matches)
    compiler_id = detect_version_args.id
    value = detect_version_args._replace(
        id=compiler_id._replace(version=version)
    )
    return value, None
def make_compilers(self, compiler_id, paths):
    """Construct one compiler object per version detected for this id.

    Args:
        compiler_id (CompilerID): id whose ``version`` attribute holds
            the tuple of versions found by ``detect_version``.
        paths: language-to-path mapping (unused here: Cray compilers are
            always driven through the cc/CC/ftn wrappers).

    Returns:
        List of compiler objects, one for each detected version.
    """
    # Local imports so that cnl doesn't depend on spack.config at module
    # load time. The original code referenced spack.compilers without
    # importing it, relying on a side effect of other imports; make the
    # dependency explicit.
    import spack.compilers
    import spack.spec

    name = compiler_id.compiler_name
    cmp_cls = spack.compilers.class_for_compiler_name(name)
    compilers = []
    for v in compiler_id.version:
        comp = cmp_cls(
            spack.spec.CompilerSpec(name + '@' + v),
            self, "any",
            ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])

        compilers.append(comp)
    return compilers

View File

@ -3,52 +3,63 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import llnl.util.filesystem as fs
from spack.operating_systems.linux_distro import LinuxDistro
from spack.util.environment import get_path
from spack.util.module_cmd import module
@contextlib.contextmanager
def unload_programming_environment():
    """Context manager that unloads Cray Programming Environments.

    The environment present on entry is restored on exit, even when the
    managed block raises (the original code restored it only on a normal
    exit, leaving the user's PrgEnv unloaded after a failure).
    """
    env_bu = None

    # We rely on the fact that the PrgEnv-* modules set the PE_ENV
    # environment variable.
    if 'PE_ENV' in os.environ:
        # Copy environment variables to restore them after the compiler
        # detection. We expect that the only thing PrgEnv-* modules do is
        # the environment variables modifications.
        env_bu = os.environ.copy()

        # Get the name of the module from the environment variable.
        prg_env = 'PrgEnv-' + os.environ['PE_ENV'].lower()

        # Unload the PrgEnv-* module. By doing this we intentionally
        # provoke errors when the Cray's compiler wrappers are executed
        # (Error: A PrgEnv-* modulefile must be loaded.) so they will not
        # be detected as valid compilers by the overridden method. We also
        # expect that the modules that add the actual compilers' binaries
        # into the PATH environment variable (i.e. the following modules:
        # 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
        # specified as prerequisites in the PrgEnv-* modulefiles.
        module('unload', prg_env)

    try:
        yield
    finally:
        # Restore the environment unconditionally so an exception inside
        # the managed block cannot leak the modified environment.
        if env_bu is not None:
            os.environ.clear()
            os.environ.update(env_bu)
class CrayFrontend(LinuxDistro):
"""Represents OS that runs on login and service nodes of the Cray platform.
It acts as a regular Linux without Cray-specific modules and compiler
wrappers."""
def find_compilers(self, *paths):
"""Calls the overridden method but prevents it from detecting Cray
compiler wrappers to avoid possible false detections. The detected
compilers come into play only if a user decides to work with the Cray's
frontend OS as if it was a regular Linux environment."""
@property
def compiler_search_paths(self):
"""Calls the default function but unloads Cray's programming
environments first.
env_bu = None
# We rely on the fact that the PrgEnv-* modules set the PE_ENV
# environment variable.
if 'PE_ENV' in os.environ:
# Copy environment variables to restore them after the compiler
# detection. We expect that the only thing PrgEnv-* modules do is
# the environment variables modifications.
env_bu = os.environ.copy()
# Get the name of the module from the environment variable.
prg_env = 'PrgEnv-' + os.environ['PE_ENV'].lower()
# Unload the PrgEnv-* module. By doing this we intentionally
# provoke errors when the Cray's compiler wrappers are executed
# (Error: A PrgEnv-* modulefile must be loaded.) so they will not
# be detected as valid compilers by the overridden method. We also
# expect that the modules that add the actual compilers' binaries
# into the PATH environment variable (i.e. the following modules:
# 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
# specified as prerequisites in the PrgEnv-* modulefiles.
module('unload', prg_env)
# Call the overridden method.
clist = super(CrayFrontend, self).find_compilers(*paths)
# Restore the environment.
if env_bu is not None:
os.environ.clear()
os.environ.update(env_bu)
return clist
This prevents from detecting Cray compiler wrappers and avoids
possible false detections.
"""
with unload_programming_environment():
search_paths = fs.search_paths_for_executables(*get_path('PATH'))
return search_paths

View File

@ -1385,7 +1385,7 @@ def bootstrap_compiler(self, **kwargs):
dep.concretize()
dep.package.do_install(**kwargs)
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers(dep.prefix)
spack.compilers.find_compilers([dep.prefix])
)
def do_install(self, **kwargs):

View File

@ -141,3 +141,10 @@ def test_user_input_combination(config):
)
res = all(results)
assert res
def test_operating_system_conversion_to_dict():
    """An OperatingSystem serializes its name and version to a dict."""
    os_obj = spack.architecture.OperatingSystem('os', '1.0')
    expected = {'name': 'os', 'version': '1.0'}
    assert os_obj.to_dict() == expected

View File

@ -3,7 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import json
import sys
from jsonschema import validate
@ -36,6 +39,11 @@ def test_specs_deps(tmpdir, config):
validate(deps_object, specs_deps_schema)
@pytest.mark.skipif(
sys.version_info[:2] < (2, 7),
reason="For some reason in Python2.6 we get a utf-32 string "
"that can't be parsed"
)
def test_specs_staging(config):
"""Make sure we achieve the best possible staging for the following
spec DAG::

View File

@ -3,11 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import pytest
import llnl.util.filesystem
import llnl.util.multiproc
import spack.cmd.compiler
import spack.compilers
@ -58,12 +55,7 @@ def test_compiler_remove(self):
compilers = spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers
def test_compiler_add(self, mock_compiler_dir, monkeypatch):
# This test randomly stall on Travis when spawning processes
# in Python 2.6 unit tests
if sys.version_info < (3, 0, 0):
monkeypatch.setattr(llnl.util.multiproc, 'parmap', map)
def test_compiler_add(self, mock_compiler_dir):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())

View File

@ -5,6 +5,8 @@
import pytest
import sys
from copy import copy
from six import iteritems
@ -23,7 +25,29 @@
import spack.compilers.xl_r
import spack.compilers.fj
from spack.compiler import _get_versioned_tuple, Compiler
from spack.compiler import Compiler
@pytest.fixture()
def make_args_for_version(monkeypatch):
    # Factory fixture: builds a DetectVersionArgs for a mock 'gcc' whose
    # cc_version callback is patched to report the requested ``version``.
    def _factory(version, path='/usr/bin/gcc'):
        # Stand-in for an OperatingSystem; detect_version only needs it to
        # lack a 'detect_version' attribute so the default path is taken.
        class MockOs(object):
            pass

        compiler_name = 'gcc'
        compiler_cls = compilers.class_for_compiler_name(compiler_name)
        monkeypatch.setattr(compiler_cls, 'cc_version', lambda x: version)

        # Version is None: it will be filled in by detect_version.
        compiler_id = compilers.CompilerID(
            os=MockOs, compiler_name=compiler_name, version=None
        )
        variation = compilers.NameVariation(prefix='', suffix='')
        return compilers.DetectVersionArgs(
            id=compiler_id, variation=variation, language='cc', path=path
        )

    return _factory
def test_get_compiler_duplicates(config):
@ -45,17 +69,22 @@ def test_all_compilers(config):
assert len(filtered) == 1
def test_version_detection_is_empty():
no_version = lambda x: None
compiler_check_tuple = ('/usr/bin/gcc', '', r'\d\d', no_version)
assert not _get_versioned_tuple(compiler_check_tuple)
@pytest.mark.skipif(
sys.version_info[0] == 2, reason='make_args_for_version requires python 3'
)
@pytest.mark.parametrize('input_version,expected_version,expected_error', [
(None, None, "Couldn't get version for compiler /usr/bin/gcc"),
('4.9', '4.9', None)
])
def test_version_detection_is_empty(
make_args_for_version, input_version, expected_version, expected_error
):
args = make_args_for_version(version=input_version)
result, error = compilers.detect_version(args)
if not error:
assert result.id.version == expected_version
def test_version_detection_is_successful():
version = lambda x: '4.9'
compiler_check_tuple = ('/usr/bin/gcc', '', r'\d\d', version)
assert _get_versioned_tuple(compiler_check_tuple) == (
'4.9', '', r'\d\d', '/usr/bin/gcc')
assert error == expected_error
def test_compiler_flags_from_config_are_grouped():