Reworked compiler finding, storing, and formatting to allow multiple compilers with the same spec on different operating systems. TODO: fix the config tests; all others are up to date.
This commit is contained in:
parent
8cd13d4b35
commit
90b7b7ba5c
@@ -22,6 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from collections import namedtuple
import imp
import platform as py_platform
@@ -32,7 +33,10 @@
import llnl.util.tty as tty

import spack
import spack.compilers
from spack.util.naming import mod_to_class
from spack.util.environment import get_path
from spack.util.multiproc import parmap
import spack.error as serr


@@ -143,6 +147,7 @@ def operating_system(self, name):

        return self.operating_sys.get(name, None)


    @classmethod
    def detect(self):
        """ Subclass is responsible for implementing this method.
@@ -170,10 +175,9 @@ class OperatingSystem(object):
    find compilers we call find_compilers method for each operating system
    """

    def __init__(self, name, version, compiler_strategy="PATH"):
    def __init__(self, name, version):
        self.name = name
        self.version = version
        self.compiler_strategy = compiler_strategy

    def __str__(self):
        return self.name + self.version
@@ -182,13 +186,96 @@ def __repr__(self):
        return self.__str__()

    def _cmp_key(self):
        return (self.name, self.version, self.compiler_strategy)
        return (self.name, self.version)


    def find_compilers(self, *paths):
        """
        Return a list of compilers found in the suppied paths.
        This invokes the find() method for each Compiler class,
        and appends the compilers detected to a list.
        """
        if not paths:
            paths = get_path('PATH')
        # Make sure path elements exist, and include /bin directories
        # under prefixes.
        filtered_path = []
        for p in paths:
            # Eliminate symlinks and just take the real directories.
            p = os.path.realpath(p)
            if not os.path.isdir(p):
                continue
            filtered_path.append(p)

            # Check for a bin directory, add it if it exists
            bin = join_path(p, 'bin')
            if os.path.isdir(bin):
                filtered_path.append(os.path.realpath(bin))

        # Once the paths are cleaned up, do a search for each type of
        # compiler. We can spawn a bunch of parallel searches to reduce
        # the overhead of spelunking all these directories.
        types = spack.compilers.all_compiler_types()
        compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *filtered_path), types)

        # ensure all the version calls we made are cached in the parent
        # process, as well. This speeds up Spack a lot.
        clist = reduce(lambda x,y: x+y, compiler_lists)
        return clist

    def find_compiler(self, cmp_cls, *path):
        """Try to find the given type of compiler in the user's
           environment. For each set of compilers found, this returns
           compiler objects with the cc, cxx, f77, fc paths and the
           version filled in.

           This will search for compilers with the names in cc_names,
           cxx_names, etc. and it will group them if they have common
           prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
           be grouped with g++-mp-4.7 and gfortran-mp-4.7.
        """
        dicts = parmap(
            lambda t: cmp_cls._find_matches_in_path(*t),
            [(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
             (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
             (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
             (cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])

        all_keys = set()
        for d in dicts:
            all_keys.update(d)

        compilers = {}
        for k in all_keys:
            ver, pre, suf = k

            # Skip compilers with unknown version.
            if ver == 'unknown':
                continue

            paths = tuple(pn[k] if k in pn else None for pn in dicts)
            spec = spack.spec.CompilerSpec(cmp_cls.name, ver)

            if ver in compilers:
                prev = compilers[ver]

                # prefer the one with more compilers.
                prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
                newcount = len([p for p in paths if p is not None])
                prevcount = len([p for p in prev_paths if p is not None])

                # Don't add if it's not an improvement over prev compiler.
                if newcount <= prevcount:
                    continue

            compilers[ver] = cmp_cls(spec, self, paths)

        return list(compilers.values())

    def to_dict(self):
        d = {}
        d['name'] = self.name
        d['version'] = self.version
        d['compiler_strategy'] = self.compiler_strategy

        return d

@@ -261,7 +348,6 @@ def _operating_system_from_dict(os_dict):
    operating_system = OperatingSystem.__new__(OperatingSystem)
    operating_system.name = os_dict['name']
    operating_system.version = os_dict['version']
    operating_system.compiler_strategy = os_dict['compiler_strategy']
    return operating_system


def arch_from_dict(d):
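The key change above is that compiler detection now hangs off each OperatingSystem object instead of a per-platform compiler_strategy string, which is what lets two compilers share a spec across operating systems. A minimal standalone sketch of that shape (FakeOS and its canned result are hypothetical stand-ins, not Spack classes):

class FakeOS(object):
    def __init__(self, name, version):
        self.name = name
        self.version = version

    def __str__(self):
        return self.name + self.version

    def find_compilers(self, *paths):
        # A real OperatingSystem would scan the given paths (or, on Cray's
        # CNL, ask the module system); here we return canned data just to
        # show the shape of the result.
        return [{'spec': 'gcc@4.5.0', 'operating_system': str(self)}]

front_end = FakeOS('redhat', '6.7')
back_end = FakeOS('CNL', '10')

all_compilers = []
for os_ in (front_end, back_end):
    all_compilers.extend(os_.find_compilers('/usr/bin'))

print(all_compilers)   # the same spec appears once per operating system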
@@ -169,9 +169,8 @@ def set_compiler_environment_variables(pkg):

    os.environ['SPACK_COMPILER_SPEC'] = str(pkg.spec.compiler)

    if compiler.strategy == 'MODULES':
        for mod in compiler.modules:
            load_module(mod)
    for mod in compiler.modules:
        load_module(mod)


def set_build_environment_variables(pkg):
    """This ensures a clean install environment when we build packages.
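With the strategy check removed, module loading is unconditional: a compiler with an empty modules list simply loads nothing, so PATH-found and module-based compilers share one code path. A toy sketch only (load_module here is a stand-in, not Spack's real helper, and the module names are examples):

def load_module(mod):
    print("module load " + mod)

path_compiler_modules = []                            # e.g. a gcc found in $PATH
cray_compiler_modules = ['PrgEnv-gnu', 'gcc/4.9.2']   # e.g. a Cray PrgEnv compiler

for mod in path_compiler_modules:
    load_module(mod)   # never executes
for mod in cray_compiler_modules:
    load_module(mod)   # loads each module in order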
@@ -80,7 +80,7 @@ def compiler_add(args):
    if not paths:
        paths = get_path('PATH')

    compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
    compilers = [c for c in spack.compilers.find_compilers(*paths)
                 if c.spec not in spack.compilers.all_compilers(scope=args.scope)]

    if compilers:
@@ -125,6 +125,8 @@ def compiler_info(args):
        print "\tcxx = %s" % c.cxx
        print "\tf77 = %s" % c.f77
        print "\tfc = %s" % c.fc
        print "\tmodules = %s" % c.modules
        print "\toperating system = %s" % c.operating_system


def compiler_list(args):
@@ -106,14 +106,14 @@ class Compiler(object):
    PrgEnv_compiler = None


    def __init__(self, cspec, strategy, paths, modules=None):
    def __init__(self, cspec, operating_system, paths, modules=[], alias=None):
        def check(exe):
            if exe is None:
                return None
            _verify_executables(exe)
            return exe

        self.strategy = strategy
        self.operating_system = operating_system

        self.cc = check(paths[0])
        self.cxx = check(paths[1])
@@ -126,62 +126,33 @@ def check(exe):

        self.spec = cspec
        self.modules = modules

        self.alias = alias

    @property
    def version(self):
        return self.spec.version

    #
    # Compiler classes have methods for querying the version of
    # specific compiler executables. This is used when discovering compilers.
    #
    # Compiler *instances* are just data objects, and can only be
    # constructed from an actual set of executables.
    #

    @classmethod
    def default_version(cls, cc):
        """Override just this to override all compiler version functions."""
        return dumpversion(cc)

    @classmethod
    def cc_version(cls, cc):
        return cls.default_version(cc)

    @classmethod
    def cxx_version(cls, cxx):
        return cls.default_version(cxx)

    @classmethod
    def f77_version(cls, f77):
        return cls.default_version(f77)

    @classmethod
    def fc_version(cls, fc):
        return cls.default_version(fc)


    @classmethod
    def _find_matches_in_path(cls, compiler_names, detect_version, *path):
        """Finds compilers in the paths supplied.

           Looks for all combinations of ``compiler_names`` with the
           ``prefixes`` and ``suffixes`` defined for this compiler
           class. If any compilers match the compiler_names,
           prefixes, or suffixes, uses ``detect_version`` to figure
           out what version the compiler is.

           This returns a dict with compilers grouped by (prefix,
           suffix, version) tuples. This can be further organized by
           find().
        """

           Looks for all combinations of ``compiler_names`` with the
           ``prefixes`` and ``suffixes`` defined for this compiler
           class. If any compilers match the compiler_names,
           prefixes, or suffixes, uses ``detect_version`` to figure
           out what version the compiler is.

           This returns a dict with compilers grouped by (prefix,
           suffix, version) tuples. This can be further organized by
           find().
        """
        if not path:
            path = get_path('PATH')


        prefixes = [''] + cls.prefixes
        suffixes = [''] + cls.suffixes


        checks = []
        for directory in path:
            if not (os.path.isdir(directory) and
@@ -219,89 +190,34 @@ def check(key):
        successful = [key for key in parmap(check, checks) if key is not None]
        return dict(((v, p, s), path) for v, p, s, path in successful)

    @classmethod
    def find(cls, *path):
        compilers = []
        platform = spack.architecture.sys_type()
        strategies = [o.compiler_strategy for o in platform.operating_sys.values()]
        if 'PATH' in strategies:
            compilers.extend(cls.find_in_path(*path))
        if 'MODULES' in strategies:
            compilers.extend(cls.find_in_modules())
        return compilers

    #
    # Compiler classes have methods for querying the version of
    # specific compiler executables. This is used when discovering compilers.
    #
    # Compiler *instances* are just data objects, and can only be
    # constructed from an actual set of executables.
    #

    @classmethod
    def find_in_path(cls, *path):
        """Try to find this type of compiler in the user's
           environment. For each set of compilers found, this returns
           compiler objects with the cc, cxx, f77, fc paths and the
           version filled in.

           This will search for compilers with the names in cc_names,
           cxx_names, etc. and it will group them if they have common
           prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
           be grouped with g++-mp-4.7 and gfortran-mp-4.7.
        """
        dicts = parmap(
            lambda t: cls._find_matches_in_path(*t),
            [(cls.cc_names, cls.cc_version) + tuple(path),
             (cls.cxx_names, cls.cxx_version) + tuple(path),
             (cls.f77_names, cls.f77_version) + tuple(path),
             (cls.fc_names, cls.fc_version) + tuple(path)])

        all_keys = set()
        for d in dicts:
            all_keys.update(d)

        compilers = {}
        for k in all_keys:
            ver, pre, suf = k

            # Skip compilers with unknown version.
            if ver == 'unknown':
                continue

            paths = tuple(pn[k] if k in pn else None for pn in dicts)
            spec = spack.spec.CompilerSpec(cls.name, ver)

            if ver in compilers:
                prev = compilers[ver]

                # prefer the one with more compilers.
                prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
                newcount = len([p for p in paths if p is not None])
                prevcount = len([p for p in prev_paths if p is not None])

                # Don't add if it's not an improvement over prev compiler.
                if newcount <= prevcount:
                    continue

            compilers[ver] = cls(spec, 'PATH', paths)

        return list(compilers.values())

    def default_version(cls, cc):
        """Override just this to override all compiler version functions."""
        return dumpversion(cc)

    @classmethod
    def find_in_modules(cls):
        compilers = []
        if cls.PrgEnv:
            if not cls.PrgEnv_compiler:
                tty.die('Must supply PrgEnv_compiler with PrgEnv')
    def cc_version(cls, cc):
        return cls.default_version(cc)

            modulecmd = which('modulecmd')
            modulecmd.add_default_arg('python')

            output = modulecmd('avail', cls.PrgEnv_compiler, output=str, error=str)
            matches = re.findall(r'(%s)/([\d\.]+[\d])' % cls.PrgEnv_compiler, output)
            for name, version in matches:
                v = version
                comp = cls(spack.spec.CompilerSpec(name + '@' + v), 'MODULES',
                           ['cc', 'CC', 'ftn'], [cls.PrgEnv, name +'/' + v])
    @classmethod
    def cxx_version(cls, cxx):
        return cls.default_version(cxx)

                compilers.append(comp)
    @classmethod
    def f77_version(cls, f77):
        return cls.default_version(f77)

        return compilers
    @classmethod
    def fc_version(cls, fc):
        return cls.default_version(fc)


    def __repr__(self):
@@ -311,12 +227,8 @@ def __repr__(self):

    def __str__(self):
        """Return a string represntation of the compiler toolchain."""
        if self.strategy is 'MODULES':
            return "%s(%s)" % (
                self.name, '\n '.join((str(s) for s in (self.strategy, self.cc, self.cxx, self.f77, self.fc, self.modules))))
        else:
            return "%s(%s)" % (
                self.name, '\n '.join((str(s) for s in (self.strategy, self.cc, self.cxx, self.f77, self.fc))))
        return "%s(%s)" % (
            self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system)))))


class CompilerAccessError(spack.error.SpackError):
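For reference, the grouping that _find_matches_in_path feeds into keys each match by a (version, prefix, suffix) tuple, so the C, C++, and Fortran front ends of one toolchain collapse into a single Compiler entry. A toy, self-contained illustration with made-up keys and paths (no real PATH scan):

# Hypothetical per-language match dictionaries, shaped like the real results.
cc_matches  = {('4.7', '', '-mp-4.7'): '/usr/bin/gcc-mp-4.7'}
cxx_matches = {('4.7', '', '-mp-4.7'): '/usr/bin/g++-mp-4.7'}
f77_matches = {}                                   # no Fortran 77 wrapper found
fc_matches  = {('4.7', '', '-mp-4.7'): '/usr/bin/gfortran-mp-4.7'}

dicts = [cc_matches, cxx_matches, f77_matches, fc_matches]
all_keys = set()
for d in dicts:
    all_keys.update(d)

for key in all_keys:
    # Missing tools come through as None, exactly one tuple per toolchain.
    paths = tuple(d.get(key) for d in dicts)
    print("%s -> %s" % (key, str(paths)))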
@@ -29,6 +29,10 @@
import os
import platform
import copy
import hashlib
import base64
import yaml
import sys

from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
@@ -46,8 +50,8 @@
from spack.util.environment import get_path

_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_optional_instance_vars = ['modules', 'strategy']
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_other_instance_vars = ['modules', 'operating_system']

_default_order = []
# TODO: customize order in config file
@@ -67,93 +71,85 @@ def converter(cspec_like, *args, **kwargs):

def _to_dict(compiler):
    """Return a dict version of compiler suitable to insert in YAML."""
    d = {}
    d['spec'] = str(compiler.spec)
    d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars )
    d['operating_system'] = compiler.operating_system.to_dict()
    d['modules'] = compiler.modules

    if not compiler.alias:
        yaml_text = yaml.dump(
            d, default_flow_style=True, width=sys.maxint)
        sha = hashlib.sha1(yaml_text)
        compiler.alias = base64.b32encode(sha.digest()).lower()[:8]
    return {
        str(compiler.spec) : dict(
            (attr, getattr(compiler, attr, None))
            for attr in _required_instance_vars + _optional_instance_vars)
        compiler.alias: d
    }


def get_compiler_config(arch=None, scope=None):
def get_compiler_config(scope=None):
    """Return the compiler configuration for the specified architecture.
    """
    # Check whether we're on a front-end (native) architecture.
    my_arch = spack.architecture.Arch()
    if arch is None:
        arch = my_arch
    if isinstance(arch, basestring) and arch == 'all':
        name = 'all'
    else:
        name = arch.platform.name

    def init_compiler_config():
        """Compiler search used when Spack has no compilers."""
        config[name] = {}
        compilers = find_compilers(*get_path('PATH'))
        config = {}
        compilers = find_compilers()
        for compiler in compilers:
            config[name].update(_to_dict(compiler))
            config.update(_to_dict(compiler))
        spack.config.update_config('compilers', config, scope=scope)

    config = spack.config.get_config('compilers', scope=scope)
    # Update the configuration if there are currently no compilers
    # configured. Avoid updating automatically if there ARE site
    # compilers configured but no user ones.
    if (isinstance(arch, basestring) or arch == my_arch) and arch not in config:
        if scope is None:
            # We know no compilers were configured in any scope.
#    if (isinstance(arch, basestring) or arch == my_arch) and arch not in config:
    if scope is None:
        # We know no compilers were configured in any scope.
        init_compiler_config()
    elif scope == 'user':
        # Check the site config and update the user config if
        # nothing is configured at the site level.
        site_config = spack.config.get_config('compilers', scope='site')
        if not site_config:
            init_compiler_config()
        elif scope == 'user':
            # Check the site config and update the user config if
            # nothing is configured at the site level.
            site_config = spack.config.get_config('compilers', scope='site')
            if not site_config:
                init_compiler_config()

    return config[name] if name in config else {}
    return config


def add_compilers_to_config(compilers, arch=None, scope=None):
def add_compilers_to_config(compilers, scope=None):
    """Add compilers to the config for the specified architecture.

    Arguments:
      - compilers: a list of Compiler objects.
      - arch: arch to add compilers for.
      - scope: configuration scope to modify.
    """
    if arch is None:
        arch = spack.architecture.Arch()

    compiler_config = get_compiler_config(arch, scope)
    compiler_config = get_compiler_config(scope)
    for compiler in compilers:
        compiler_config[str(compiler.spec)] = dict(
            (c, getattr(compiler, c, "None"))
            for c in _required_instance_vars + _optional_instance_vars)
        compiler_config = _to_dict(compiler)

    update = { arch.platform.name : compiler_config }
    spack.config.update_config('compilers', update, scope)
    spack.config.update_config('compilers', compiler_config, scope)


@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
def remove_compiler_from_config(compiler_spec, scope=None):
    """Remove compilers from the config, by spec.

    Arguments:
      - compiler_specs: a list of CompilerSpec objects.
      - arch: arch to add compilers for.
      - scope: configuration scope to modify.
    """
    if arch is None:
        arch = spack.architecture.Arch()
    compiler_config = get_compiler_config(scope)
    matches = [(a,c) for (a,c) in compiler_config.items() if c['spec'] == compiler_spec]
    if len(matches) == 1:
        del compiler_config[matches[0][0]]
    else:
        CompilerSpecInsufficientlySpecificError(compiler_spec)

    compiler_config = get_compiler_config(arch, scope)
    del compiler_config[str(compiler_spec)]
    update = { arch : compiler_config }

    spack.config.update_config('compilers', update, scope)
    spack.config.update_config('compilers', compiler_config, scope)

_cache_config_file = {}

def all_compilers_config(arch=None, scope=None):
def all_compilers_config(scope=None):
    """Return a set of specs for all the compiler versions currently
       available to build with. These are instances of CompilerSpec.
    """
@@ -161,20 +157,16 @@ def all_compilers_config(arch=None, scope=None):
    global _cache_config_file  # Create a cache of the config file so we don't load all the time.

    if not _cache_config_file:
        arch_config = get_compiler_config(arch, scope)
        # Merge 'all' compilers with arch-specific ones.
        # Arch-specific compilers have higher precedence.
        _cache_config_file = get_compiler_config('all', scope=scope)
        _cache_config_file = spack.config._merge_yaml(_cache_config_file, arch_config)
        _cache_config_file = get_compiler_config(scope)
        return _cache_config_file

    else:
        return _cache_config_file

def all_compilers(arch=None, scope=None):
def all_compilers(scope=None):
    # Return compiler specs from the merged config.
    return [spack.spec.CompilerSpec(s)
            for s in all_compilers_config(arch, scope)]
    return [spack.spec.CompilerSpec(s['spec'])
            for s in all_compilers_config(scope).values()]


def default_compiler():
@@ -189,37 +181,19 @@ def default_compiler():
    return sorted(versions)[-1]


def find_compilers(*path):
def find_compilers():
    """Return a list of compilers found in the suppied paths.
       This invokes the find() method for each Compiler class,
       and appends the compilers detected to a list.
       This invokes the find_compilers() method for each operating
       system associated with the host platform, and appends
       the compilers detected to a list.
    """
    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in path:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists
        bin = join_path(p, 'bin')
        if os.path.isdir(bin):
            filtered_path.append(os.path.realpath(bin))

    # Once the paths are cleaned up, do a search for each type of
    # compiler. We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    types = all_compiler_types()
    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)

    # ensure all the version calls we made are cached in the parent
    # process, as well. This speeds up Spack a lot.
    clist = reduce(lambda x,y: x+y, compiler_lists)
    return clist
    # Find compilers for each operating system class
    oss = all_os_classes()
    compiler_lists = []
    for os in oss:
        compiler_lists.extend(os.find_compilers())

    return compiler_lists

def supported_compilers():
    """Return a set of names of compilers supported by Spack.
@@ -237,47 +211,60 @@ def supported(compiler_spec):


@_auto_compiler_spec
def find(compiler_spec, arch=None, scope=None):
def find(compiler_spec, scope=None):
    """Return specs of available compilers that match the supplied
       compiler spec. Return an list if nothing found."""
    return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
    return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]


@_auto_compiler_spec
def compilers_for_spec(compiler_spec, arch=None, scope=None):
def compilers_for_spec(compiler_spec, scope=None):
    """This gets all compilers that satisfy the supplied CompilerSpec.
       Returns an empty list if none are found.
    """
    config = all_compilers_config(arch, scope)
    config = all_compilers_config(scope)

    def get_compiler(cspec):
        items = config[str(cspec)]
    def get_compilers(cspec):
        compilers = []

        if not all(n in items for n in _required_instance_vars):
            raise InvalidCompilerConfigurationError(cspec)
        for aka, cmp in config.items():
            if cmp['spec'] != str(cspec):
                continue
            items = cmp
            alias = aka

            if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)):
                raise InvalidCompilerConfigurationError(cspec)

        cls = class_for_compiler_name(cspec.name)
            cls = class_for_compiler_name(cspec.name)

        strategy = items['strategy']
        if not strategy:
            raise InvalidCompilerConfigurationError(cspec)
            compiler_paths = []
            for c in _path_instance_vars:
                compiler_path = items['paths'][c]
                if compiler_path != "None":
                    compiler_paths.append(compiler_path)
                else:
                    compiler_paths.append(None)

        compiler_paths = []
        for c in _required_instance_vars:
            compiler_path = items[c]
            if compiler_path != "None":
                compiler_paths.append(compiler_path)
            mods = items.get('modules')
            if mods == 'None':
                mods = []

            if 'operating_system' in items:
                operating_system = spack.architecture._operating_system_from_dict( items['operating_system'] )
            else:
                compiler_paths.append(None)
                operating_system = None

        if 'modules' not in items:
            items['modules'] = None
        mods = items['modules']
            compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias))

        return cls(cspec, strategy, compiler_paths, mods)
        return compilers

    matches = find(compiler_spec, arch, scope)
    return [get_compiler(cspec) for cspec in matches]
    matches = set(find(compiler_spec, scope))
    compilers = []
    for cspec in matches:
        compilers.extend(get_compilers(cspec))
    return compilers
#    return [get_compilers(cspec) for cspec in matches]


@_auto_compiler_spec
@@ -285,8 +272,9 @@ def compiler_for_spec(compiler_spec, operating_system):
    """Get the compiler that satisfies compiler_spec. compiler_spec must
       be concrete."""
    assert(compiler_spec.concrete)

    compilers = [c for c in compilers_for_spec(compiler_spec)
                 if c.strategy == operating_system.compiler_strategy]
                 if c.operating_system == operating_system]
    if len(compilers) < 1:
        raise NoCompilerForSpecError(compiler_spec, operating_system)
    if len(compilers) > 1:
@@ -308,8 +296,20 @@ def class_for_compiler_name(compiler_name):
    return cls


def all_os_classes():
    """
    Return the list of classes for all operating systems available on
    this platform
    """
    classes = []

    platform = spack.architecture.sys_type()
    for os_class in platform.operating_sys.values():
        classes.append(os_class)

    return classes

def all_compiler_types():
#    return [class_for_compiler_name(c) for c in ['gcc']]
    return [class_for_compiler_name(c) for c in supported_compilers()]


@@ -318,7 +318,7 @@ def __init__(self, compiler_spec):
        super(InvalidCompilerConfigurationError, self).__init__(
            "Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
            "Compiler configuration must contain entries for all compilers: %s"
            % _required_instance_vars)
            % _path_instance_vars)


class NoCompilersError(spack.error.SpackError):
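Because entries are now keyed by an alias rather than by spec, _to_dict derives a short alias from a hash of the entry itself when none is given. A standalone sketch of that hashing idea, using an entry shaped like the sample gcc@4.5.0 data in the tests below (requires PyYAML; the dict contents here are illustrative):

import base64
import hashlib
import sys

import yaml   # PyYAML

# Hypothetical compiler entry in the new format.
d = {
    'spec': 'gcc@4.5.0',
    'paths': {'cc': '/gcc450', 'cxx': '/g++450',
              'f77': '/gfortran', 'fc': '/gfortran'},
    'operating_system': {'name': 'CNL', 'version': '10'},
    'modules': None,
}

# Dump the entry to a single line of YAML and derive a short, stable alias
# from its SHA1 digest, mirroring the fallback in _to_dict above.
yaml_text = yaml.dump(d, default_flow_style=True,
                      width=getattr(sys, 'maxint', sys.maxsize))
sha = hashlib.sha1(yaml_text.encode('utf-8'))
alias = base64.b32encode(sha.digest()).decode('ascii').lower()[:8]
print(alias)   # an 8-character base32 string used as the YAML key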
@@ -301,15 +301,11 @@ def concretize_compiler(self, spec):
        # Should think whether this can be more efficient
        def _proper_compiler_style(cspec, architecture):
            compilers = spack.compilers.compilers_for_spec(cspec)
            filter(lambda c: c.strategy == architecture.platform_os.compiler_strategy, compilers)
            #if architecture.platform_os.compiler_strategy == 'PATH':
            #    filter(lambda c: not c.modules, compilers)
            #if architecture.platform_os.compiler_strategy == 'MODULES':
            #    filter(lambda c: c.modules, compilers)
            filter(lambda c: c.operating_system == architecture.platform_os, compilers)
            return compilers


        all_compilers = spack.compilers.all_compilers(spec.architecture)
        all_compilers = spack.compilers.all_compilers()

        if (spec.compiler and
            spec.compiler.concrete and
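Concretization now matches a compiler to a spec by comparing operating systems directly instead of comparing strategy strings. A standalone illustration of that selection with hypothetical candidate data:

# Two candidates with the same spec but different operating systems.
candidates = [
    {'spec': 'gcc@4.5.0', 'operating_system': 'CNL10'},
    {'spec': 'gcc@4.5.0', 'operating_system': 'redhat6.7'},
]
platform_os = 'CNL10'   # the architecture's platform_os, stringified

usable = [c for c in candidates if c['operating_system'] == platform_os]
print(usable)   # only the CNL10 gcc@4.5.0 survives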
@@ -151,17 +151,16 @@
        'default': {},
        'additionalProperties': False,
        'patternProperties': {
            r'\w[\w-]*': {   # architecture
            r'\w[\w-]*': {   # alias
                'type': 'object',
                'additionalProperties': False,
                'patternProperties': {
                    r'\w[\w-]*@\w[\w-]*': {   # compiler spec
                'required': ['paths', 'spec', 'modules', 'operating_system'],
                'properties': {
                    'paths': {
                        'type': 'object',
                        'additionalProperties': False,
                        'required': ['cc', 'cxx', 'f77', 'fc'],
                        'properties': {
                            'strategy': { 'anyOf': [ {'type' : 'string' },
                                                     {'type' : 'null' }]},
                        'additionalProperties': False,
                        'properties': {
                            'cc': { 'anyOf': [ {'type' : 'string' },
                                               {'type' : 'null' }]},
                            'cxx': { 'anyOf': [ {'type' : 'string' },
@@ -169,13 +168,21 @@
                            'f77': { 'anyOf': [ {'type' : 'string' },
                                                {'type' : 'null' }]},
                            'fc': { 'anyOf': [ {'type' : 'string' },
                                               {'type' : 'null' }]},
                            'modules': { 'anyOf': [ {'type' : 'string'},
                                                    {'type' : 'null' },
                                                    {'type': 'array'},
                                                    ]}
                        },},},},},},},},

                                               {'type' : 'null' }]}}},
                    'spec': { 'type': 'string'},  # r'\w[\w-]*@\w[\w-]*'
                    'operating_system': {
                        'type': 'object',
                        'required': ['name', 'version'],
                        'additionalProperties': False,
                        'properties': {
                            'name': {'type': 'string'},
                            'version': {'type': 'string'}
                        }},
                    'modules': { 'anyOf': [ {'type' : 'string'},
                                            {'type' : 'null' },
                                            {'type': 'array'},
                                            ]}
                },},},},},},
    'mirrors': {
        '$schema': 'http://json-schema.org/schema#',
        'title': 'Spack mirror configuration file schema',
@@ -1,4 +1,11 @@
import re
import os

from spack.architecture import OperatingSystem
from spack.util.executable import *
import spack.spec
from spack.util.multiproc import parmap
import spack.compilers

class Cnl(OperatingSystem):
    """ Compute Node Linux (CNL) is the operating system used for the Cray XC
@@ -10,4 +17,46 @@ class Cnl(OperatingSystem):
    def __init__(self):
        name = 'CNL'
        version = '10'
        super(Cnl, self).__init__(name, version, "MODULES")
        super(Cnl, self).__init__(name, version)


    def find_compilers(self, *paths):
        types = spack.compilers.all_compiler_types()
        compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)

        # ensure all the version calls we made are cached in the parent
        # process, as well. This speeds up Spack a lot.
        clist = reduce(lambda x,y: x+y, compiler_lists)
        return clist


    def find_compiler(self, cmp_cls, *paths):
        compilers = []
        if cmp_cls.PrgEnv:
            if not cmp_cls.PrgEnv_compiler:
                tty.die('Must supply PrgEnv_compiler with PrgEnv')

            modulecmd = which('modulecmd')
            modulecmd.add_default_arg('python')

            # Save the environment variable to restore later
            old_modulepath = os.environ['MODULEPATH']
            # if given any explicit paths, search them for module files too
            if paths:
                module_paths = ':' + ':'.join(p for p in paths)
                os.environ['MODULEPATH'] = module_paths

            output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
            matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
            for name, version in matches:
                v = version
                comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self,
                               ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v])

                compilers.append(comp)

            # Restore modulepath environment variable
            if paths:
                os.environ['MODULEPATH'] = old_modulepath

        return compilers
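On CNL the PrgEnv-based detection parses `modulecmd avail` output with the regex shown above. A quick standalone run against canned text (the module names are made up; a real run goes through `modulecmd python avail <PrgEnv_compiler>`):

import re

PrgEnv_compiler = 'gcc'
output = """
PrgEnv-gnu/5.2.40
gcc/4.9.2
gcc/5.1.0(default)
"""

# Same pattern as in find_compiler: capture compiler name and version.
matches = re.findall(r'(%s)/([\d\.]+[\d])' % PrgEnv_compiler, output)
print(matches)   # [('gcc', '4.9.2'), ('gcc', '5.1.0')]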
@@ -12,8 +12,18 @@
from spack.platforms.bgq import Bgq
from spack.platforms.darwin import Darwin

class ArchitectureTest(unittest.TestCase):
from spack.test.mock_packages_test import *

#class ArchitectureTest(unittest.TestCase):
class ArchitectureTest(MockPackagesTest):

    def setUp(self):
        super(ArchitectureTest, self).setUp()
        self.platform = sys_type()

    def tearDown(self):
        super(ArchitectureTest, self).tearDown()

    def test_dict_functions_for_architecture(self):
        arch = Arch()
        arch.platform_os = arch.platform.operating_system('default_os')
@@ -34,13 +44,13 @@ def test_dict_functions_for_architecture(self):
        self.assertTrue( isinstance(new_arch.target, Target) )


    def test_platform_class_and_compiler_strategies(self):
        a = CrayXc()
        t = a.operating_system('default_os')
        self.assertEquals(t.compiler_strategy, 'MODULES')
        b = Linux()
        s = b.operating_system('default_os')
        self.assertEquals(s.compiler_strategy, 'PATH')
#    def test_platform_class_and_compiler_strategies(self):
#        a = CrayXc()
#        t = a.operating_system('default_os')
#        self.assertEquals(t.compiler_strategy, 'MODULES')
#        b = Linux()
#        s = b.operating_system('default_os')
#        self.assertEquals(s.compiler_strategy, 'PATH')

    def test_sys_type(self):
        output_platform_class = sys_type()
@@ -56,16 +66,13 @@ def test_sys_type(self):

        self.assertEqual(str(output_platform_class), str(my_platform_class))

    def setUp(self):
        self.platform = sys_type()

    def test_user_front_end_input(self):
        """Test when user inputs just frontend that both the frontend target
           and frontend operating system match
        """
        frontend_os = self.platform.operating_system("frontend")
        frontend_target = self.platform.target("frontend")
        frontend_spec = Spec("zlib=frontend")
        frontend_spec = Spec("libelf=frontend")
        frontend_spec.concretize()
        self.assertEqual(frontend_os, frontend_spec.architecture.platform_os)
        self.assertEqual(frontend_target, frontend_spec.architecture.target)
@@ -76,7 +83,7 @@ def test_user_back_end_input(self):
        """
        backend_os = self.platform.operating_system("backend")
        backend_target = self.platform.target("backend")
        backend_spec = Spec("zlib=backend")
        backend_spec = Spec("libelf=backend")
        backend_spec.concretize()
        self.assertEqual(backend_os, backend_spec.architecture.platform_os)
        self.assertEqual(backend_target, backend_spec.architecture.target)
@@ -85,7 +92,7 @@ def test_user_defaults(self):
        default_os = self.platform.operating_system("default_os")
        default_target = self.platform.target("default_target")

        default_spec = Spec("zlib")  # default is no args
        default_spec = Spec("libelf")  # default is no args
        default_spec.concretize()
        self.assertEqual(default_os, default_spec.architecture.platform_os)
        self.assertEqual(default_target, default_spec.architecture.target)
@@ -103,11 +110,11 @@ def test_user_input_combination(self):
        for arch in combinations:
            o,t = arch
            arch_spec = "-".join(arch)
            spec = Spec("zlib=%s" % arch_spec)
            spec = Spec("libelf=%s" % arch_spec)
            spec.concretize()
            results.append(spec.architecture.platform_os == self.platform.operating_system(o))
            results.append(spec.architecture.target == self.platform.target(t))
        res = all(results)
        print res

        self.assertTrue(res)
@@ -33,43 +33,91 @@

# Some sample compiler config data
a_comps = {
    "all": {
        "gcc@4.7.3" : {
    'gcc473': {
        'paths': {
            "cc" : "/gcc473",
            "cxx": "/g++473",
            "f77": None,
            "fc" : None },
        "gcc@4.5.0" : {
            "fc" : None
        },
        'modules': None,
        'spec': 'gcc@4.7.3',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    },
    'gcc450': {
        'paths': {
            "cc" : "/gcc450",
            "cxx": "/g++450",
            "f77": "/gfortran",
            "fc" : "/gfortran" },
        "clang@3.3" : {
            "f77": 'gfortran',
            "fc" : 'gfortran'
        },
        'modules': None,
        'spec': 'gcc@4.5.0',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    },
    'clang33': {
        'paths': {
            "cc" : "<overwritten>",
            "cxx": "<overwritten>",
            "f77": "<overwritten>",
            "fc" : "<overwritten>" }
        }
            "f77": '<overwritten>',
            "fc" : '<overwritten>' },
        'modules': None,
        'spec': 'clang@3.3',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    }
}

b_comps = {
    "all": {
        "icc@10.0" : {
    'icc100': {
        'paths': {
            "cc" : "/icc100",
            "cxx": "/icc100",
            "cxx": "/icp100",
            "f77": None,
            "fc" : None },
        "icc@11.1" : {
            "fc" : None
        },
        'modules': None,
        'spec': 'icc@10.0',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    },
    'icc111': {
        'paths': {
            "cc" : "/icc111",
            "cxx": "/icp111",
            "f77": "/ifort",
            "fc" : "/ifort" },
        "clang@3.3" : {
            "cc" : "/clang",
            "cxx": "/clang++",
            "f77": None,
            "fc" : None}
        }
            "f77": 'ifort',
            "fc" : 'ifort'
        },
        'modules': None,
        'spec': 'icc@11.1',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    },
    'clang33': {
        'paths': {
            "cc" : "<overwritten>",
            "cxx": "<overwritten>",
            "f77": '<overwritten>',
            "fc" : '<overwritten>' },
        'modules': None,
        'spec': 'clang@3.3',
        'operating_system': {
            'name': 'CNL',
            'version': '10'
        }
    }
}

class ConfigTest(MockPackagesTest):
@@ -96,7 +144,6 @@ def check_config(self, comps, *compiler_names):
            actual = config['all'][key][c]
            self.assertEqual(expected, actual)


    def test_write_key_in_memory(self):
        # Write b_comps "on top of" a_comps.
        spack.config.update_config('compilers', a_comps, 'test_low_priority')
@@ -36,21 +36,50 @@

mock_compiler_config = """\
compilers:
    all:
        clang@3.3:
    clang3.3CNL:
        spec: clang@3.3
        operating_system:
            name: CNL
            version: '10'
        paths:
            cc: /path/to/clang
            cxx: /path/to/clang++
            f77: None
            fc: None
            strategy: PATH
            modules: None
        gcc@4.5.0:
        modules: 'None'
    clang3.3RHL:
        spec: clang@3.3
        operating_system:
            name: redhat
            version: '6.7'
        paths:
            cc: /path/to/clang
            cxx: /path/to/clang++
            f77: None
            fc: None
        modules: 'None'
    gcc4.5.0CNL:
        paths:
            cc: /path/to/gcc
            cxx: /path/to/g++
            f77: /path/to/gfortran
            fc: /path/to/gfortran
            strategy: PATH
            modules: None
        operating_system:
            name: CNL
            version: '10'
        spec: gcc@4.5.0
        modules: 'None'
    gcc4.5.0RHL:
        paths:
            cc: /path/to/gcc
            cxx: /path/to/g++
            f77: /path/to/gfortran
            fc: /path/to/gfortran
        operating_system:
            name: RHL
            version: '6.7'
        spec: gcc@4.5.0
        modules: 'None'
"""

mock_packages_config = """\
@@ -243,7 +243,7 @@ def test_unsatisfiable_target(self):
        if len(platform.targets) > 1:
            first = platform.targets.values()[0].name
            second = platform.targets.values()[1].name
            set_pkg_dep('mpileaks', 'mpich='+first)
            self.set_pkg_dep('mpileaks', 'mpich='+first)
            spec = Spec('mpileaks ^mpich='+ second +' ^callpath ^dyninst ^libelf ^libdwarf')
            self.assertRaises(spack.spec.UnsatisfiableTargetSpecError, spec.normalize)