config: rework config system into a class instead of globals

- Current configuration code forces the config system to be initialized
  at module scope, so configs are parsed on every Spack run, essentially
  before anything else.

- We need more control over configuration init order, so move the config
  scopes into a class and reduce global state in config.py
This commit is contained in:
Todd Gamblin 2018-04-13 13:18:47 -07:00 committed by scheibelp
parent 6e398c7f08
commit d3a1ccd2fa
13 changed files with 555 additions and 470 deletions

View File

@ -39,15 +39,27 @@
import spack.store
from spack.error import SpackError
#
# Settings for commands that modify configuration
#
# Commands that modify configuration by default modify the *highest*
# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
def default_modify_scope():
    """Return the name of the config scope that commands modify by default.

    Commands that modify configuration by default modify the *highest*
    priority scope.

    Returns:
        str: name of the highest-precedence config scope
    """
    config = spack.config.get_configuration()
    return config.highest_precedence_scope().name
def default_list_scope():
    """Return the config scope that is listed by default.

    Commands that list configuration list *all* scopes (merged) by default.

    Returns:
        None: ``None`` tells the config system to merge all scopes
    """
    return None
# Commands that list configuration list *all* scopes by default.
default_list_scope = None
# cmd has a submodule called "list" so preserve the python list module
python_list = list

View File

@ -46,7 +46,7 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
scopes = spack.config.config_scopes
scopes = spack.config.get_configuration().scopes
# Find
find_parser = sp.add_parser(
@ -55,7 +55,7 @@ def setup_parser(subparser):
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# Remove
@ -67,14 +67,14 @@ def setup_parser(subparser):
remove_parser.add_argument('compiler_spec')
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# List
list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_list_scope,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
# Info
@ -82,7 +82,7 @@ def setup_parser(subparser):
info_parser.add_argument('compiler_spec')
info_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_list_scope,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
@ -113,7 +113,9 @@ def compiler_find(args):
init_config=False)
n = len(new_compilers)
s = 's' if n > 1 else ''
filename = spack.config.get_config_filename(args.scope, 'compilers')
config = spack.config.get_configuration()
filename = config.get_config_filename(args.scope, 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
else:

View File

@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
import spack.config
from spack.cmd.compiler import compiler_list
description = "list available compilers"
@ -31,7 +31,7 @@
def setup_parser(subparser):
scopes = spack.config.config_scopes
scopes = spack.config.get_configuration().scopes
subparser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
help="configuration scope to read/modify")

View File

@ -32,7 +32,7 @@
def setup_parser(subparser):
# User can only choose one
subparser.add_argument(
'--scope', choices=spack.config.config_scopes,
'--scope', choices=spack.config.get_configuration().scopes,
metavar=spack.config.scopes_metavar,
help="configuration scope to read/modify")
@ -54,18 +54,21 @@ def setup_parser(subparser):
def config_get(args):
spack.config.print_section(args.section)
config = spack.config.get_configuration()
config.print_section(args.section)
def config_edit(args):
if not args.scope:
if args.section == 'compilers':
args.scope = spack.cmd.default_modify_scope
args.scope = spack.cmd.default_modify_scope()
else:
args.scope = 'user'
if not args.section:
args.section = None
config_file = spack.config.get_config_filename(args.scope, args.section)
config = spack.config.get_configuration()
config_file = config.get_config_filename(args.scope, args.section)
spack.editor(config_file)

View File

@ -67,7 +67,7 @@ def setup_parser(subparser):
const=1, default=0,
help="only fetch one 'preferred' version per spec, not all known")
scopes = spack.config.config_scopes
scopes = spack.config.get_configuration().scopes
# Add
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
@ -76,7 +76,7 @@ def setup_parser(subparser):
'url', help="url of mirror directory from 'spack mirror create'")
add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# Remove
@ -85,14 +85,14 @@ def setup_parser(subparser):
remove_parser.add_argument('name')
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# List
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_list_scope,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")

View File

@ -39,7 +39,7 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes
scopes = spack.config.get_configuration().scopes
# Create
create_parser = sp.add_parser('create', help=repo_create.__doc__)
@ -53,7 +53,7 @@ def setup_parser(subparser):
list_parser = sp.add_parser('list', help=repo_list.__doc__)
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_list_scope,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
# Add
@ -62,7 +62,7 @@ def setup_parser(subparser):
'path', help="path to a Spack package repository directory")
add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# Remove
@ -73,7 +73,7 @@ def setup_parser(subparser):
help="path or namespace of a Spack package repository")
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
default=spack.cmd.default_modify_scope,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")

View File

@ -116,11 +116,11 @@ def init_compiler_config():
def compiler_config_files():
config_files = list()
for scope in spack.config.config_scopes:
config = spack.config.get_config('compilers', scope=scope)
if config:
config_files.append(spack.config.config_scopes[scope]
.get_section_filename('compilers'))
config = spack.config.get_configuration()
for scope in config.scopes:
compiler_config = config.get_config('compilers', scope=scope)
if compiler_config:
config_files.append(config.get_config_filename(scope, 'compilers'))
return config_files
@ -338,17 +338,18 @@ def compiler_for_spec(compiler_spec, arch_spec):
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config_scopes = spack.config.config_scopes
scope_to_compilers = dict()
for scope in config_scopes:
config = spack.config.get_configuration()
scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec,
scope=scope, use_cache=False)
if compilers:
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = dict()
cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
config_file = config_scopes[scope].get_section_filename('compilers')
config_file = config.get_config_filename(scope, 'compilers')
cfg_file_to_duplicates[config_file] = compilers
return cfg_file_to_duplicates

View File

@ -85,23 +85,31 @@
'config': spack.schema.config.schema,
}
#: OrderedDict of config scopes keyed by name.
#: Later scopes will override earlier scopes.
config_scopes = OrderedDict()
#: Builtin paths to configuration files in Spack
configuration_paths = (
# Default configuration scope is the lowest-level scope. These are
# versioned with Spack and can be overridden by systems, sites or users
('defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')),
# System configuration is per machine.
# No system-level configs should be checked into spack by default
('system', os.path.join(spack.paths.system_etc_path, 'spack')),
# Site configuration is per spack instance, for sites or projects
# No site-level configs should be checked into spack by default.
('site', os.path.join(spack.paths.etc_path, 'spack')),
# User configuration can override both spack defaults and site config
('user', spack.paths.user_config_path)
)
#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
def validate_section_name(section):
"""Exit if the section is not a valid section."""
if section not in section_schemas:
tty.die("Invalid config section: '%s'. Options are: %s"
% (section, " ".join(section_schemas.keys())))
def extend_with_default(validator_class):
def _extend_with_default(validator_class):
"""Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only
@ -141,20 +149,8 @@ def set_pp_defaults(validator, properties, instance, schema):
})
DefaultSettingValidator = extend_with_default(Draft4Validator)
def validate_section(data, schema):
"""Validate data read in from a Spack YAML file.
This leverages the line information (start_mark, end_mark) stored
on Spack YAML structures.
"""
try:
DefaultSettingValidator(schema).validate(data)
except jsonschema.ValidationError as e:
raise ConfigFormatError(e, data)
#: the validator we use for Spack config files
DefaultSettingValidator = _extend_with_default(Draft4Validator)
class ConfigScope(object):
@ -169,13 +165,8 @@ def __init__(self, name, path):
self.path = path # path to directory containing configs.
self.sections = syaml.syaml_dict() # sections read from config files.
# Register in a dict of all ConfigScopes
# TODO: make this cleaner. Mocking up for testing is brittle.
global config_scopes
config_scopes[name] = self
def get_section_filename(self, section):
validate_section_name(section)
_validate_section_name(section)
return os.path.join(self.path, "%s.yaml" % section)
def get_section(self, section):
@ -192,7 +183,7 @@ def write_section(self, section):
try:
mkdirp(self.path)
with open(filename, 'w') as f:
validate_section(data, section_schemas[section])
_validate_section(data, section_schemas[section])
syaml.dump(data, stream=f, default_flow_style=False)
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
@ -208,61 +199,229 @@ def __repr__(self):
return '<ConfigScope: %s: %s>' % (self.name, self.path)
#
# Below are configuration scopes.
#
# Each scope can have per-platform overrides in subdirectories of the
# configuration directory.
#
_platform = spack.architecture.platform().name
class Configuration(object):
"""A full Spack configuration, from a hierarchy of config files.
#: Default configuration scope is the lowest-level scope. These are
#: versioned with Spack and can be overridden by systems, sites or users.
_defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
ConfigScope('defaults', _defaults_path)
ConfigScope('defaults/%s' % _platform, os.path.join(_defaults_path, _platform))
This class makes it easy to add a new scope on top of an existing one.
"""
#: System configuration is per machine.
#: No system-level configs should be checked into spack by default
_system_path = os.path.join(spack.paths.system_etc_path, 'spack')
ConfigScope('system', _system_path)
ConfigScope('system/%s' % _platform, os.path.join(_system_path, _platform))
def __init__(self, *scopes):
"""Initialize a configuration with an initial list of scopes.
#: Site configuration is per spack instance, for sites or projects.
#: No site-level configs should be checked into spack by default.
_site_path = os.path.join(spack.paths.etc_path, 'spack')
ConfigScope('site', _site_path)
ConfigScope('site/%s' % _platform, os.path.join(_site_path, _platform))
Args:
scopes (list of ConfigScope): list of scopes to add to this
Configuration, ordered from lowest to highest precedence
#: User configuration can override both spack defaults and site config.
_user_path = spack.paths.user_config_path
ConfigScope('user', _user_path)
ConfigScope('user/%s' % _platform, os.path.join(_user_path, _platform))
"""
self.scopes = OrderedDict()
for scope in scopes:
self.push_scope(scope)
def push_scope(self, scope):
    """Add a higher precedence scope to the Configuration.

    Args:
        scope (ConfigScope): scope to add; it becomes the new
            highest-precedence scope (insertion order is precedence order)
    """
    self.scopes[scope.name] = scope
def pop_scope(self):
    """Remove the highest precedence scope and return it.

    Returns:
        ConfigScope: the scope that was removed
    """
    # popitem(last=True) removes the most recently pushed (highest
    # precedence) entry from the OrderedDict.
    name, scope = self.scopes.popitem(last=True)
    return scope
def highest_precedence_scope(self):
    """Get the scope with highest precedence (prefs will override others).

    Returns:
        ConfigScope: the last scope added, i.e. the one whose settings win
    """
    # scopes are ordered lowest -> highest precedence, so the last wins.
    return list(self.scopes.values())[-1]
def _validate_scope(self, scope):
    """Ensure that scope is valid in this configuration.

    This should be used by routines in ``config.py`` to validate
    scope name arguments, and to determine a default scope where no
    scope is specified.

    Args:
        scope (str or None): name of a scope, or ``None`` for the default

    Returns:
        ConfigScope: the named scope, or the highest-precedence scope
            when ``scope`` is ``None``

    Raises:
        ValueError: if ``scope`` is not the name of a scope in this
            configuration
    """
    if scope is None:
        # default to the scope with highest precedence.
        return self.highest_precedence_scope()

    elif scope in self.scopes:
        return self.scopes[scope]

    else:
        raise ValueError("Invalid config scope: '%s'. Must be one of %s"
                         % (scope, self.scopes.keys()))
def get_config_filename(self, scope, section):
    """For some scope and section, get the name of the configuration file.

    Args:
        scope (str or None): scope name, or ``None`` for the default scope
        section (str): config section name (e.g. ``'compilers'``)

    Returns:
        str: path of the YAML file for that scope and section
    """
    scope = self._validate_scope(scope)
    return scope.get_section_filename(section)
def clear_caches(self):
    """Clear the in-memory caches of all configuration scopes.

    This will cause config files to be re-read upon the next request.
    """
    for scope in self.scopes.values():
        scope.clear()
def update_config(self, section, update_data, scope=None):
    """Update the configuration file for a particular scope.

    Overwrites contents of a section in a scope with update_data,
    then writes out the config file.

    Args:
        section (str): name of a valid config section
        update_data: data with the top-level section name stripped off
            (it will be re-added); can be a list, dict, or any other
            yaml-ish structure
        scope (str or None): scope to write to, or ``None`` for the
            highest-precedence scope
    """
    _validate_section_name(section)  # validate section name
    scope = self._validate_scope(scope)  # get ConfigScope object

    # read only the requested section's data.
    scope.sections[section] = {section: update_data}
    scope.write_section(section)
def get_config(self, section, scope=None):
    """Get configuration settings for a section.

    If ``scope`` is ``None`` or not provided, return the merged contents
    of all of Spack's configuration scopes.  If ``scope`` is provided,
    return only the configuration as specified in that scope.

    This strips off the top-level section name from the YAML data.
    That is, for a YAML config file that looks like this::

        config:
          install_tree: $spack/opt/spack
          module_roots:
            lmod: $spack/share/spack/lmod

    ``get_config('config')`` will return::

        { 'install_tree': '$spack/opt/spack',
          'module_roots: {
              'lmod': '$spack/share/spack/lmod'
          }
        }
    """
    _validate_section_name(section)
    merged_section = syaml.syaml_dict()

    if scope is None:
        # merge all scopes, lowest to highest precedence
        scopes = self.scopes.values()
    else:
        scopes = [self._validate_scope(scope)]

    for scope in scopes:
        # read potentially cached data from the scope.
        data = scope.get_section(section)

        # Skip empty configs
        if not data or not isinstance(data, dict):
            continue

        if section not in data:
            tty.warn("Skipping bad configuration file: '%s'" % scope.path)
            continue

        merged_section = _merge_yaml(merged_section, data)

    # no config files -- empty config.
    if section not in merged_section:
        return {}

    # take the top key off before returning.
    return merged_section[section]
def __iter__(self):
    """Iterate over the scopes in this configuration, lowest to highest."""
    return iter(self.scopes.values())
def print_section(self, section):
    """Print a configuration section to stdout as YAML.

    Args:
        section (str): name of the section to print

    Raises:
        ConfigError: if the section cannot be read or dumped
    """
    try:
        # re-wrap the merged section under its top-level key for display
        data = syaml.syaml_dict()
        data[section] = self.get_config(section)
        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
    except (yaml.YAMLError, IOError):
        raise ConfigError("Error reading configuration: %s" % section)
def highest_precedence_scope():
"""Get the scope with highest precedence (prefs will override others)."""
return list(config_scopes.values())[-1]
def get_configuration():
"""This constructs Spack's standard configuration scopes
This is a singleton; it constructs one instance associated with this
module and returns it. It is bundled inside a function so that
configuratoin can be initialized lazily.
def validate_scope(scope):
"""Ensure that scope is valid, and return a valid scope if it is None.
This should be used by routines in ``config.py`` to validate
scope name arguments, and to determine a default scope where no
scope is specified.
Return:
Configuration: object for accessing spack configuration
"""
if scope is None:
# default to the scope with highest precedence.
return highest_precedence_scope()
global _configuration
if not _configuration:
# Each scope can have per-platfom overrides in subdirectories of the
# configuration directory.
platform = spack.architecture.platform().name
elif scope in config_scopes:
return config_scopes[scope]
scopes = []
for name, path in configuration_paths:
# add the regular scope
scopes.append(ConfigScope(name, path))
else:
raise ValueError("Invalid config scope: '%s'. Must be one of %s"
% (scope, config_scopes.keys()))
# add platform-specific scope
plat_name = '%s/%s' % (name, platform)
plat_path = os.path.join(path, platform)
scopes.append(ConfigScope(plat_name, plat_path))
_configuration = Configuration(*scopes)
return _configuration
#: This is the global singleton configuration for Spack.
#: TODO: consider making this NOT global and associate it with a spack instance
_configuration = None
#: TODO: consider getting rid of these top-level wrapper functions.
def get_config(section, scope=None):
    """Module-level convenience wrapper for ``Configuration.get_config()``.

    Operates on the lazily-constructed singleton configuration.
    """
    return get_configuration().get_config(section, scope)
def update_config(section, update_data, scope=None):
    """Module-level convenience wrapper for ``Configuration.update_config()``.

    Operates on the lazily-constructed singleton configuration.
    """
    return get_configuration().update_config(section, update_data, scope)
def _validate_section_name(section):
    """Exit with an error if ``section`` is not a valid config section.

    Args:
        section (str): candidate section name; must be a key of
            ``section_schemas``
    """
    if section not in section_schemas:
        tty.die("Invalid config section: '%s'. Options are: %s"
                % (section, " ".join(section_schemas.keys())))
def _validate_section(data, schema):
    """Validate data read in from a Spack YAML file.

    This leverages the line information (start_mark, end_mark) stored
    on Spack YAML structures.

    Raises:
        ConfigFormatError: if ``data`` does not conform to ``schema``
    """
    try:
        DefaultSettingValidator(schema).validate(data)
    except jsonschema.ValidationError as e:
        # re-raise with line/column info from the marked YAML structures
        raise ConfigFormatError(e, data)
def _read_config_file(filename, schema):
@ -284,7 +443,7 @@ def _read_config_file(filename, schema):
data = _mark_overrides(syaml.load(f))
if data:
validate_section(data, schema)
_validate_section(data, schema)
return data
except MarkedYAMLError as e:
@ -296,14 +455,7 @@ def _read_config_file(filename, schema):
"Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches():
"""Clears the caches for configuration files, which will cause them
to be re-read upon the next request"""
for scope in config_scopes.values():
scope.clear()
def override(string):
def _override(string):
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
@ -363,7 +515,7 @@ def they_are(t):
# Source dict is merged into dest.
elif they_are(dict):
for sk, sv in iteritems(source):
if override(sk) or sk not in dest:
if _override(sk) or sk not in dest:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.copy(sv)
else:
@ -376,109 +528,12 @@ def they_are(t):
return copy.copy(source)
def get_config(section, scope=None):
"""Get configuration settings for a section.
If ``scope`` is ``None`` or not provided, return the merged contents
of all of Spack's configuration scopes. If ``scope`` is provided,
return only the confiugration as specified in that scope.
This off the top-level name from the YAML section. That is, for a
YAML config file that looks like this::
config:
install_tree: $spack/opt/spack
module_roots:
lmod: $spack/share/spack/lmod
``get_config('config')`` will return::
{ 'install_tree': '$spack/opt/spack',
'module_roots: {
'lmod': '$spack/share/spack/lmod'
}
}
"""
validate_section_name(section)
merged_section = syaml.syaml_dict()
if scope is None:
scopes = config_scopes.values()
else:
scopes = [validate_scope(scope)]
for scope in scopes:
# read potentially cached data from the scope.
data = scope.get_section(section)
# Skip empty configs
if not data or not isinstance(data, dict):
continue
if section not in data:
tty.warn("Skipping bad configuration file: '%s'" % scope.path)
continue
merged_section = _merge_yaml(merged_section, data)
# no config files -- empty config.
if section not in merged_section:
return {}
# take the top key off before returning.
return merged_section[section]
def get_config_filename(scope, section):
"""For some scope and section, get the name of the configuration file"""
scope = validate_scope(scope)
return scope.get_section_filename(section)
def update_config(section, update_data, scope=None):
"""Update the configuration file for a particular scope.
Overwrites contents of a section in a scope with update_data,
then writes out the config file.
update_data should have the top-level section name stripped off
(it will be re-added). Data itself can be a list, dict, or any
other yaml-ish structure.
"""
validate_section_name(section) # validate section name
scope = validate_scope(scope) # get ConfigScope object from string.
# read only the requested section's data.
scope.sections[section] = {section: update_data}
scope.write_section(section)
def print_section(section):
"""Print a configuration to stdout."""
try:
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
class ConfigError(SpackError):
pass
"""Superclass for all Spack config related errors."""
class ConfigFileError(ConfigError):
pass
def get_path(path, data):
if path:
return get_path(path[1:], data[path[0]])
else:
return data
"""Issue reading or accessing a configuration file."""
class ConfigFormatError(ConfigError):
@ -490,6 +545,12 @@ def __init__(self, validation_error, data):
parent_mark = getattr(validation_error.parent, '_start_mark', None)
path = [str(s) for s in getattr(validation_error, 'path', None)]
def get_path(path, data):
if path:
return get_path(path[1:], data[path[0]])
else:
return data
# Try really hard to get the parent (which sometimes is not
# set) This digs it out of the validated structure if it's not
# on the validation_error.

View File

@ -36,8 +36,9 @@ def pre_run():
def check_compiler_yaml_version():
config_scopes = spack.config.config_scopes
for scope in config_scopes.values():
config = spack.config.get_configuration()
for scope in config:
file_name = os.path.join(scope.path, 'compilers.yaml')
data = None
if os.path.isfile(file_name):

View File

@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import pytest
from six import iteritems
import spack.spec
@ -30,24 +29,23 @@
from spack.compiler import _get_versioned_tuple
@pytest.mark.usefixtures('config')
class TestCompilers(object):
def test_get_compiler_duplicates(config):
# In this case there is only one instance of the specified compiler in
# the test configuration (so it is not actually a duplicate), but the
# method behaves the same.
cfg_file_to_duplicates = compilers.get_compiler_duplicates(
'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
def test_get_compiler_duplicates(self):
# In this case there is only one instance of the specified compiler in
# the test configuration (so it is not actually a duplicate), but the
# method behaves the same.
cfg_file_to_duplicates = compilers.get_compiler_duplicates(
'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
assert len(duplicates) == 1
assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
assert len(duplicates) == 1
def test_all_compilers(self):
all_compilers = compilers.all_compilers()
filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
filtered = [x for x in filtered if x.operating_system == 'SuSE11']
assert len(filtered) == 1
def test_all_compilers(config):
all_compilers = compilers.all_compilers()
filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
filtered = [x for x in filtered if x.operating_system == 'SuSE11']
assert len(filtered) == 1
def test_version_detection_is_empty():

View File

@ -24,26 +24,23 @@
##############################################################################
import pytest
import spack
import spack.util.spack_yaml as syaml
from spack.spec import Spec
import spack.package_prefs
import spack.util.spack_yaml as syaml
from spack.config import ConfigScope
from spack.spec import Spec
@pytest.fixture()
def concretize_scope(config, tmpdir):
"""Adds a scope for concretization preferences"""
tmpdir.ensure_dir('concretize')
spack.config.ConfigScope(
'concretize', str(tmpdir.join('concretize'))
)
yield
# This is kind of weird, but that's how config scopes are
# set in ConfigScope.__init__
spack.config.config_scopes.pop('concretize')
spack.package_prefs.PackagePrefs.clear_caches()
config.push_scope(
ConfigScope('concretize', str(tmpdir.join('concretize'))))
# reset provider index each time, too
yield
config.pop_scope()
spack.package_prefs.PackagePrefs.clear_caches()
spack.repo._provider_index = None

View File

@ -33,9 +33,84 @@
import spack.paths
import spack.config
from spack.util.path import canonicalize_path
from spack.util.ordereddict import OrderedDict
# Some sample compiler config data
# sample config data
config_low = {
'config': {
'install_tree': 'install_tree_path',
'build_stage': ['path1', 'path2', 'path3']}}
config_override_all = {
'config:': {
'install_tree:': 'override_all'}}
config_override_key = {
'config': {
'install_tree:': 'override_key'}}
config_merge_list = {
'config': {
'build_stage': ['patha', 'pathb']}}
config_override_list = {
'config': {
'build_stage:': ['patha', 'pathb']}}
@pytest.fixture()
def config(tmpdir):
    """Mock the global configuration with two empty, writable scopes.

    Swaps the module singleton for a fresh ``Configuration`` with 'low'
    and 'high' scopes rooted in ``tmpdir``, and restores the original
    afterwards.

    Yields:
        spack.config.Configuration: the mock configuration, so tests and
            dependent fixtures can call ``push_scope``/``pop_scope`` on it.
    """
    real_configuration = spack.config._configuration

    scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
              for name in ['low', 'high']]
    cfg = spack.config.Configuration(*scopes)
    spack.config._configuration = cfg

    # Yield the Configuration object itself: dependent fixtures (e.g.
    # concretize_scope) call config.push_scope()/pop_scope() on the
    # fixture value, which fails if the fixture yields None.
    yield cfg

    spack.config._configuration = real_configuration
@pytest.fixture()
def write_config_file(tmpdir):
    """Returns a function that writes a config file into the test tmpdir."""
    def _write(config, data, scope):
        # config: section name (e.g. 'compilers'); data: yaml-able dict;
        # scope: scope (sub)directory name under tmpdir.
        config_yaml = tmpdir.join(scope, config + '.yaml')
        config_yaml.ensure()  # create the file and its parent dirs
        with config_yaml.open('w') as f:
            yaml.dump(data, f)
    return _write
def check_compiler_config(comps, *compiler_names):
    """Check that named compilers in comps match Spack's config.

    Args:
        comps (list): expected compiler config entries, each of the form
            ``{'compiler': {...}}``
        *compiler_names (str): specs of the compilers to check; entries in
            Spack's merged config with other specs are ignored

    Raises:
        ValueError: if a named spec from the config has no counterpart
            in ``comps``
    """
    config = spack.config.get_config('compilers')
    compiler_list = ['cc', 'cxx', 'f77', 'fc']
    flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
                 'ldflags', 'ldlibs']
    param_list = ['modules', 'paths', 'spec', 'operating_system']
    for compiler in config:
        conf = compiler['compiler']
        if conf['spec'] in compiler_names:
            # find the expected entry with the same spec
            comp = next((c['compiler'] for c in comps if
                         c['compiler']['spec'] == conf['spec']), None)
            if not comp:
                raise ValueError('Bad config spec')
            # top-level params must match exactly
            for p in param_list:
                assert conf[p] == comp[p]
            # flags may be absent on either side; missing == None
            for f in flag_list:
                expected = comp.get('flags', {}).get(f, None)
                actual = conf.get('flags', {}).get(f, None)
                assert expected == actual
            # compiler paths must be present and equal
            for c in compiler_list:
                expected = comp['paths'][c]
                actual = conf['paths'][c]
                assert expected == actual
#
# Some sample compiler config data and tests.
#
a_comps = {
'compilers': [
{'compiler': {
@ -140,32 +215,110 @@
]
}
# Some Sample repo data
@pytest.fixture()
def compiler_specs():
    """Fixture yielding the compiler specs from the sample config data."""
    CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
    return CompilerSpecs(
        a=[entry['compiler']['spec'] for entry in a_comps['compilers']],
        b=[entry['compiler']['spec'] for entry in b_comps['compilers']])
def test_write_key_in_memory(config, compiler_specs):
    # Layer b_comps on top of a_comps in separate scopes.
    for comps, scope in ((a_comps, 'low'), (b_comps, 'high')):
        spack.config.update_config('compilers', comps['compilers'],
                                   scope=scope)

    # Both sets of compilers should be visible in the merged config.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
def test_write_key_to_disk(config, compiler_specs):
    # Layer b_comps on top of a_comps in separate scopes.
    for comps, scope in ((a_comps, 'low'), (b_comps, 'high')):
        spack.config.update_config('compilers', comps['compilers'],
                                   scope=scope)

    # Drop the in-memory caches so the next read comes from disk.
    spack.config.get_configuration().clear_caches()

    # The on-disk config must agree with what was written.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
def test_write_to_same_priority_file(config, compiler_specs):
    # Write both compiler sets into the SAME scope ('low').
    for comps in (a_comps, b_comps):
        spack.config.update_config('compilers', comps['compilers'],
                                   scope='low')

    # Drop the in-memory caches so the next read comes from disk.
    spack.config.get_configuration().clear_caches()

    # Both sets should survive being written to a single file.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
#
# Sample repo data and tests
#
repos_low = {'repos': ["/some/path"]}
repos_high = {'repos': ["/some/other/path"]}
# sample config data
config_low = {
'config': {
'install_tree': 'install_tree_path',
'build_stage': ['path1', 'path2', 'path3']}}
# repos
def test_write_list_in_memory(config):
spack.config.update_config('repos', repos_low['repos'], scope='low')
spack.config.update_config('repos', repos_high['repos'], scope='high')
config_override_all = {
'config:': {
'install_tree:': 'override_all'}}
config = spack.config.get_config('repos')
assert config == repos_high['repos'] + repos_low['repos']
config_override_key = {
'config': {
'install_tree:': 'override_key'}}
config_merge_list = {
'config': {
'build_stage': ['patha', 'pathb']}}
# Replace the 'build_stage' list entirely (':' suffix marks an override).
# NOTE(review): this constant had been spliced into the middle of the test
# below by a bad merge; it belongs at module scope with the other samples.
config_override_list = {
    'config': {
        'build_stage:': ['patha', 'pathb']}}


def test_substitute_config_variables(config):
    """$spack and ${spack} in paths expand to the spack install prefix."""
    prefix = spack.paths.prefix.lstrip('/')

    for var in ('$spack', '${spack}'):
        # variable at the end of a path
        assert canonicalize_path('/foo/bar/baz/' + var) == \
            os.path.join('/foo/bar/baz', prefix)
        # variable at the start of a path
        assert canonicalize_path(var + '/foo/bar/baz/') == \
            os.path.join(spack.paths.prefix, 'foo/bar/baz')
        # variable in the middle of a path
        assert canonicalize_path('/foo/bar/baz/' + var + '/foo/bar/baz/') == \
            os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz')

    # An unterminated '${spack' must not be substituted.
    assert canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/') != \
        os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz')
packages_merge_low = {
@ -212,208 +365,59 @@ def test_merge_with_defaults(config, write_config_file):
assert cfg['baz']['version'] == ['c']
def check_compiler_config(comps, *compiler_names):
    """Check that named compilers in comps match Spack's config."""
    actual_entries = spack.config.get_config('compilers')
    path_keys = ['cc', 'cxx', 'f77', 'fc']
    flag_keys = ['cflags', 'cxxflags', 'fflags', 'cppflags',
                 'ldflags', 'ldlibs']
    param_keys = ['modules', 'paths', 'spec', 'operating_system']

    for entry in actual_entries:
        conf = entry['compiler']
        if conf['spec'] not in compiler_names:
            continue

        # Find the matching expected entry in the sample data.
        matches = [c['compiler'] for c in comps
                   if c['compiler']['spec'] == conf['spec']]
        comp = matches[0] if matches else None
        if not comp:
            raise ValueError('Bad config spec')

        # Top-level parameters must match exactly.
        for p in param_keys:
            assert conf[p] == comp[p]

        # Flags may be absent; missing flags compare as None on both sides.
        for f in flag_keys:
            assert comp.get('flags', {}).get(f, None) == \
                conf.get('flags', {}).get(f, None)

        # Compiler executable paths must match.
        for c in path_keys:
            assert comp['paths'][c] == conf['paths'][c]
def test_substitute_user(config):
    """$user in a path expands to the current username."""
    username = getpass.getuser()
    expanded = canonicalize_path('/foo/bar/$user/baz')
    assert expanded == '/foo/bar/' + username + '/baz'
@pytest.fixture()
def config(tmpdir):
    """Replace the real config scopes with two temporary mock scopes."""
    spack.config.clear_config_caches()

    # Stash the real scopes and start from an empty scope table.
    saved_scopes = spack.config.config_scopes
    spack.config.config_scopes = OrderedDict()

    # The return value is unused: ConfigScope presumably registers itself
    # in spack.config.config_scopes as a side effect.
    spack.config.ConfigScope('low', str(tmpdir.join('low')))
    spack.config.ConfigScope('high', str(tmpdir.join('high')))

    Config = collections.namedtuple('Config', ['real', 'mock'])
    yield Config(real=saved_scopes, mock=spack.config.config_scopes)

    # Restore the real scopes and flush anything cached from the mocks.
    spack.config.config_scopes = saved_scopes
    spack.config.clear_config_caches()
def test_substitute_tempdir(config):
    """$tempdir expands to the system temporary directory."""
    tmp = tempfile.gettempdir()
    assert canonicalize_path('$tempdir') == tmp
    assert canonicalize_path('$tempdir/foo/bar/baz') == tmp + '/foo/bar/baz'
@pytest.fixture()
def write_config_file(tmpdir):
    """Return a helper that dumps config data as YAML into a scope dir."""
    def _write(config, data, scope):
        # e.g. <tmpdir>/<scope>/<config>.yaml
        target = tmpdir.join(scope, config + '.yaml')
        target.ensure()  # create parent directories and the file itself
        with target.open('w') as stream:
            yaml.dump(data, stream)
    return _write
def test_read_config(config, write_config_file):
    """A single scope reads back exactly what was written to it."""
    write_config_file('config', config_low, 'low')
    actual = spack.config.get_config('config')
    assert actual == config_low['config']
@pytest.fixture()
def compiler_specs():
    """Returns a couple of compiler specs needed for the tests"""
    CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
    return CompilerSpecs(
        a=[entry['compiler']['spec'] for entry in a_comps['compilers']],
        b=[entry['compiler']['spec'] for entry in b_comps['compilers']])
def test_read_config_override_all(config, write_config_file):
    """'config:' with a trailing colon replaces the whole section."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_all, 'high')
    expected = {'install_tree': 'override_all'}
    assert spack.config.get_config('config') == expected
@pytest.mark.usefixtures('config')
class TestConfig(object):
def test_read_config_override_key(config, write_config_file):
    """A key marked with ':' replaces only that key; others still merge."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_key, 'high')
    expected = {
        'install_tree': 'override_key',
        'build_stage': ['path1', 'path2', 'path3'],
    }
    assert spack.config.get_config('config') == expected
def test_write_list_in_memory(self):
    """Merged repo list puts high-scope entries before low-scope ones."""
    for data, scope in ((repos_low, 'low'), (repos_high, 'high')):
        spack.config.update_config('repos', data['repos'], scope=scope)
    merged = spack.config.get_config('repos')
    assert merged == repos_high['repos'] + repos_low['repos']
def test_read_config_merge_list(config, write_config_file):
    """Plain lists merge across scopes, high-scope entries first."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_merge_list, 'high')
    expected = {
        'install_tree': 'install_tree_path',
        'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3'],
    }
    assert spack.config.get_config('config') == expected
def test_write_key_in_memory(self, compiler_specs):
    """Compiler configs written to two scopes are all visible in memory."""
    # Layer b_comps over a_comps in separate scopes.
    for data, scope in ((a_comps, 'low'), (b_comps, 'high')):
        spack.config.update_config('compilers', data['compilers'], scope=scope)

    # Both sets of compilers must show up in the merged config.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
def test_write_key_to_disk(self, compiler_specs):
    """Compiler configs in two scopes survive a cache flush."""
    # Layer b_comps over a_comps in separate scopes.
    for data, scope in ((a_comps, 'low'), (b_comps, 'high')):
        spack.config.update_config('compilers', data['compilers'], scope=scope)

    # Drop cached state so the next reads come from disk.
    spack.config.clear_config_caches()

    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
def test_write_to_same_priority_file(self, compiler_specs):
    """Two writes to the same scope land in one file and both persist."""
    # Write a_comps, then b_comps, into the *same* 'low' scope.
    for data in (a_comps, b_comps):
        spack.config.update_config('compilers', data['compilers'], scope='low')

    # Force the next reads to come from the file on disk.
    spack.config.clear_config_caches()

    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
def check_canonical(self, var, expected):
    """Ensure that <expected> is substituted properly for <var> in strings
    containing <var> in various positions."""
    path = '/foo/bar/baz'
    # Plain asserts: TestConfig derives from object, not unittest.TestCase,
    # so the previous self.assertEqual calls would raise AttributeError.
    assert canonicalize_path(var + path) == expected + path
    assert canonicalize_path(path + var) == path + '/' + expected
    assert canonicalize_path(path + var + path) == expected + path
def test_substitute_config_variables(self):
    """$spack and ${spack} in paths expand to the spack install prefix."""
    prefix = spack.paths.prefix.lstrip('/')

    for var in ('$spack', '${spack}'):
        # variable at the end of a path
        assert canonicalize_path('/foo/bar/baz/' + var) == \
            os.path.join('/foo/bar/baz', prefix)
        # variable at the start of a path
        assert canonicalize_path(var + '/foo/bar/baz/') == \
            os.path.join(spack.paths.prefix, 'foo/bar/baz')
        # variable in the middle of a path
        assert canonicalize_path('/foo/bar/baz/' + var + '/foo/bar/baz/') == \
            os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz')

    # An unterminated '${spack' must not be substituted.
    assert canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/') != \
        os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz')
def test_substitute_user(self):
    """$user in a path expands to the current username."""
    username = getpass.getuser()
    expanded = canonicalize_path('/foo/bar/$user/baz')
    assert expanded == '/foo/bar/' + username + '/baz'
def test_substitute_tempdir(self):
    """$tempdir expands to the system temporary directory."""
    tmp = tempfile.gettempdir()
    assert canonicalize_path('$tempdir') == tmp
    assert canonicalize_path('$tempdir/foo/bar/baz') == tmp + '/foo/bar/baz'
def test_read_config(self, write_config_file):
    """A single scope reads back exactly what was written to it."""
    write_config_file('config', config_low, 'low')
    actual = spack.config.get_config('config')
    assert actual == config_low['config']
def test_read_config_override_all(self, write_config_file):
    """'config:' with a trailing colon replaces the whole section."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_all, 'high')
    expected = {'install_tree': 'override_all'}
    assert spack.config.get_config('config') == expected
def test_read_config_override_key(self, write_config_file):
    """A key marked with ':' replaces only that key; others still merge."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_key, 'high')
    expected = {
        'install_tree': 'override_key',
        'build_stage': ['path1', 'path2', 'path3'],
    }
    assert spack.config.get_config('config') == expected
def test_read_config_merge_list(self, write_config_file):
    """Plain lists merge across scopes, high-scope entries first."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_merge_list, 'high')
    expected = {
        'install_tree': 'install_tree_path',
        'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3'],
    }
    assert spack.config.get_config('config') == expected
def test_read_config_override_list(self, write_config_file):
    """A list key marked with ':' replaces the list from lower scopes."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_list, 'high')
    expected = {
        'install_tree': 'install_tree_path',
        'build_stage': ['patha', 'pathb'],
    }
    assert spack.config.get_config('config') == expected
def test_read_config_override_list(config, write_config_file):
    """A list key marked with ':' replaces the list from lower scopes."""
    write_config_file('config', config_low, 'low')
    write_config_file('config', config_override_list, 'high')
    expected = {
        'install_tree': 'install_tree_path',
        'build_stage': ['patha', 'pathb'],
    }
    assert spack.config.get_config('config') == expected
def test_keys_are_ordered():

View File

@ -216,17 +216,21 @@ def configuration_dir(tmpdir_factory, linux_os):
directory path.
"""
tmpdir = tmpdir_factory.mktemp('configurations')
# Name of the yaml files in the test/data folder
test_path = py.path.local(spack.paths.test_path)
compilers_yaml = test_path.join('data', 'compilers.yaml')
packages_yaml = test_path.join('data', 'packages.yaml')
config_yaml = test_path.join('data', 'config.yaml')
# Create temporary 'site' and 'user' folders
tmpdir.ensure('site', dir=True)
tmpdir.ensure('user', dir=True)
# Copy the configurations that don't need further work
packages_yaml.copy(tmpdir.join('site', 'packages.yaml'))
config_yaml.copy(tmpdir.join('site', 'config.yaml'))
# Write the one that needs modifications
content = ''.join(compilers_yaml.read()).format(linux_os)
t = tmpdir.join('site', 'compilers.yaml')
@ -239,18 +243,20 @@ def config(configuration_dir):
"""Hooks the mock configuration files into spack.config"""
# Set up a mock config scope
spack.package_prefs.PackagePrefs.clear_caches()
spack.config.clear_config_caches()
real_scope = spack.config.config_scopes
spack.config.config_scopes = spack.util.ordereddict.OrderedDict()
spack.config.ConfigScope('site', str(configuration_dir.join('site')))
spack.config.ConfigScope('system', str(configuration_dir.join('system')))
spack.config.ConfigScope('user', str(configuration_dir.join('user')))
Config = collections.namedtuple('Config', ['real', 'mock'])
yield Config(real=real_scope, mock=spack.config.config_scopes)
real_configuration = spack.config._configuration
spack.config.config_scopes = real_scope
spack.config.clear_config_caches()
print real_configuration
scopes = [
spack.config.ConfigScope(name, str(configuration_dir.join(name)))
for name in ['site', 'system', 'user']]
config = spack.config.Configuration(*scopes)
spack.config._configuration = config
yield config
spack.config._configuration = real_configuration
spack.package_prefs.PackagePrefs.clear_caches()