config: rework config system into a class instead of globals
- Current configuration code forces the config system to be initialized at
  module scope, so configs are parsed on every Spack run, essentially before
  anything else.

- We need more control over configuration init order, so move the config
  scopes into a class and reduce global state in config.py.
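Editorial sketch (not part of the commit): intended usage of the reworked API, based on the code added in the hunks below. The section names are real; the listed scope names are illustrative.

    import spack.config

    # Nothing is parsed at import time any more; the Configuration object is
    # built lazily on first request and cached in spack.config._configuration.
    config = spack.config.get_configuration()

    # Scopes now live on the object instead of a module-level OrderedDict.
    scope_names = list(config.scopes)   # e.g. ['defaults', 'defaults/<platform>', ..., 'user/<platform>']

    # Merged view across all scopes, or a single named scope.
    everything = config.get_config('config')
    user_only = config.get_config('config', scope='user')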
@@ -39,15 +39,27 @@
 import spack.store
 from spack.error import SpackError


 #
 # Settings for commands that modify configuration
 #
-# Commands that modify configuration by default modify the *highest*
-# priority scope.
-default_modify_scope = spack.config.highest_precedence_scope().name
+def default_modify_scope():
+    """Return the config scope that commands should modify by default.
+
+    Commands that modify configuration by default modify the *highest*
+    priority scope.
+    """
+    config = spack.config.get_configuration()
+    return config.highest_precedence_scope().name
+
+
+def default_list_scope():
+    """Return the config scope that is listed by default.
+
+    Commands that list configuration list *all* scopes (merged) by default.
+    """
+    return None

-# Commands that list configuration list *all* scopes by default.
-default_list_scope = None

 # cmd has a submodule called "list" so preserve the python list module
 python_list = list
@@ -46,7 +46,7 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(
         metavar='SUBCOMMAND', dest='compiler_command')

-    scopes = spack.config.config_scopes
+    scopes = spack.config.get_configuration().scopes

     # Find
     find_parser = sp.add_parser(
@@ -55,7 +55,7 @@ def setup_parser(subparser):
     find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
     find_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

     # Remove
@@ -67,14 +67,14 @@ def setup_parser(subparser):
     remove_parser.add_argument('compiler_spec')
     remove_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

     # List
     list_parser = sp.add_parser('list', help='list available compilers')
     list_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_list_scope,
+        default=spack.cmd.default_list_scope(),
         help="configuration scope to read from")

     # Info
@@ -82,7 +82,7 @@ def setup_parser(subparser):
     info_parser.add_argument('compiler_spec')
     info_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_list_scope,
+        default=spack.cmd.default_list_scope(),
         help="configuration scope to read from")

@@ -113,7 +113,9 @@ def compiler_find(args):
                                            init_config=False)
         n = len(new_compilers)
         s = 's' if n > 1 else ''
-        filename = spack.config.get_config_filename(args.scope, 'compilers')
+
+        config = spack.config.get_configuration()
+        filename = config.get_config_filename(args.scope, 'compilers')
         tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
         colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
     else:
@@ -22,7 +22,7 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import spack
+import spack.config
 from spack.cmd.compiler import compiler_list

 description = "list available compilers"
@@ -31,7 +31,7 @@


 def setup_parser(subparser):
-    scopes = spack.config.config_scopes
+    scopes = spack.config.get_configuration().scopes
     subparser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
         help="configuration scope to read/modify")
@@ -32,7 +32,7 @@
 def setup_parser(subparser):
     # User can only choose one
     subparser.add_argument(
-        '--scope', choices=spack.config.config_scopes,
+        '--scope', choices=spack.config.get_configuration().scopes,
         metavar=spack.config.scopes_metavar,
         help="configuration scope to read/modify")

@@ -54,18 +54,21 @@ def setup_parser(subparser):


 def config_get(args):
-    spack.config.print_section(args.section)
+    config = spack.config.get_configuration()
+    config.print_section(args.section)


 def config_edit(args):
     if not args.scope:
         if args.section == 'compilers':
-            args.scope = spack.cmd.default_modify_scope
+            args.scope = spack.cmd.default_modify_scope()
         else:
             args.scope = 'user'
     if not args.section:
         args.section = None
-    config_file = spack.config.get_config_filename(args.scope, args.section)
+
+    config = spack.config.get_configuration()
+    config_file = config.get_config_filename(args.scope, args.section)
     spack.editor(config_file)


@@ -67,7 +67,7 @@ def setup_parser(subparser):
         const=1, default=0,
         help="only fetch one 'preferred' version per spec, not all known")

-    scopes = spack.config.config_scopes
+    scopes = spack.config.get_configuration().scopes

     # Add
     add_parser = sp.add_parser('add', help=mirror_add.__doc__)
@@ -76,7 +76,7 @@ def setup_parser(subparser):
         'url', help="url of mirror directory from 'spack mirror create'")
     add_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

     # Remove
@@ -85,14 +85,14 @@ def setup_parser(subparser):
     remove_parser.add_argument('name')
     remove_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

     # List
     list_parser = sp.add_parser('list', help=mirror_list.__doc__)
     list_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_list_scope,
+        default=spack.cmd.default_list_scope(),
         help="configuration scope to read from")

@@ -39,7 +39,7 @@

 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
-    scopes = spack.config.config_scopes
+    scopes = spack.config.get_configuration().scopes

     # Create
     create_parser = sp.add_parser('create', help=repo_create.__doc__)
@@ -53,7 +53,7 @@ def setup_parser(subparser):
     list_parser = sp.add_parser('list', help=repo_list.__doc__)
     list_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_list_scope,
+        default=spack.cmd.default_list_scope(),
         help="configuration scope to read from")

     # Add
@@ -62,7 +62,7 @@ def setup_parser(subparser):
         'path', help="path to a Spack package repository directory")
     add_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

     # Remove
@@ -73,7 +73,7 @@ def setup_parser(subparser):
         help="path or namespace of a Spack package repository")
     remove_parser.add_argument(
         '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
-        default=spack.cmd.default_modify_scope,
+        default=spack.cmd.default_modify_scope(),
         help="configuration scope to modify")

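All of the command-module hunks above make the same two mechanical changes; a condensed sketch of the resulting pattern (names exactly as they appear in the hunks):

    def setup_parser(subparser):
        # was: scopes = spack.config.config_scopes
        scopes = spack.config.get_configuration().scopes
        subparser.add_argument(
            '--scope', choices=scopes, metavar=spack.config.scopes_metavar,
            # default_modify_scope is now a function, so it must be called here
            default=spack.cmd.default_modify_scope(),
            help="configuration scope to modify")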
@@ -116,11 +116,11 @@ def init_compiler_config():

 def compiler_config_files():
     config_files = list()
-    for scope in spack.config.config_scopes:
-        config = spack.config.get_config('compilers', scope=scope)
-        if config:
-            config_files.append(spack.config.config_scopes[scope]
-                                .get_section_filename('compilers'))
+    config = spack.config.get_configuration()
+    for scope in config.scopes:
+        compiler_config = config.get_config('compilers', scope=scope)
+        if compiler_config:
+            config_files.append(config.get_config_filename(scope, 'compilers'))
     return config_files

@@ -338,17 +338,18 @@ def compiler_for_spec(compiler_spec, arch_spec):

 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config_scopes = spack.config.config_scopes
-    scope_to_compilers = dict()
-    for scope in config_scopes:
+    config = spack.config.get_configuration()
+
+    scope_to_compilers = {}
+    for scope in config.scopes:
         compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec,
                                        scope=scope, use_cache=False)
         if compilers:
             scope_to_compilers[scope] = compilers

-    cfg_file_to_duplicates = dict()
+    cfg_file_to_duplicates = {}
     for scope, compilers in scope_to_compilers.items():
-        config_file = config_scopes[scope].get_section_filename('compilers')
+        config_file = config.get_config_filename(scope, 'compilers')
         cfg_file_to_duplicates[config_file] = compilers

     return cfg_file_to_duplicates
@@ -85,23 +85,31 @@
     'config': spack.schema.config.schema,
 }

-#: OrderedDict of config scopes keyed by name.
-#: Later scopes will override earlier scopes.
-config_scopes = OrderedDict()
+#: Builtin paths to configuration files in Spack
+configuration_paths = (
+    # Default configuration scope is the lowest-level scope. These are
+    # versioned with Spack and can be overridden by systems, sites or users
+    ('defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')),
+
+    # System configuration is per machine.
+    # No system-level configs should be checked into spack by default
+    ('system', os.path.join(spack.paths.system_etc_path, 'spack')),
+
+    # Site configuration is per spack instance, for sites or projects
+    # No site-level configs should be checked into spack by default.
+    ('site', os.path.join(spack.paths.etc_path, 'spack')),
+
+    # User configuration can override both spack defaults and site config
+    ('user', spack.paths.user_config_path)
+)

 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
 scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'


-def validate_section_name(section):
-    """Exit if the section is not a valid section."""
-    if section not in section_schemas:
-        tty.die("Invalid config section: '%s'. Options are: %s"
-                % (section, " ".join(section_schemas.keys())))
-
-
-def extend_with_default(validator_class):
+def _extend_with_default(validator_class):
     """Add support for the 'default' attr for properties and patternProperties.

     jsonschema does not handle this out of the box -- it only
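For reference, `get_configuration()` (added further down in this diff) expands each `(name, path)` entry of `configuration_paths` into a regular scope plus a platform-specific scope, lowest precedence first. A small sketch of the resulting scope order, using a hypothetical 'linux' platform name:

    platform = 'linux'  # illustrative; really spack.architecture.platform().name
    names = []
    for name in ('defaults', 'system', 'site', 'user'):
        names.append(name)
        names.append('%s/%s' % (name, platform))
    # names == ['defaults', 'defaults/linux', 'system', 'system/linux',
    #           'site', 'site/linux', 'user', 'user/linux']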
@@ -141,20 +149,8 @@ def set_pp_defaults(validator, properties, instance, schema):
     })


-DefaultSettingValidator = extend_with_default(Draft4Validator)
-
-
-def validate_section(data, schema):
-    """Validate data read in from a Spack YAML file.
-
-    This leverages the line information (start_mark, end_mark) stored
-    on Spack YAML structures.
-
-    """
-    try:
-        DefaultSettingValidator(schema).validate(data)
-    except jsonschema.ValidationError as e:
-        raise ConfigFormatError(e, data)
+#: the validator we use for Spack config files
+DefaultSettingValidator = _extend_with_default(Draft4Validator)


 class ConfigScope(object):
@@ -169,13 +165,8 @@ def __init__(self, name, path):
         self.path = path  # path to directory containing configs.
         self.sections = syaml.syaml_dict()  # sections read from config files.

-        # Register in a dict of all ConfigScopes
-        # TODO: make this cleaner. Mocking up for testing is brittle.
-        global config_scopes
-        config_scopes[name] = self
-
     def get_section_filename(self, section):
-        validate_section_name(section)
+        _validate_section_name(section)
         return os.path.join(self.path, "%s.yaml" % section)

     def get_section(self, section):
@@ -192,7 +183,7 @@ def write_section(self, section):
         try:
             mkdirp(self.path)
             with open(filename, 'w') as f:
-                validate_section(data, section_schemas[section])
+                _validate_section(data, section_schemas[section])
                 syaml.dump(data, stream=f, default_flow_style=False)
         except jsonschema.ValidationError as e:
             raise ConfigSanityError(e, data)
@@ -208,61 +199,229 @@ def __repr__(self):
         return '<ConfigScope: %s: %s>' % (self.name, self.path)


-#
-# Below are configuration scopes.
-#
-# Each scope can have per-platfom overrides in subdirectories of the
-# configuration directory.
-#
-_platform = spack.architecture.platform().name
-
-#: Default configuration scope is the lowest-level scope. These are
-#: versioned with Spack and can be overridden by systems, sites or users.
-_defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
-ConfigScope('defaults', _defaults_path)
-ConfigScope('defaults/%s' % _platform, os.path.join(_defaults_path, _platform))
-
-#: System configuration is per machine.
-#: No system-level configs should be checked into spack by default
-_system_path = os.path.join(spack.paths.system_etc_path, 'spack')
-ConfigScope('system', _system_path)
-ConfigScope('system/%s' % _platform, os.path.join(_system_path, _platform))
-
-#: Site configuration is per spack instance, for sites or projects.
-#: No site-level configs should be checked into spack by default.
-_site_path = os.path.join(spack.paths.etc_path, 'spack')
-ConfigScope('site', _site_path)
-ConfigScope('site/%s' % _platform, os.path.join(_site_path, _platform))
-
-#: User configuration can override both spack defaults and site config.
-_user_path = spack.paths.user_config_path
-ConfigScope('user', _user_path)
-ConfigScope('user/%s' % _platform, os.path.join(_user_path, _platform))
-
-
-def highest_precedence_scope():
-    """Get the scope with highest precedence (prefs will override others)."""
-    return list(config_scopes.values())[-1]
-
-
-def validate_scope(scope):
-    """Ensure that scope is valid, and return a valid scope if it is None.
-
-    This should be used by routines in ``config.py`` to validate
-    scope name arguments, and to determine a default scope where no
-    scope is specified.
-
-    """
-    if scope is None:
-        # default to the scope with highest precedence.
-        return highest_precedence_scope()
-
-    elif scope in config_scopes:
-        return config_scopes[scope]
-
-    else:
-        raise ValueError("Invalid config scope: '%s'. Must be one of %s"
-                         % (scope, config_scopes.keys()))
+class Configuration(object):
+    """A full Spack configuration, from a hierarchy of config files.
+
+    This class makes it easy to add a new scope on top of an existing one.
+    """
+
+    def __init__(self, *scopes):
+        """Initialize a configuration with an initial list of scopes.
+
+        Args:
+            scopes (list of ConfigScope): list of scopes to add to this
+                Configuration, ordered from lowest to highest precedence
+
+        """
+        self.scopes = OrderedDict()
+        for scope in scopes:
+            self.push_scope(scope)
+
+    def push_scope(self, scope):
+        """Add a higher precedence scope to the Configuration."""
+        self.scopes[scope.name] = scope
+
+    def pop_scope(self):
+        """Remove the highest precedence scope and return it."""
+        name, scope = self.scopes.popitem(last=True)
+        return scope
+
+    def highest_precedence_scope(self):
+        """Get the scope with highest precedence (prefs will override others).
+        """
+        return list(self.scopes.values())[-1]
+
+    def _validate_scope(self, scope):
+        """Ensure that scope is valid in this configuration.
+
+        This should be used by routines in ``config.py`` to validate
+        scope name arguments, and to determine a default scope where no
+        scope is specified.
+
+        Raises:
+            ValueError: if ``scope`` is not valid
+
+        Returns:
+            ConfigScope: a valid ConfigScope if ``scope`` is ``None`` or valid
+        """
+        if scope is None:
+            # default to the scope with highest precedence.
+            return self.highest_precedence_scope()
+
+        elif scope in self.scopes:
+            return self.scopes[scope]
+
+        else:
+            raise ValueError("Invalid config scope: '%s'. Must be one of %s"
+                             % (scope, self.scopes.keys()))
+
+    def get_config_filename(self, scope, section):
+        """For some scope and section, get the name of the configuration file.
+        """
+        scope = self._validate_scope(scope)
+        return scope.get_section_filename(section)
+
+    def clear_caches(self):
+        """Clears the caches for configuration files,
+
+        This will cause files to be re-read upon the next request."""
+        for scope in self.scopes.values():
+            scope.clear()
+
+    def update_config(self, section, update_data, scope=None):
+        """Update the configuration file for a particular scope.
+
+        Overwrites contents of a section in a scope with update_data,
+        then writes out the config file.
+
+        update_data should have the top-level section name stripped off
+        (it will be re-added).  Data itself can be a list, dict, or any
+        other yaml-ish structure.
+        """
+        _validate_section_name(section)  # validate section name
+        scope = self._validate_scope(scope)  # get ConfigScope object
+
+        # read only the requested section's data.
+        scope.sections[section] = {section: update_data}
+        scope.write_section(section)
+
+    def get_config(self, section, scope=None):
+        """Get configuration settings for a section.
+
+        If ``scope`` is ``None`` or not provided, return the merged contents
+        of all of Spack's configuration scopes.  If ``scope`` is provided,
+        return only the configuration as specified in that scope.
+
+        This strips off the top-level section name from the YAML data.  That
+        is, for a YAML config file that looks like this::
+
+            config:
+              install_tree: $spack/opt/spack
+              module_roots:
+                lmod: $spack/share/spack/lmod
+
+        ``get_config('config')`` will return::
+
+            { 'install_tree': '$spack/opt/spack',
+              'module_roots': {
+                  'lmod': '$spack/share/spack/lmod'
+              }
+            }
+
+        """
+        _validate_section_name(section)
+        merged_section = syaml.syaml_dict()
+
+        if scope is None:
+            scopes = self.scopes.values()
+        else:
+            scopes = [self._validate_scope(scope)]
+
+        for scope in scopes:
+            # read potentially cached data from the scope.
+            data = scope.get_section(section)
+
+            # Skip empty configs
+            if not data or not isinstance(data, dict):
+                continue
+
+            if section not in data:
+                tty.warn("Skipping bad configuration file: '%s'" % scope.path)
+                continue
+
+            merged_section = _merge_yaml(merged_section, data)
+
+        # no config files -- empty config.
+        if section not in merged_section:
+            return {}
+
+        # take the top key off before returning.
+        return merged_section[section]
+
+    def __iter__(self):
+        """Iterate over scopes in this configuration."""
+        for scope in self.scopes.values():
+            yield scope
+
+    def print_section(self, section):
+        """Print a configuration to stdout."""
+        try:
+            data = syaml.syaml_dict()
+            data[section] = self.get_config(section)
+            syaml.dump(data, stream=sys.stdout, default_flow_style=False)
+        except (yaml.YAMLError, IOError):
+            raise ConfigError("Error reading configuration: %s" % section)
+
+
+def get_configuration():
+    """Construct Spack's standard configuration scopes.
+
+    This is a singleton; it constructs one instance associated with this
+    module and returns it.  It is bundled inside a function so that
+    configuration can be initialized lazily.
+
+    Return:
+        Configuration: object for accessing spack configuration
+
+    """
+    global _configuration
+    if not _configuration:
+        # Each scope can have per-platform overrides in subdirectories of the
+        # configuration directory.
+        platform = spack.architecture.platform().name
+
+        scopes = []
+        for name, path in configuration_paths:
+            # add the regular scope
+            scopes.append(ConfigScope(name, path))
+
+            # add platform-specific scope
+            plat_name = '%s/%s' % (name, platform)
+            plat_path = os.path.join(path, platform)
+            scopes.append(ConfigScope(plat_name, plat_path))
+
+        _configuration = Configuration(*scopes)
+
+    return _configuration
+
+
+#: This is the global singleton configuration for Spack.
+#: TODO: consider making this NOT global and associate it with a spack instance
+_configuration = None
+
+
+#: TODO: consider getting rid of these top-level wrapper functions.
+def get_config(section, scope=None):
+    """Module-level interface for ``Configuration.get_config()``."""
+    config = get_configuration()
+    return config.get_config(section, scope)
+
+
+def update_config(section, update_data, scope=None):
+    """Module-level interface for ``Configuration.update_config()``."""
+    config = get_configuration()
+    return config.update_config(section, update_data, scope)
+
+
+def _validate_section_name(section):
+    """Exit if the section is not a valid section."""
+    if section not in section_schemas:
+        tty.die("Invalid config section: '%s'. Options are: %s"
+                % (section, " ".join(section_schemas.keys())))
+
+
+def _validate_section(data, schema):
+    """Validate data read in from a Spack YAML file.
+
+    This leverages the line information (start_mark, end_mark) stored
+    on Spack YAML structures.
+
+    """
+    try:
+        DefaultSettingValidator(schema).validate(data)
+    except jsonschema.ValidationError as e:
+        raise ConfigFormatError(e, data)


 def _read_config_file(filename, schema):
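A minimal usage sketch of the class above (editorial; the directory path is hypothetical): temporarily layering a scope on top of the lazily built configuration, which is the pattern the command and test code in this commit relies on.

    import spack.config
    from spack.config import ConfigScope

    config = spack.config.get_configuration()   # built on first call, cached afterwards
    config.push_scope(ConfigScope('custom', '/tmp/custom-configs'))  # hypothetical path
    try:
        settings = config.get_config('config')  # 'custom' now has the highest precedence
    finally:
        config.pop_scope()                       # restore the previous scope stack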
@@ -284,7 +443,7 @@ def _read_config_file(filename, schema):
             data = _mark_overrides(syaml.load(f))

         if data:
-            validate_section(data, schema)
+            _validate_section(data, schema)
         return data

     except MarkedYAMLError as e:
@@ -296,14 +455,7 @@ def _read_config_file(filename, schema):
             "Error reading configuration file %s: %s" % (filename, str(e)))


-def clear_config_caches():
-    """Clears the caches for configuration files, which will cause them
-    to be re-read upon the next request"""
-    for scope in config_scopes.values():
-        scope.clear()
-
-
-def override(string):
+def _override(string):
     """Test if a spack YAML string is an override.

     See ``spack_yaml`` for details.  Keys in Spack YAML can end in `::`,
@@ -363,7 +515,7 @@ def they_are(t):
     # Source dict is merged into dest.
     elif they_are(dict):
         for sk, sv in iteritems(source):
-            if override(sk) or sk not in dest:
+            if _override(sk) or sk not in dest:
                 # if sk ended with ::, or if it's new, completely override
                 dest[sk] = copy.copy(sv)
             else:
@@ -376,109 +528,12 @@ def they_are(t):
         return copy.copy(source)


-def get_config(section, scope=None):
-    """Get configuration settings for a section.
-
-    If ``scope`` is ``None`` or not provided, return the merged contents
-    of all of Spack's configuration scopes.  If ``scope`` is provided,
-    return only the confiugration as specified in that scope.
-
-    This off the top-level name from the YAML section.  That is, for a
-    YAML config file that looks like this::
-
-        config:
-          install_tree: $spack/opt/spack
-          module_roots:
-            lmod: $spack/share/spack/lmod
-
-    ``get_config('config')`` will return::
-
-        { 'install_tree': '$spack/opt/spack',
-          'module_roots: {
-              'lmod': '$spack/share/spack/lmod'
-          }
-        }
-
-    """
-    validate_section_name(section)
-    merged_section = syaml.syaml_dict()
-
-    if scope is None:
-        scopes = config_scopes.values()
-    else:
-        scopes = [validate_scope(scope)]
-
-    for scope in scopes:
-        # read potentially cached data from the scope.
-        data = scope.get_section(section)
-
-        # Skip empty configs
-        if not data or not isinstance(data, dict):
-            continue
-
-        if section not in data:
-            tty.warn("Skipping bad configuration file: '%s'" % scope.path)
-            continue
-
-        merged_section = _merge_yaml(merged_section, data)
-
-    # no config files -- empty config.
-    if section not in merged_section:
-        return {}
-
-    # take the top key off before returning.
-    return merged_section[section]
-
-
-def get_config_filename(scope, section):
-    """For some scope and section, get the name of the configuration file"""
-    scope = validate_scope(scope)
-    return scope.get_section_filename(section)
-
-
-def update_config(section, update_data, scope=None):
-    """Update the configuration file for a particular scope.
-
-    Overwrites contents of a section in a scope with update_data,
-    then writes out the config file.
-
-    update_data should have the top-level section name stripped off
-    (it will be re-added).  Data itself can be a list, dict, or any
-    other yaml-ish structure.
-
-    """
-    validate_section_name(section)  # validate section name
-    scope = validate_scope(scope)   # get ConfigScope object from string.
-
-    # read only the requested section's data.
-    scope.sections[section] = {section: update_data}
-    scope.write_section(section)
-
-
-def print_section(section):
-    """Print a configuration to stdout."""
-    try:
-        data = syaml.syaml_dict()
-        data[section] = get_config(section)
-        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
-    except (yaml.YAMLError, IOError):
-        raise ConfigError("Error reading configuration: %s" % section)
-
-
 class ConfigError(SpackError):
-    pass
+    """Superclass for all Spack config related errors."""


 class ConfigFileError(ConfigError):
-    pass
-
-
-def get_path(path, data):
-    if path:
-        return get_path(path[1:], data[path[0]])
-    else:
-        return data
+    """Issue reading or accessing a configuration file."""


 class ConfigFormatError(ConfigError):
@@ -490,6 +545,12 @@ def __init__(self, validation_error, data):
         parent_mark = getattr(validation_error.parent, '_start_mark', None)
         path = [str(s) for s in getattr(validation_error, 'path', None)]

+        def get_path(path, data):
+            if path:
+                return get_path(path[1:], data[path[0]])
+            else:
+                return data
+
         # Try really hard to get the parent (which sometimes is not
         # set) This digs it out of the validated structure if it's not
         # on the validation_error.
@@ -36,8 +36,9 @@ def pre_run():


 def check_compiler_yaml_version():
-    config_scopes = spack.config.config_scopes
-    for scope in config_scopes.values():
+    config = spack.config.get_configuration()
+
+    for scope in config:
         file_name = os.path.join(scope.path, 'compilers.yaml')
         data = None
         if os.path.isfile(file_name):
@@ -22,7 +22,6 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import pytest
 from six import iteritems

 import spack.spec
@@ -30,24 +29,23 @@
 from spack.compiler import _get_versioned_tuple


-@pytest.mark.usefixtures('config')
-class TestCompilers(object):
-
-    def test_get_compiler_duplicates(self):
-        # In this case there is only one instance of the specified compiler in
-        # the test configuration (so it is not actually a duplicate), but the
-        # method behaves the same.
-        cfg_file_to_duplicates = compilers.get_compiler_duplicates(
-            'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
-        assert len(cfg_file_to_duplicates) == 1
-        cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
-        assert len(duplicates) == 1
-
-    def test_all_compilers(self):
-        all_compilers = compilers.all_compilers()
-        filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
-        filtered = [x for x in filtered if x.operating_system == 'SuSE11']
-        assert len(filtered) == 1
+def test_get_compiler_duplicates(config):
+    # In this case there is only one instance of the specified compiler in
+    # the test configuration (so it is not actually a duplicate), but the
+    # method behaves the same.
+    cfg_file_to_duplicates = compilers.get_compiler_duplicates(
+        'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
+
+    assert len(cfg_file_to_duplicates) == 1
+    cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
+    assert len(duplicates) == 1
+
+
+def test_all_compilers(config):
+    all_compilers = compilers.all_compilers()
+    filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
+    filtered = [x for x in filtered if x.operating_system == 'SuSE11']
+    assert len(filtered) == 1


 def test_version_detection_is_empty():
@@ -24,26 +24,23 @@
 ##############################################################################
 import pytest

-import spack
-import spack.util.spack_yaml as syaml
-from spack.spec import Spec
 import spack.package_prefs
+import spack.util.spack_yaml as syaml
+from spack.config import ConfigScope
+from spack.spec import Spec


 @pytest.fixture()
 def concretize_scope(config, tmpdir):
     """Adds a scope for concretization preferences"""
     tmpdir.ensure_dir('concretize')
-    spack.config.ConfigScope(
-        'concretize', str(tmpdir.join('concretize'))
-    )
-    yield
-    # This is kind of weird, but that's how config scopes are
-    # set in ConfigScope.__init__
-    spack.config.config_scopes.pop('concretize')
-    spack.package_prefs.PackagePrefs.clear_caches()
+    config.push_scope(
+        ConfigScope('concretize', str(tmpdir.join('concretize'))))

-    # reset provider index each time, too
+    yield
+
+    config.pop_scope()
+    spack.package_prefs.PackagePrefs.clear_caches()
     spack.repo._provider_index = None

@@ -33,9 +33,84 @@
 import spack.paths
 import spack.config
 from spack.util.path import canonicalize_path
-from spack.util.ordereddict import OrderedDict

-# Some sample compiler config data
+
+# sample config data
+config_low = {
+    'config': {
+        'install_tree': 'install_tree_path',
+        'build_stage': ['path1', 'path2', 'path3']}}
+
+config_override_all = {
+    'config:': {
+        'install_tree:': 'override_all'}}
+
+config_override_key = {
+    'config': {
+        'install_tree:': 'override_key'}}
+
+config_merge_list = {
+    'config': {
+        'build_stage': ['patha', 'pathb']}}
+
+config_override_list = {
+    'config': {
+        'build_stage:': ['patha', 'pathb']}}
+
+
+@pytest.fixture()
+def config(tmpdir):
+    """Mocks the configuration scope."""
+    real_configuration = spack.config._configuration
+
+    scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
+              for name in ['low', 'high']]
+    spack.config._configuration = spack.config.Configuration(*scopes)
+
+    yield
+
+    spack.config._configuration = real_configuration
+
+
+@pytest.fixture()
+def write_config_file(tmpdir):
+    """Returns a function that writes a config file."""
+    def _write(config, data, scope):
+        config_yaml = tmpdir.join(scope, config + '.yaml')
+        config_yaml.ensure()
+        with config_yaml.open('w') as f:
+            yaml.dump(data, f)
+    return _write
+
+
+def check_compiler_config(comps, *compiler_names):
+    """Check that named compilers in comps match Spack's config."""
+    config = spack.config.get_config('compilers')
+    compiler_list = ['cc', 'cxx', 'f77', 'fc']
+    flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
+                 'ldflags', 'ldlibs']
+    param_list = ['modules', 'paths', 'spec', 'operating_system']
+    for compiler in config:
+        conf = compiler['compiler']
+        if conf['spec'] in compiler_names:
+            comp = next((c['compiler'] for c in comps if
+                         c['compiler']['spec'] == conf['spec']), None)
+            if not comp:
+                raise ValueError('Bad config spec')
+            for p in param_list:
+                assert conf[p] == comp[p]
+            for f in flag_list:
+                expected = comp.get('flags', {}).get(f, None)
+                actual = conf.get('flags', {}).get(f, None)
+                assert expected == actual
+            for c in compiler_list:
+                expected = comp['paths'][c]
+                actual = conf['paths'][c]
+                assert expected == actual
+
+
+#
+# Some sample compiler config data and tests.
+#
 a_comps = {
     'compilers': [
         {'compiler': {
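The `config` fixture above works by swapping the `spack.config._configuration` singleton. For non-pytest code the same idea could be written as a context manager; this is only an editorial sketch and is not part of this commit:

    import contextlib
    import spack.config

    @contextlib.contextmanager
    def use_configuration(*scopes):
        """Temporarily replace Spack's global configuration with *scopes*."""
        saved = spack.config._configuration
        spack.config._configuration = spack.config.Configuration(*scopes)
        try:
            yield spack.config._configuration
        finally:
            spack.config._configuration = saved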
@@ -140,32 +215,110 @@
     ]
 }

-# Some Sample repo data
+
+@pytest.fixture()
+def compiler_specs():
+    """Returns a couple of compiler specs needed for the tests"""
+    a = [ac['compiler']['spec'] for ac in a_comps['compilers']]
+    b = [bc['compiler']['spec'] for bc in b_comps['compilers']]
+    CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
+    return CompilerSpecs(a=a, b=b)
+
+
+def test_write_key_in_memory(config, compiler_specs):
+    # Write b_comps "on top of" a_comps.
+    spack.config.update_config(
+        'compilers', a_comps['compilers'], scope='low'
+    )
+    spack.config.update_config(
+        'compilers', b_comps['compilers'], scope='high'
+    )
+
+    # Make sure the config looks how we expect.
+    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+
+
+def test_write_key_to_disk(config, compiler_specs):
+    # Write b_comps "on top of" a_comps.
+    spack.config.update_config(
+        'compilers', a_comps['compilers'], scope='low'
+    )
+    spack.config.update_config(
+        'compilers', b_comps['compilers'], scope='high'
+    )
+
+    # Clear caches so we're forced to read from disk.
+    spack.config.get_configuration().clear_caches()
+
+    # Same check again, to ensure consistency.
+    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+
+
+def test_write_to_same_priority_file(config, compiler_specs):
+    # Write b_comps in the same file as a_comps.
+    spack.config.update_config(
+        'compilers', a_comps['compilers'], scope='low'
+    )
+    spack.config.update_config(
+        'compilers', b_comps['compilers'], scope='low'
+    )
+
+    # Clear caches so we're forced to read from disk.
+    spack.config.get_configuration().clear_caches()
+
+    # Same check again, to ensure consistency.
+    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+    check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+
+
+#
+# Sample repo data and tests
+#
 repos_low = {'repos': ["/some/path"]}
 repos_high = {'repos': ["/some/other/path"]}


-# sample config data
-config_low = {
-    'config': {
-        'install_tree': 'install_tree_path',
-        'build_stage': ['path1', 'path2', 'path3']}}
-
-config_override_all = {
-    'config:': {
-        'install_tree:': 'override_all'}}
-
-config_override_key = {
-    'config': {
-        'install_tree:': 'override_key'}}
-
-config_merge_list = {
-    'config': {
-        'build_stage': ['patha', 'pathb']}}
-
-config_override_list = {
-    'config': {
-        'build_stage:': ['patha', 'pathb']}}
+# repos
+def test_write_list_in_memory(config):
+    spack.config.update_config('repos', repos_low['repos'], scope='low')
+    spack.config.update_config('repos', repos_high['repos'], scope='high')
+
+    config = spack.config.get_config('repos')
+    assert config == repos_high['repos'] + repos_low['repos']
+
+
+def test_substitute_config_variables(config):
+    prefix = spack.paths.prefix.lstrip('/')
+
+    assert os.path.join(
+        '/foo/bar/baz', prefix
+    ) == canonicalize_path('/foo/bar/baz/$spack')
+
+    assert os.path.join(
+        spack.paths.prefix, 'foo/bar/baz'
+    ) == canonicalize_path('$spack/foo/bar/baz/')
+
+    assert os.path.join(
+        '/foo/bar/baz', prefix, 'foo/bar/baz'
+    ) == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')
+
+    assert os.path.join(
+        '/foo/bar/baz', prefix
+    ) == canonicalize_path('/foo/bar/baz/${spack}')
+
+    assert os.path.join(
+        spack.paths.prefix, 'foo/bar/baz'
+    ) == canonicalize_path('${spack}/foo/bar/baz/')
+
+    assert os.path.join(
+        '/foo/bar/baz', prefix, 'foo/bar/baz'
+    ) == canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/')
+
+    assert os.path.join(
+        '/foo/bar/baz', prefix, 'foo/bar/baz'
+    ) != canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')
+

 packages_merge_low = {
@@ -212,208 +365,59 @@ def test_merge_with_defaults(config, write_config_file):
     assert cfg['baz']['version'] == ['c']


-def check_compiler_config(comps, *compiler_names):
-    """Check that named compilers in comps match Spack's config."""
-    config = spack.config.get_config('compilers')
-    compiler_list = ['cc', 'cxx', 'f77', 'fc']
-    flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
-                 'ldflags', 'ldlibs']
-    param_list = ['modules', 'paths', 'spec', 'operating_system']
-    for compiler in config:
-        conf = compiler['compiler']
-        if conf['spec'] in compiler_names:
-            comp = next((c['compiler'] for c in comps if
-                         c['compiler']['spec'] == conf['spec']), None)
-            if not comp:
-                raise ValueError('Bad config spec')
-            for p in param_list:
-                assert conf[p] == comp[p]
-            for f in flag_list:
-                expected = comp.get('flags', {}).get(f, None)
-                actual = conf.get('flags', {}).get(f, None)
-                assert expected == actual
-            for c in compiler_list:
-                expected = comp['paths'][c]
-                actual = conf['paths'][c]
-                assert expected == actual
-
-
-@pytest.fixture()
-def config(tmpdir):
-    """Mocks the configuration scope."""
-    spack.config.clear_config_caches()
-    real_scope = spack.config.config_scopes
-    spack.config.config_scopes = OrderedDict()
-    for priority in ['low', 'high']:
-        spack.config.ConfigScope(priority, str(tmpdir.join(priority)))
-    Config = collections.namedtuple('Config', ['real', 'mock'])
-    yield Config(real=real_scope, mock=spack.config.config_scopes)
-    spack.config.config_scopes = real_scope
-    spack.config.clear_config_caches()
-
-
-@pytest.fixture()
-def write_config_file(tmpdir):
-    """Returns a function that writes a config file."""
-    def _write(config, data, scope):
-        config_yaml = tmpdir.join(scope, config + '.yaml')
-        config_yaml.ensure()
-        with config_yaml.open('w') as f:
-            yaml.dump(data, f)
-    return _write
-
-
-@pytest.fixture()
-def compiler_specs():
-    """Returns a couple of compiler specs needed for the tests"""
-    a = [ac['compiler']['spec'] for ac in a_comps['compilers']]
-    b = [bc['compiler']['spec'] for bc in b_comps['compilers']]
-    CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
-    return CompilerSpecs(a=a, b=b)
-
-
-@pytest.mark.usefixtures('config')
-class TestConfig(object):
-
-    def test_write_list_in_memory(self):
-        spack.config.update_config('repos', repos_low['repos'], scope='low')
-        spack.config.update_config('repos', repos_high['repos'], scope='high')
-
-        config = spack.config.get_config('repos')
-        assert config == repos_high['repos'] + repos_low['repos']
-
-    def test_write_key_in_memory(self, compiler_specs):
-        # Write b_comps "on top of" a_comps.
-        spack.config.update_config(
-            'compilers', a_comps['compilers'], scope='low'
-        )
-        spack.config.update_config(
-            'compilers', b_comps['compilers'], scope='high'
-        )
-        # Make sure the config looks how we expect.
-        check_compiler_config(a_comps['compilers'], *compiler_specs.a)
-        check_compiler_config(b_comps['compilers'], *compiler_specs.b)
-
-    def test_write_key_to_disk(self, compiler_specs):
-        # Write b_comps "on top of" a_comps.
-        spack.config.update_config(
-            'compilers', a_comps['compilers'], scope='low'
-        )
-        spack.config.update_config(
-            'compilers', b_comps['compilers'], scope='high'
-        )
-        # Clear caches so we're forced to read from disk.
-        spack.config.clear_config_caches()
-        # Same check again, to ensure consistency.
-        check_compiler_config(a_comps['compilers'], *compiler_specs.a)
-        check_compiler_config(b_comps['compilers'], *compiler_specs.b)
-
-    def test_write_to_same_priority_file(self, compiler_specs):
-        # Write b_comps in the same file as a_comps.
-        spack.config.update_config(
-            'compilers', a_comps['compilers'], scope='low'
-        )
-        spack.config.update_config(
-            'compilers', b_comps['compilers'], scope='low'
-        )
-        # Clear caches so we're forced to read from disk.
-        spack.config.clear_config_caches()
-        # Same check again, to ensure consistency.
-        check_compiler_config(a_comps['compilers'], *compiler_specs.a)
-        check_compiler_config(b_comps['compilers'], *compiler_specs.b)
-
-    def check_canonical(self, var, expected):
-        """Ensure that <expected> is substituted properly for <var> in strings
-        containing <var> in various positions."""
-        path = '/foo/bar/baz'
-
-        self.assertEqual(canonicalize_path(var + path),
-                         expected + path)
-
-        self.assertEqual(canonicalize_path(path + var),
-                         path + '/' + expected)
-
-        self.assertEqual(canonicalize_path(path + var + path),
-                         expected + path)
-
-    def test_substitute_config_variables(self):
-        prefix = spack.paths.prefix.lstrip('/')
-
-        assert os.path.join(
-            '/foo/bar/baz', prefix
-        ) == canonicalize_path('/foo/bar/baz/$spack')
-
-        assert os.path.join(
-            spack.paths.prefix, 'foo/bar/baz'
-        ) == canonicalize_path('$spack/foo/bar/baz/')
-
-        assert os.path.join(
-            '/foo/bar/baz', prefix, 'foo/bar/baz'
-        ) == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')
-
-        assert os.path.join(
-            '/foo/bar/baz', prefix
-        ) == canonicalize_path('/foo/bar/baz/${spack}')
-
-        assert os.path.join(
-            spack.paths.prefix, 'foo/bar/baz'
-        ) == canonicalize_path('${spack}/foo/bar/baz/')
-
-        assert os.path.join(
-            '/foo/bar/baz', prefix, 'foo/bar/baz'
-        ) == canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/')
-
-        assert os.path.join(
-            '/foo/bar/baz', prefix, 'foo/bar/baz'
-        ) != canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')
-
-    def test_substitute_user(self):
-        user = getpass.getuser()
-        assert '/foo/bar/' + user + '/baz' == canonicalize_path(
-            '/foo/bar/$user/baz'
-        )
-
-    def test_substitute_tempdir(self):
-        tempdir = tempfile.gettempdir()
-        assert tempdir == canonicalize_path('$tempdir')
-        assert tempdir + '/foo/bar/baz' == canonicalize_path(
-            '$tempdir/foo/bar/baz'
-        )
-
-    def test_read_config(self, write_config_file):
-        write_config_file('config', config_low, 'low')
-        assert spack.config.get_config('config') == config_low['config']
-
-    def test_read_config_override_all(self, write_config_file):
-        write_config_file('config', config_low, 'low')
-        write_config_file('config', config_override_all, 'high')
-        assert spack.config.get_config('config') == {
-            'install_tree': 'override_all'
-        }
-
-    def test_read_config_override_key(self, write_config_file):
-        write_config_file('config', config_low, 'low')
-        write_config_file('config', config_override_key, 'high')
-        assert spack.config.get_config('config') == {
-            'install_tree': 'override_key',
-            'build_stage': ['path1', 'path2', 'path3']
-        }
-
-    def test_read_config_merge_list(self, write_config_file):
-        write_config_file('config', config_low, 'low')
-        write_config_file('config', config_merge_list, 'high')
-        assert spack.config.get_config('config') == {
-            'install_tree': 'install_tree_path',
-            'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
-        }
+def test_substitute_user(config):
+    user = getpass.getuser()
+    assert '/foo/bar/' + user + '/baz' == canonicalize_path(
+        '/foo/bar/$user/baz'
+    )
+
+
+def test_substitute_tempdir(config):
+    tempdir = tempfile.gettempdir()
+    assert tempdir == canonicalize_path('$tempdir')
+    assert tempdir + '/foo/bar/baz' == canonicalize_path(
+        '$tempdir/foo/bar/baz'
+    )
+
+
+def test_read_config(config, write_config_file):
+    write_config_file('config', config_low, 'low')
+    assert spack.config.get_config('config') == config_low['config']
+
+
+def test_read_config_override_all(config, write_config_file):
+    write_config_file('config', config_low, 'low')
+    write_config_file('config', config_override_all, 'high')
+    assert spack.config.get_config('config') == {
+        'install_tree': 'override_all'
+    }
+
+
+def test_read_config_override_key(config, write_config_file):
+    write_config_file('config', config_low, 'low')
+    write_config_file('config', config_override_key, 'high')
+    assert spack.config.get_config('config') == {
+        'install_tree': 'override_key',
+        'build_stage': ['path1', 'path2', 'path3']
+    }
+
+
+def test_read_config_merge_list(config, write_config_file):
+    write_config_file('config', config_low, 'low')
+    write_config_file('config', config_merge_list, 'high')
+    assert spack.config.get_config('config') == {
+        'install_tree': 'install_tree_path',
+        'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
+    }
+
+
+def test_read_config_override_list(config, write_config_file):
+    write_config_file('config', config_low, 'low')
+    write_config_file('config', config_override_list, 'high')
+    assert spack.config.get_config('config') == {
+        'install_tree': 'install_tree_path',
+        'build_stage': ['patha', 'pathb']
+    }
|
|
||||||
def test_read_config_override_list(self, write_config_file):
|
|
||||||
write_config_file('config', config_low, 'low')
|
|
||||||
write_config_file('config', config_override_list, 'high')
|
|
||||||
assert spack.config.get_config('config') == {
|
|
||||||
'install_tree': 'install_tree_path',
|
|
||||||
'build_stage': ['patha', 'pathb']
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_keys_are_ordered():
|
def test_keys_are_ordered():
|
||||||
|
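(Aside on the expected values asserted in the new read tests above: they encode the scope-merging rule these tests rely on, where a higher-priority scope's scalar values replace lower-priority ones and its lists are prepended to the lower-priority lists. The snippet below is a standalone sketch of that rule only, under those assumptions; `merge_scopes` is a hypothetical helper, not spack.config's actual merge code.)

# Standalone sketch only -- not spack.config's real merge implementation.
# Assumption drawn from the asserts above: scalars from the higher-priority
# scope win, and its lists are prepended to the lower-priority lists.
def merge_scopes(low, high):
    merged = dict(low)
    for key, value in high.items():
        if isinstance(value, list) and isinstance(merged.get(key), list):
            merged[key] = value + merged[key]  # high-priority entries first
        else:
            merged[key] = value                # high-priority scalar wins
    return merged


low = {'install_tree': 'install_tree_path',
       'build_stage': ['path1', 'path2', 'path3']}
high = {'build_stage': ['patha', 'pathb']}

# Mirrors the expectation in test_read_config_merge_list above.
assert merge_scopes(low, high) == {
    'install_tree': 'install_tree_path',
    'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3'],
}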
@@ -216,17 +216,21 @@ def configuration_dir(tmpdir_factory, linux_os):
    directory path.
    """
    tmpdir = tmpdir_factory.mktemp('configurations')

    # Name of the yaml files in the test/data folder
    test_path = py.path.local(spack.paths.test_path)
    compilers_yaml = test_path.join('data', 'compilers.yaml')
    packages_yaml = test_path.join('data', 'packages.yaml')
    config_yaml = test_path.join('data', 'config.yaml')

    # Create temporary 'site' and 'user' folders
    tmpdir.ensure('site', dir=True)
    tmpdir.ensure('user', dir=True)

    # Copy the configurations that don't need further work
    packages_yaml.copy(tmpdir.join('site', 'packages.yaml'))
    config_yaml.copy(tmpdir.join('site', 'config.yaml'))

    # Write the one that needs modifications
    content = ''.join(compilers_yaml.read()).format(linux_os)
    t = tmpdir.join('site', 'compilers.yaml')
@@ -239,18 +243,20 @@ def config(configuration_dir):
    """Hooks the mock configuration files into spack.config"""
    # Set up a mock config scope
    spack.package_prefs.PackagePrefs.clear_caches()
-    spack.config.clear_config_caches()
-    real_scope = spack.config.config_scopes
-    spack.config.config_scopes = spack.util.ordereddict.OrderedDict()
-    spack.config.ConfigScope('site', str(configuration_dir.join('site')))
-    spack.config.ConfigScope('system', str(configuration_dir.join('system')))
-    spack.config.ConfigScope('user', str(configuration_dir.join('user')))
-    Config = collections.namedtuple('Config', ['real', 'mock'])
-
-    yield Config(real=real_scope, mock=spack.config.config_scopes)
-
-    spack.config.config_scopes = real_scope
-    spack.config.clear_config_caches()
+    real_configuration = spack.config._configuration
+    print real_configuration
+
+    scopes = [
+        spack.config.ConfigScope(name, str(configuration_dir.join(name)))
+        for name in ['site', 'system', 'user']]
+    config = spack.config.Configuration(*scopes)
+    spack.config._configuration = config
+
+    yield config
+
+    spack.config._configuration = real_configuration
    spack.package_prefs.PackagePrefs.clear_caches()
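(Aside: the rewritten `config` fixture above shows the pattern this commit moves to: configuration state lives in a `spack.config.Configuration` built from `ConfigScope` instances, and tests swap it in through the module-level `_configuration` handle, restoring the original afterwards. Below is a minimal sketch of that swap wrapped in a context manager, assuming the `Configuration`, `ConfigScope`, and `_configuration` names from the diff; `use_configuration` itself is a hypothetical helper, not part of this commit.)

# Sketch only: wraps the swap/restore pattern from the `config` fixture in a
# reusable context manager. `use_configuration` is hypothetical; Configuration,
# ConfigScope, and _configuration are the names introduced by this commit.
import contextlib

import spack.config


@contextlib.contextmanager
def use_configuration(*named_dirs):
    """Temporarily install a Configuration built from (name, path) scopes."""
    real_configuration = spack.config._configuration

    scopes = [spack.config.ConfigScope(name, path)
              for name, path in named_dirs]
    spack.config._configuration = spack.config.Configuration(*scopes)
    try:
        yield spack.config._configuration
    finally:
        # Restore the original configuration even if the caller raises.
        spack.config._configuration = real_configuration

A caller would then write, for example, `with use_configuration(('site', '/tmp/site'), ('user', '/tmp/user')):` around the code under test.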