config: simplify configuration API to just get and set

- replace `spack.config.get_configuration()` with `spack.config.config()`

- replace `get_config`/`update_config` with `get`, `set`

- add a path syntax that can be used to refer to specific config options
  without first getting the entire configuration dict (illustrated in the
  sketch after this list)

- update usages of `get_config` and `update_config` to use `get` and `set`
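
As a quick orientation before the diff, here is a minimal sketch of the new
calls (not part of the commit itself; the option names and default values are
illustrative, taken from the hunks below):

```python
import multiprocessing

import spack.config

# Read a whole section as a dict, as before:
mirrors = spack.config.get('mirrors')

# Read a single nested option with the colon-separated path syntax;
# the second argument is an optional default:
verify_ssl = spack.config.get('config:verify_ssl', True)
build_jobs = spack.config.get('config:build_jobs', multiprocessing.cpu_count())

# Write a single option; 'scope' picks the configuration scope to modify:
spack.config.set('config:build_jobs', 8, scope='user')
```

The optional `scope` argument is the same scope name the old
`get_config`/`update_config` calls accepted.
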
Todd Gamblin authored on 2018-04-14 19:51:29 -07:00; committed by scheibelp
parent c9ea957563
commit 863ccd5e94
31 changed files with 207 additions and 173 deletions

View File

@ -41,35 +41,33 @@
from spack.util.path import canonicalize_path
_config = spack.config.get_config('config')
# TODO: get this out of __init__.py
binary_cache_retrieved_specs = set()
#: Directories where to search for templates
template_dirs = spack.config.get_config('config')['template_dirs']
template_dirs = spack.config.get('config:template_dirs')
template_dirs = [canonicalize_path(x) for x in template_dirs]
#: If this is enabled, tools that use SSL should not verify
#: certificates. e.g., curl should use the -k option.
insecure = not _config.get('verify_ssl', True)
insecure = not spack.config.get('config:verify_ssl', True)
#: Whether spack should allow installation of unsafe versions of software.
#: "Unsafe" versions are ones it doesn't have a checksum for.
do_checksum = _config.get('checksum', True)
do_checksum = spack.config.get('config:checksum', True)
# If this is True, spack will not clean the environment to remove
# potentially harmful variables before builds.
dirty = _config.get('dirty', False)
dirty = spack.config.get('config:dirty', False)
#: The number of jobs to use when building in parallel.
#: By default, use all cores on the machine.
build_jobs = _config.get('build_jobs', multiprocessing.cpu_count())
build_jobs = spack.config.get('config:build_jobs', multiprocessing.cpu_count())
#-----------------------------------------------------------------------------

View File

@ -364,7 +364,7 @@ def download_tarball(spec):
Download binary tarball for given package into stage area
Return True if successful
"""
mirrors = spack.config.get_config('mirrors')
mirrors = spack.config.get('mirrors')
if len(mirrors) == 0:
tty.die("Please add a spack mirror to allow " +
"download of pre-compiled packages.")
@ -538,7 +538,7 @@ def get_specs(force=False):
previously_retrieved = spack.binary_cache_retrieved_specs
return previously_retrieved
mirrors = spack.config.get_config('mirrors')
mirrors = spack.config.get('mirrors')
if len(mirrors) == 0:
tty.warn("No Spack mirrors are currently configured")
return {}
@ -588,7 +588,7 @@ def get_keys(install=False, trust=False, force=False):
"""
Get pgp public keys available on mirror
"""
mirrors = spack.config.get_config('mirrors')
mirrors = spack.config.get('mirrors')
if len(mirrors) == 0:
tty.die("Please add a spack mirror to allow " +
"download of build caches.")

View File

@ -44,8 +44,7 @@ def misc_cache():
global _misc_cache
if _misc_cache is None:
config = spack.config.get_config('config')
path = config.get('misc_cache')
path = spack.config.get('config:misc_cache')
if not path:
path = os.path.join(spack.paths.user_config_path, 'cache')
path = canonicalize_path(path)
@ -63,8 +62,7 @@ def fetch_cache():
global _fetch_cache
if _fetch_cache is None:
config = spack.config.get_config('config')
path = config.get('source_cache')
path = spack.config.get('config:source_cache')
if not path:
path = os.path.join(spack.paths.var_path, "cache")
path = canonicalize_path(path)

View File

@ -49,8 +49,7 @@ def default_modify_scope():
Commands that modify configuration by default modify the *highest*
priority scope.
"""
config = spack.config.get_configuration()
return config.highest_precedence_scope().name
return spack.config.config().highest_precedence_scope().name
def default_list_scope():

View File

@ -46,7 +46,8 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
scopes = spack.config.get_configuration().scopes
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Find
find_parser = sp.add_parser(
@ -54,7 +55,7 @@ def setup_parser(subparser):
help='search the system for compilers to add to Spack configuration')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
@ -66,14 +67,14 @@ def setup_parser(subparser):
help='remove ALL compilers that match spec')
remove_parser.add_argument('compiler_spec')
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# List
list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
@ -81,7 +82,7 @@ def setup_parser(subparser):
info_parser = sp.add_parser('info', help='show compiler paths')
info_parser.add_argument('compiler_spec')
info_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
@ -114,7 +115,7 @@ def compiler_find(args):
n = len(new_compilers)
s = 's' if n > 1 else ''
config = spack.config.get_configuration()
config = spack.config.config()
filename = config.get_config_filename(args.scope, 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)

View File

@ -31,9 +31,11 @@
def setup_parser(subparser):
scopes = spack.config.get_configuration().scopes
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
subparser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
help="configuration scope to read/modify")

View File

@ -30,10 +30,12 @@
def setup_parser(subparser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# User can only choose one
subparser.add_argument(
'--scope', choices=spack.config.get_configuration().scopes,
metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
help="configuration scope to read/modify")
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
@ -54,8 +56,7 @@ def setup_parser(subparser):
def config_get(args):
config = spack.config.get_configuration()
config.print_section(args.section)
spack.config.config().print_section(args.section)
def config_edit(args):
@ -67,7 +68,7 @@ def config_edit(args):
if not args.section:
args.section = None
config = spack.config.get_configuration()
config = spack.config.config()
config_file = config.get_config_filename(args.scope, args.section)
spack.editor(config_file)

View File

@ -67,7 +67,9 @@ def setup_parser(subparser):
const=1, default=0,
help="only fetch one 'preferred' version per spec, not all known")
scopes = spack.config.get_configuration().scopes
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Add
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
@ -75,7 +77,7 @@ def setup_parser(subparser):
add_parser.add_argument(
'url', help="url of mirror directory from 'spack mirror create'")
add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
@ -84,14 +86,14 @@ def setup_parser(subparser):
help=mirror_remove.__doc__)
remove_parser.add_argument('name')
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
# List
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
@ -102,7 +104,7 @@ def mirror_add(args):
if url.startswith('/'):
url = 'file://' + url
mirrors = spack.config.get_config('mirrors', scope=args.scope)
mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors:
mirrors = syaml_dict()
@ -116,14 +118,14 @@ def mirror_add(args):
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url))
mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
spack.config.set('mirrors', mirrors, scope=args.scope)
def mirror_remove(args):
"""Remove a mirror by name."""
name = args.name
mirrors = spack.config.get_config('mirrors', scope=args.scope)
mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors:
mirrors = syaml_dict()
@ -131,13 +133,13 @@ def mirror_remove(args):
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
spack.config.set('mirrors', mirrors, scope=args.scope)
tty.msg("Removed mirror %s with url %s" % (name, old_value))
def mirror_list(args):
"""Print out available mirrors to the console."""
mirrors = spack.config.get_config('mirrors', scope=args.scope)
mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors:
tty.msg("No mirrors configured.")
return

View File

@ -39,7 +39,8 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.get_configuration().scopes
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Create
create_parser = sp.add_parser('create', help=repo_create.__doc__)
@ -52,7 +53,7 @@ def setup_parser(subparser):
# List
list_parser = sp.add_parser('list', help=repo_list.__doc__)
list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(),
help="configuration scope to read from")
@ -61,7 +62,7 @@ def setup_parser(subparser):
add_parser.add_argument(
'path', help="path to a Spack package repository directory")
add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
@ -72,7 +73,7 @@ def setup_parser(subparser):
'path_or_namespace',
help="path or namespace of a Spack package repository")
remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar,
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(),
help="configuration scope to modify")
@ -104,7 +105,7 @@ def repo_add(args):
repo = Repo(canon_path)
# If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope)
repos = spack.config.get('repos', scope=args.scope)
if not repos:
repos = []
@ -112,13 +113,13 @@ def repo_add(args):
tty.die("Repository is already registered with Spack: %s" % path)
repos.insert(0, canon_path)
spack.config.update_config('repos', repos, args.scope)
spack.config.set('repos', repos, args.scope)
tty.msg("Added repo with namespace '%s'." % repo.namespace)
def repo_remove(args):
"""Remove a repository from Spack's configuration."""
repos = spack.config.get_config('repos', args.scope)
repos = spack.config.get('repos', scope=args.scope)
path_or_namespace = args.path_or_namespace
# If the argument is a path, remove that repository from config.
@ -127,7 +128,7 @@ def repo_remove(args):
repo_canon_path = canonicalize_path(repo_path)
if canon_path == repo_canon_path:
repos.remove(repo_path)
spack.config.update_config('repos', repos, args.scope)
spack.config.set('repos', repos, args.scope)
tty.msg("Removed repository %s" % repo_path)
return
@ -137,7 +138,7 @@ def repo_remove(args):
repo = Repo(path)
if repo.namespace == path_or_namespace:
repos.remove(path)
spack.config.update_config('repos', repos, args.scope)
spack.config.set('repos', repos, args.scope)
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
@ -150,7 +151,7 @@ def repo_remove(args):
def repo_list(args):
"""Show registered repositories and their namespaces."""
roots = spack.config.get_config('repos', args.scope)
roots = spack.config.get('repos', scope=args.scope)
repos = []
for r in roots:
try:

View File

@ -88,9 +88,9 @@ def init_compiler_config():
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
spack.config.update_config('compilers', compilers_dict, scope=scope)
spack.config.set('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope)
config = spack.config.get('compilers', scope=scope)
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
@ -98,15 +98,15 @@ def init_compiler_config():
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
config = spack.config.get_config('compilers', scope=scope)
config = spack.config.get('compilers', scope=scope)
elif scope == 'user':
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site')
sys_config = spack.config.get_config('compilers', scope='system')
site_config = spack.config.get('compilers', scope='site')
sys_config = spack.config.get('compilers', scope='system')
if not site_config and not sys_config:
init_compiler_config()
config = spack.config.get_config('compilers', scope=scope)
config = spack.config.get('compilers', scope=scope)
return config
elif config:
return config
@ -116,10 +116,10 @@ def init_compiler_config():
def compiler_config_files():
config_files = list()
config = spack.config.get_configuration()
config = spack.config.config()
for scope in config.file_scopes:
name = scope.name
compiler_config = config.get_config('compilers', scope=name)
compiler_config = config.get('compilers', scope=name)
if compiler_config:
config_files.append(config.get_config_filename(name, 'compilers'))
return config_files
@ -137,7 +137,7 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
compiler_config.append(_to_dict(compiler))
global _cache_config_file
_cache_config_file = compiler_config
spack.config.update_config('compilers', compiler_config, scope)
spack.config.set('compilers', compiler_config, scope=scope)
@_auto_compiler_spec
@ -162,7 +162,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.update_config('compilers', filtered_compiler_config, scope)
spack.config.set('compilers', filtered_compiler_config, scope=scope)
def all_compilers_config(scope=None, init_config=True):
@ -339,7 +339,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.get_configuration()
config = spack.config.config()
scope_to_compilers = {}
for scope in config.scopes:

View File

@ -109,10 +109,6 @@
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
#: config scopes only used by Spack internally
internal_scopes = ['commands']
def _extend_with_default(validator_class):
"""Add support for the 'default' attr for properties and patternProperties.
@ -379,6 +375,54 @@ def get_config(self, section, scope=None):
# take the top key off before returning.
return merged_section[section]
def get(self, path, default=None, scope=None):
"""Get a config section or a single value from one.
Accepts a path syntax that allows us to grab nested config map
entries. Getting the 'config' section would look like::
spack.config.get('config')
and the ``dirty`` option in the ``config`` section would be::
spack.config.get('config:dirty')
We use ``:`` as the separator, like YAML objects.
"""
# TODO: Currently only handles maps. Think about lists if needed.
section, _, rest = path.partition(':')
value = self.get_config(section, scope=scope)
if not rest:
return value
parts = rest.split(':')
while parts:
key = parts.pop(0)
value = value.get(key, default)
return value
def set(self, path, value, scope=None):
"""Convenience function for setting single values in config files.
Accepts the path syntax described in ``get()``.
"""
section, _, rest = path.partition(':')
if not rest:
self.update_config(section, value, scope=scope)
else:
section_data = self.get_config(section, scope=scope)
parts = rest.split(':')
data = section_data
while len(parts) > 1:
key = parts.pop(0)
data = data[key]
data[parts[0]] = value
self.update_config(section, section_data, scope=scope)
def __iter__(self):
"""Iterate over scopes in this configuration."""
for scope in self.scopes.values():
@ -394,15 +438,15 @@ def print_section(self, section):
raise ConfigError("Error reading configuration: %s" % section)
def get_configuration():
"""This constructs Spack's standard configuration scopes
def config():
"""Singleton Configuration instance.
This is a singleton; it constructs one instance associated with this
module and returns it. It is bundled inside a function so that
configuration can be initialized lazily.
This constructs one instance associated with this module and returns
it. It is bundled inside a function so that configuration can be
initialized lazily.
Return:
Configuration: object for accessing spack configuration
(Configuration): object for accessing spack configuration
"""
global _configuration
@ -423,7 +467,7 @@ def get_configuration():
# we make a special scope for spack commands so that they can
# override configuration options.
_configuration.push_scope(InternalConfigScope('commands'))
_configuration.push_scope(InternalConfigScope('command_line'))
return _configuration
@ -433,17 +477,22 @@ def get_configuration():
_configuration = None
#: TODO: consider getting rid of these top-level wrapper functions.
def get_config(section, scope=None):
"""Module-level interface for ``Configuration.get_config()``."""
config = get_configuration()
return config.get_config(section, scope)
def get(path, default=None, scope=None):
"""Module-level wrapper for ``Configuration.get()``."""
return config().get(path, default, scope)
def update_config(section, update_data, scope=None):
"""Module-level interface for ``Configuration.update_config()``."""
config = get_configuration()
return config.update_config(section, update_data, scope)
def set(path, value, scope=None):
"""Convenience function for setting single values in config files.
Accepts the path syntax described in ``get()``.
"""
return config().set(path, value, scope)
def scopes():
"""Convenience function to get list of configuration scopes."""
return config().scopes
def _validate_section_name(section):

View File

@ -27,7 +27,7 @@
import llnl.util.tty as tty
try:
enabled = spack.modules.common.configuration['enable']
enabled = spack.config.get('modules:enable')
except KeyError:
tty.debug('NO MODULE WRITTEN: list of enabled module files is empty')
enabled = []

View File

@ -36,7 +36,7 @@ def pre_run():
def check_compiler_yaml_version():
config = spack.config.get_configuration()
config = spack.config.config()
for scope in config.file_scopes:
file_name = os.path.join(scope.path, 'compilers.yaml')

View File

@ -65,14 +65,14 @@
import spack.util.environment
import spack.error
#: Root folders where the various module files should be written
roots = spack.config.get_config('config').get('module_roots', {})
#: config section for this file
configuration = spack.config.get('modules')
#: Merged modules.yaml as a dictionary
configuration = spack.config.get_config('modules')
#: Root folders where the various module files should be written
roots = spack.config.get('config:module_roots', {})
#: Inspections that needs to be done on spec prefixes
prefix_inspections = configuration.get('prefix_inspections', {})
prefix_inspections = spack.config.get('modules:prefix_inspections', {})
#: Valid tokens for naming scheme and env variable names
_valid_tokens = (

View File

@ -25,11 +25,12 @@
"""This module implements the classes necessary to generate dotkit modules."""
import os.path
import spack.config
from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration
from .common import BaseContext, BaseModuleFileWriter
#: Dotkit specific part of the configuration
configuration = configuration.get('dotkit', {})
configuration = spack.config.get('modules:dotkit', {})
#: Caches the configuration {spec_hash: configuration}
configuration_registry = {}

View File

@ -22,21 +22,23 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os.path
import llnl.util.lang as lang
import itertools
import collections
import spack.config
import spack.compilers
import spack.spec
import spack.error
import itertools
import collections
import spack.tengine as tengine
from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration
from .common import BaseContext, BaseModuleFileWriter
#: LMOD specific part of the configuration
configuration = configuration.get('lmod', {})
#: TCL specific part of the configuration
configuration = spack.config.get('modules:lmod', {})
#: Caches the configuration {spec_hash: configuration}
configuration_registry = {}

View File

@ -27,15 +27,16 @@
"""
import os.path
import string
import spack.tengine as tengine
import llnl.util.tty as tty
import spack.config
import spack.tengine as tengine
from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration
from .common import BaseContext, BaseModuleFileWriter
#: TCL specific part of the configuration
configuration = configuration.get('tcl', {})
configuration = spack.config.get('modules:tcl', {})
#: Caches the configuration {spec_hash: configuration}
configuration_registry = {}

View File

@ -44,7 +44,7 @@ def _spec_type(component):
def get_packages_config():
"""Wrapper around get_packages_config() to validate semantics."""
config = spack.config.get_config('packages')
config = spack.config.get('packages')
# Get a list of virtuals from packages.yaml. Note that because we
# check spack.repo, this collects virtuals that are actually provided

View File

@ -363,7 +363,7 @@ def __init__(self, *repo_dirs, **kwargs):
# If repo_dirs is empty, just use the configuration
if not repo_dirs:
import spack.config
repo_dirs = spack.config.get_config('repos')
repo_dirs = spack.config.get('repos')
if not repo_dirs:
raise NoRepoConfiguredError(
"Spack configuration contains no package repositories.")

View File

@ -84,8 +84,7 @@ def get_tmp_root():
return None
if _tmp_root is None:
config = spack.config.get_config('config')
candidates = config['build_stage']
candidates = spack.config.get('config:build_stage')
if isinstance(candidates, string_types):
candidates = [candidates]
@ -378,7 +377,7 @@ def fetch(self, mirror_only=False):
# TODO: CompositeFetchStrategy here.
self.skip_checksum_for_mirror = True
if self.mirror_path:
mirrors = spack.config.get_config('mirrors')
mirrors = spack.config.get('mirrors')
# Join URLs of mirror roots with mirror paths. Because
# urljoin() will strip everything past the final '/' in
@ -652,7 +651,7 @@ def cache_local(self):
def _get_mirrors():
"""Get mirrors from spack configuration."""
config = spack.config.get_config('mirrors')
config = spack.config.get('mirrors')
return [val for name, val in iteritems(config)]

View File

@ -53,16 +53,11 @@
__author__ = "Benedikt Hegner (CERN)"
__all__ = ['db', 'extensions', 'layout', 'root']
#
# Read in the config
#
config = spack.config.get_config("config")
#
# Set up the install path
#
root = canonicalize_path(
config.get('install_tree', os.path.join(spack.paths.opt_path, 'spack')))
root = canonicalize_path(spack.config.get(
'config:install_tree', os.path.join(spack.paths.opt_path, 'spack')))
#
# Set up the installed packages database
@ -73,8 +68,9 @@
# This controls how spack lays out install prefixes and
# stage directories.
#
layout = YamlDirectoryLayout(root,
hash_len=config.get('install_hash_length'),
path_scheme=config.get('install_path_scheme'))
layout = YamlDirectoryLayout(
root,
hash_len=spack.config.get('config:install_hash_length'),
path_scheme=spack.config.get('config:install_path_scheme'))
extensions = YamlExtensionsLayout(root, layout)

View File

@ -51,7 +51,7 @@ def concretize(abstract_spec):
def update_packages(pkgname, section, value):
"""Update config and reread package list"""
conf = {pkgname: {section: value}}
spack.config.update_config('packages', conf, 'concretize')
spack.config.set('packages', conf, scope='concretize')
spack.package_prefs.PackagePrefs.clear_caches()
@ -133,7 +133,7 @@ def test_no_virtuals_in_packages_yaml(self):
paths:
mpi-with-lapack@2.1: /path/to/lapack
""")
spack.config.update_config('packages', conf, 'concretize')
spack.config.set('packages', conf, scope='concretize')
# now when we get the packages.yaml config, there should be an error
with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
@ -145,7 +145,7 @@ def test_all_is_not_a_virtual(self):
all:
variants: [+mpi]
""")
spack.config.update_config('packages', conf, 'concretize')
spack.config.set('packages', conf, scope='concretize')
# should be no error for 'all':
spack.package_prefs.PackagePrefs.clear_caches()
@ -167,7 +167,7 @@ def test_external_mpi(self):
paths:
mpich@3.0.4: /dummy/path
""")
spack.config.update_config('packages', conf, 'concretize')
spack.config.set('packages', conf, scope='concretize')
# ensure that once config is in place, external is used
spec = Spec('mpi')

View File

@ -85,7 +85,7 @@ def _write(config, data, scope):
def check_compiler_config(comps, *compiler_names):
"""Check that named compilers in comps match Spack's config."""
config = spack.config.get_config('compilers')
config = spack.config.get('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'fc']
flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
'ldflags', 'ldlibs']
@ -228,12 +228,8 @@ def compiler_specs():
def test_write_key_in_memory(config, compiler_specs):
# Write b_comps "on top of" a_comps.
spack.config.update_config(
'compilers', a_comps['compilers'], scope='low'
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='high'
)
spack.config.set('compilers', a_comps['compilers'], scope='low')
spack.config.set('compilers', b_comps['compilers'], scope='high')
# Make sure the config looks how we expect.
check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -242,15 +238,11 @@ def test_write_key_in_memory(config, compiler_specs):
def test_write_key_to_disk(config, compiler_specs):
# Write b_comps "on top of" a_comps.
spack.config.update_config(
'compilers', a_comps['compilers'], scope='low'
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='high'
)
spack.config.set('compilers', a_comps['compilers'], scope='low')
spack.config.set('compilers', b_comps['compilers'], scope='high')
# Clear caches so we're forced to read from disk.
spack.config.get_configuration().clear_caches()
spack.config.config().clear_caches()
# Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -259,15 +251,11 @@ def test_write_key_to_disk(config, compiler_specs):
def test_write_to_same_priority_file(config, compiler_specs):
# Write b_comps in the same file as a_comps.
spack.config.update_config(
'compilers', a_comps['compilers'], scope='low'
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='low'
)
spack.config.set('compilers', a_comps['compilers'], scope='low')
spack.config.set('compilers', b_comps['compilers'], scope='low')
# Clear caches so we're forced to read from disk.
spack.config.get_configuration().clear_caches()
spack.config.config().clear_caches()
# Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -283,10 +271,10 @@ def test_write_to_same_priority_file(config, compiler_specs):
# repos
def test_write_list_in_memory(config):
spack.config.update_config('repos', repos_low['repos'], scope='low')
spack.config.update_config('repos', repos_high['repos'], scope='high')
spack.config.set('repos', repos_low['repos'], scope='low')
spack.config.set('repos', repos_high['repos'], scope='high')
config = spack.config.get_config('repos')
config = spack.config.get('repos')
assert config == repos_high['repos'] + repos_low['repos']
@ -359,7 +347,7 @@ def test_merge_with_defaults(config, write_config_file):
"""
write_config_file('packages', packages_merge_low, 'low')
write_config_file('packages', packages_merge_high, 'high')
cfg = spack.config.get_config('packages')
cfg = spack.config.get('packages')
assert cfg['foo']['version'] == ['a']
assert cfg['bar']['version'] == ['b']
@ -383,13 +371,13 @@ def test_substitute_tempdir(config):
def test_read_config(config, write_config_file):
write_config_file('config', config_low, 'low')
assert spack.config.get_config('config') == config_low['config']
assert spack.config.get('config') == config_low['config']
def test_read_config_override_all(config, write_config_file):
write_config_file('config', config_low, 'low')
write_config_file('config', config_override_all, 'high')
assert spack.config.get_config('config') == {
assert spack.config.get('config') == {
'install_tree': 'override_all'
}
@ -397,7 +385,7 @@ def test_read_config_override_all(config, write_config_file):
def test_read_config_override_key(config, write_config_file):
write_config_file('config', config_low, 'low')
write_config_file('config', config_override_key, 'high')
assert spack.config.get_config('config') == {
assert spack.config.get('config') == {
'install_tree': 'override_key',
'build_stage': ['path1', 'path2', 'path3']
}
@ -406,7 +394,7 @@ def test_read_config_override_key(config, write_config_file):
def test_read_config_merge_list(config, write_config_file):
write_config_file('config', config_low, 'low')
write_config_file('config', config_merge_list, 'high')
assert spack.config.get_config('config') == {
assert spack.config.get('config') == {
'install_tree': 'install_tree_path',
'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
}
@ -415,7 +403,7 @@ def test_read_config_merge_list(config, write_config_file):
def test_read_config_override_list(config, write_config_file):
write_config_file('config', config_low, 'low')
write_config_file('config', config_override_list, 'high')
assert spack.config.get_config('config') == {
assert spack.config.get('config') == {
'install_tree': 'install_tree_path',
'build_stage': ['patha', 'pathb']
}
@ -424,30 +412,30 @@ def test_read_config_override_list(config, write_config_file):
def test_internal_config_update(config, write_config_file):
write_config_file('config', config_low, 'low')
before = config.get_config('config')
before = config.get('config')
assert before['install_tree'] == 'install_tree_path'
# add an internal configuration scope
scope = spack.config.InternalConfigScope('commands')
scope = spack.config.InternalConfigScope('command_line')
assert 'InternalConfigScope' in repr(scope)
config.push_scope(scope)
command_config = config.get_config('config', scope='commands')
command_config = config.get('config', scope='command_line')
command_config['install_tree'] = 'foo/bar'
config.update_config('config', command_config, scope='commands')
config.set('config', command_config, scope='command_line')
after = config.get_config('config')
after = config.get('config')
assert after['install_tree'] == 'foo/bar'
def test_internal_config_filename(config, write_config_file):
write_config_file('config', config_low, 'low')
config.push_scope(spack.config.InternalConfigScope('commands'))
config.push_scope(spack.config.InternalConfigScope('command_line'))
with pytest.raises(NotImplementedError):
config.get_config_filename('commands', 'config')
config.get_config_filename('command_line', 'config')
def test_keys_are_ordered():

View File

@ -69,7 +69,7 @@ def check_mirror():
mirror_root = join_path(stage.path, 'test-mirror')
# register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_root}
spack.config.update_config('mirrors', mirrors)
spack.config.set('mirrors', mirrors)
spack.mirror.create(mirror_root, repos, no_checksum=True)
# Stage directory exists

View File

@ -93,7 +93,7 @@ def _impl(spec_str):
@pytest.fixture()
def update_template_dirs(config, monkeypatch):
"""Mocks the template directories for tests"""
dirs = spack.config.get_config('config')['template_dirs']
dirs = spack.config.get('config:template_dirs')
dirs = [spack.util.path.canonicalize_path(x) for x in dirs]
monkeypatch.setattr(spack, 'template_dirs', dirs)

View File

@ -64,7 +64,7 @@ def test_all_versions_are_lowercase():
def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set."""
defaults = spack.config.get_config('packages', scope='defaults')
defaults = spack.config.get('packages', scope='defaults')
default_providers = defaults['all']['providers']
providers = spack.repo.provider_index.providers

View File

@ -114,7 +114,7 @@ def test_buildcache(mock_archive, tmpdir):
# register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_path}
spack.config.update_config('mirrors', mirrors)
spack.config.set('mirrors', mirrors)
stage = spack.stage.Stage(
mirrors['spack-mirror-test'], name="build_cache", keep=True)
@ -213,7 +213,7 @@ def test_buildcache(mock_archive, tmpdir):
# unregister mirror with spack config
mirrors = {}
spack.config.update_config('mirrors', mirrors)
spack.config.set('mirrors', mirrors)
shutil.rmtree(mirror_path)
stage.destroy()

View File

@ -114,15 +114,12 @@ def get_stage_path(stage, stage_name):
def tmpdir_for_stage(mock_archive):
"""Uses a temporary directory for staging"""
current = spack.paths.stage_path
spack.config.update_config(
spack.config.set(
'config',
{'build_stage': [str(mock_archive.test_tmp_dir)]},
scope='user'
)
scope='user')
yield
spack.config.update_config(
'config', {'build_stage': [current]}, scope='user'
)
spack.config.set('config', {'build_stage': [current]}, scope='user')
@pytest.fixture()
@ -138,9 +135,8 @@ def mock_archive(tmpdir, monkeypatch):
#
test_tmp_path = tmpdir.join('tmp')
# set _test_tmp_path as the default test directory to use for stages.
spack.config.update_config(
'config', {'build_stage': [str(test_tmp_path)]}, scope='user'
)
spack.config.set(
'config', {'build_stage': [str(test_tmp_path)]}, scope='user')
archive_dir = tmpdir.join('test-files')
archive_name = 'test-files.tar.gz'

View File

@ -91,7 +91,7 @@ class TestTengineEnvironment(object):
def test_template_retrieval(self):
"""Tests the template retrieval mechanism hooked into config files"""
# Check the directories are correct
template_dirs = spack.config.get_config('config')['template_dirs']
template_dirs = spack.config.get('config:template_dirs')
template_dirs = [canonicalize_path(x) for x in template_dirs]
assert len(template_dirs) == 3

View File

@ -41,8 +41,8 @@ if ($?SPACK_ROOT) then
# Shamelessly stolen from setup-env.sh
set _sp_sys_type = `$SPACK_ROOT/bin/spack python -c 'print(spack.architecture.sys_type())'`
set _sp_dotkit_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots').get('dotkit')))"`
set _sp_tcl_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots').get('tcl')))"`
set _sp_dotkit_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get('config:module_roots', {}).get('dotkit')))"`
set _sp_tcl_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get('config:module_roots', {}).get('tcl')))"`
# Set up modules and dotkit search paths in the user environment
_spack_pathadd DK_NODE "$_sp_dotkit_root/$_sp_sys_type"

View File

@ -239,8 +239,8 @@ fi;
_python_command=$(printf "%s\\\n%s\\\n%s" \
"print(\'_sp_sys_type={0}\'.format(spack.architecture.sys_type()))" \
"print(\'_sp_dotkit_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get_config(\'config\').get(\'module_roots\', {}).get(\'dotkit\'))))" \
"print(\'_sp_tcl_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get_config(\'config\').get(\'module_roots\', {}).get(\'tcl\'))))"
"print(\'_sp_dotkit_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get(\'config:module_roots\', {}).get(\'dotkit\'))))" \
"print(\'_sp_tcl_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get(\'config:module_roots\', {}).get(\'tcl\'))))"
)
_assignment_command=$(spack-python -c "exec('${_python_command}')")