config: simplify configuration API to just get and set

- replace `spack.config.get_configuration()` with `spack.config.config()`

- replace `get_config`/`update_config` with `get`, `set`

- add a path syntax that can be used to refer to specific config options
  without first getting the entire configuration dict

- update usages of `get_config` and `update_config` to use `get` and `set`
This commit is contained in:
Todd Gamblin 2018-04-14 19:51:29 -07:00 committed by scheibelp
parent c9ea957563
commit 863ccd5e94
31 changed files with 207 additions and 173 deletions

View File

@ -41,35 +41,33 @@
from spack.util.path import canonicalize_path from spack.util.path import canonicalize_path
_config = spack.config.get_config('config')
# TODO: get this out of __init__.py # TODO: get this out of __init__.py
binary_cache_retrieved_specs = set() binary_cache_retrieved_specs = set()
#: Directories where to search for templates #: Directories where to search for templates
template_dirs = spack.config.get_config('config')['template_dirs'] template_dirs = spack.config.get('config:template_dirs')
template_dirs = [canonicalize_path(x) for x in template_dirs] template_dirs = [canonicalize_path(x) for x in template_dirs]
#: If this is enabled, tools that use SSL should not verify #: If this is enabled, tools that use SSL should not verify
#: certificates. e.g., curl should use the -k option. #: certificates. e.g., curl should use the -k option.
insecure = not _config.get('verify_ssl', True) insecure = not spack.config.get('config:verify_ssl', True)
#: Whether spack should allow installation of unsafe versions of software. #: Whether spack should allow installation of unsafe versions of software.
#: "Unsafe" versions are ones it doesn't have a checksum for. #: "Unsafe" versions are ones it doesn't have a checksum for.
do_checksum = _config.get('checksum', True) do_checksum = spack.config.get('config:checksum', True)
# If this is True, spack will not clean the environment to remove # If this is True, spack will not clean the environment to remove
# potentially harmful variables before builds. # potentially harmful variables before builds.
dirty = _config.get('dirty', False) dirty = spack.config.get('config:dirty', False)
#: The number of jobs to use when building in parallel. #: The number of jobs to use when building in parallel.
#: By default, use all cores on the machine. #: By default, use all cores on the machine.
build_jobs = _config.get('build_jobs', multiprocessing.cpu_count()) build_jobs = spack.config.get('config:build_jobs', multiprocessing.cpu_count())
#----------------------------------------------------------------------------- #-----------------------------------------------------------------------------

View File

@ -364,7 +364,7 @@ def download_tarball(spec):
Download binary tarball for given package into stage area Download binary tarball for given package into stage area
Return True if successful Return True if successful
""" """
mirrors = spack.config.get_config('mirrors') mirrors = spack.config.get('mirrors')
if len(mirrors) == 0: if len(mirrors) == 0:
tty.die("Please add a spack mirror to allow " + tty.die("Please add a spack mirror to allow " +
"download of pre-compiled packages.") "download of pre-compiled packages.")
@ -538,7 +538,7 @@ def get_specs(force=False):
previously_retrieved = spack.binary_cache_retrieved_specs previously_retrieved = spack.binary_cache_retrieved_specs
return previously_retrieved return previously_retrieved
mirrors = spack.config.get_config('mirrors') mirrors = spack.config.get('mirrors')
if len(mirrors) == 0: if len(mirrors) == 0:
tty.warn("No Spack mirrors are currently configured") tty.warn("No Spack mirrors are currently configured")
return {} return {}
@ -588,7 +588,7 @@ def get_keys(install=False, trust=False, force=False):
""" """
Get pgp public keys available on mirror Get pgp public keys available on mirror
""" """
mirrors = spack.config.get_config('mirrors') mirrors = spack.config.get('mirrors')
if len(mirrors) == 0: if len(mirrors) == 0:
tty.die("Please add a spack mirror to allow " + tty.die("Please add a spack mirror to allow " +
"download of build caches.") "download of build caches.")

View File

@ -44,8 +44,7 @@ def misc_cache():
global _misc_cache global _misc_cache
if _misc_cache is None: if _misc_cache is None:
config = spack.config.get_config('config') path = spack.config.get('config:misc_cache')
path = config.get('misc_cache')
if not path: if not path:
path = os.path.join(spack.paths.user_config_path, 'cache') path = os.path.join(spack.paths.user_config_path, 'cache')
path = canonicalize_path(path) path = canonicalize_path(path)
@ -63,8 +62,7 @@ def fetch_cache():
global _fetch_cache global _fetch_cache
if _fetch_cache is None: if _fetch_cache is None:
config = spack.config.get_config('config') path = spack.config.get('config:source_cache')
path = config.get('source_cache')
if not path: if not path:
path = os.path.join(spack.paths.var_path, "cache") path = os.path.join(spack.paths.var_path, "cache")
path = canonicalize_path(path) path = canonicalize_path(path)

View File

@ -49,8 +49,7 @@ def default_modify_scope():
Commands that modify configuration by default modify the *highest* Commands that modify configuration by default modify the *highest*
priority scope. priority scope.
""" """
config = spack.config.get_configuration() return spack.config.config().highest_precedence_scope().name
return config.highest_precedence_scope().name
def default_list_scope(): def default_list_scope():

View File

@ -46,7 +46,8 @@ def setup_parser(subparser):
sp = subparser.add_subparsers( sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command') metavar='SUBCOMMAND', dest='compiler_command')
scopes = spack.config.get_configuration().scopes scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Find # Find
find_parser = sp.add_parser( find_parser = sp.add_parser(
@ -54,7 +55,7 @@ def setup_parser(subparser):
help='search the system for compilers to add to Spack configuration') help='search the system for compilers to add to Spack configuration')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER) find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument( find_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
@ -66,14 +67,14 @@ def setup_parser(subparser):
help='remove ALL compilers that match spec') help='remove ALL compilers that match spec')
remove_parser.add_argument('compiler_spec') remove_parser.add_argument('compiler_spec')
remove_parser.add_argument( remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
# List # List
list_parser = sp.add_parser('list', help='list available compilers') list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument( list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(), default=spack.cmd.default_list_scope(),
help="configuration scope to read from") help="configuration scope to read from")
@ -81,7 +82,7 @@ def setup_parser(subparser):
info_parser = sp.add_parser('info', help='show compiler paths') info_parser = sp.add_parser('info', help='show compiler paths')
info_parser.add_argument('compiler_spec') info_parser.add_argument('compiler_spec')
info_parser.add_argument( info_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(), default=spack.cmd.default_list_scope(),
help="configuration scope to read from") help="configuration scope to read from")
@ -114,7 +115,7 @@ def compiler_find(args):
n = len(new_compilers) n = len(new_compilers)
s = 's' if n > 1 else '' s = 's' if n > 1 else ''
config = spack.config.get_configuration() config = spack.config.config()
filename = config.get_config_filename(args.scope, 'compilers') filename = config.get_config_filename(args.scope, 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename)) tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4) colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)

View File

@ -31,9 +31,11 @@
def setup_parser(subparser): def setup_parser(subparser):
scopes = spack.config.get_configuration().scopes scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
subparser.add_argument( subparser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
help="configuration scope to read/modify") help="configuration scope to read/modify")

View File

@ -30,10 +30,12 @@
def setup_parser(subparser): def setup_parser(subparser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# User can only choose one # User can only choose one
subparser.add_argument( subparser.add_argument(
'--scope', choices=spack.config.get_configuration().scopes, '--scope', choices=scopes, metavar=scopes_metavar,
metavar=spack.config.scopes_metavar,
help="configuration scope to read/modify") help="configuration scope to read/modify")
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
@ -54,8 +56,7 @@ def setup_parser(subparser):
def config_get(args): def config_get(args):
config = spack.config.get_configuration() spack.config.config().print_section(args.section)
config.print_section(args.section)
def config_edit(args): def config_edit(args):
@ -67,7 +68,7 @@ def config_edit(args):
if not args.section: if not args.section:
args.section = None args.section = None
config = spack.config.get_configuration() config = spack.config.config()
config_file = config.get_config_filename(args.scope, args.section) config_file = config.get_config_filename(args.scope, args.section)
spack.editor(config_file) spack.editor(config_file)

View File

@ -67,7 +67,9 @@ def setup_parser(subparser):
const=1, default=0, const=1, default=0,
help="only fetch one 'preferred' version per spec, not all known") help="only fetch one 'preferred' version per spec, not all known")
scopes = spack.config.get_configuration().scopes # used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Add # Add
add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser = sp.add_parser('add', help=mirror_add.__doc__)
@ -75,7 +77,7 @@ def setup_parser(subparser):
add_parser.add_argument( add_parser.add_argument(
'url', help="url of mirror directory from 'spack mirror create'") 'url', help="url of mirror directory from 'spack mirror create'")
add_parser.add_argument( add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
@ -84,14 +86,14 @@ def setup_parser(subparser):
help=mirror_remove.__doc__) help=mirror_remove.__doc__)
remove_parser.add_argument('name') remove_parser.add_argument('name')
remove_parser.add_argument( remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
# List # List
list_parser = sp.add_parser('list', help=mirror_list.__doc__) list_parser = sp.add_parser('list', help=mirror_list.__doc__)
list_parser.add_argument( list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(), default=spack.cmd.default_list_scope(),
help="configuration scope to read from") help="configuration scope to read from")
@ -102,7 +104,7 @@ def mirror_add(args):
if url.startswith('/'): if url.startswith('/'):
url = 'file://' + url url = 'file://' + url
mirrors = spack.config.get_config('mirrors', scope=args.scope) mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors: if not mirrors:
mirrors = syaml_dict() mirrors = syaml_dict()
@ -116,14 +118,14 @@ def mirror_add(args):
items = [(n, u) for n, u in mirrors.items()] items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url)) items.insert(0, (args.name, url))
mirrors = syaml_dict(items) mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope) spack.config.set('mirrors', mirrors, scope=args.scope)
def mirror_remove(args): def mirror_remove(args):
"""Remove a mirror by name.""" """Remove a mirror by name."""
name = args.name name = args.name
mirrors = spack.config.get_config('mirrors', scope=args.scope) mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors: if not mirrors:
mirrors = syaml_dict() mirrors = syaml_dict()
@ -131,13 +133,13 @@ def mirror_remove(args):
tty.die("No mirror with name %s" % name) tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name) old_value = mirrors.pop(name)
spack.config.update_config('mirrors', mirrors, scope=args.scope) spack.config.set('mirrors', mirrors, scope=args.scope)
tty.msg("Removed mirror %s with url %s" % (name, old_value)) tty.msg("Removed mirror %s with url %s" % (name, old_value))
def mirror_list(args): def mirror_list(args):
"""Print out available mirrors to the console.""" """Print out available mirrors to the console."""
mirrors = spack.config.get_config('mirrors', scope=args.scope) mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors: if not mirrors:
tty.msg("No mirrors configured.") tty.msg("No mirrors configured.")
return return

View File

@ -39,7 +39,8 @@
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.get_configuration().scopes scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Create # Create
create_parser = sp.add_parser('create', help=repo_create.__doc__) create_parser = sp.add_parser('create', help=repo_create.__doc__)
@ -52,7 +53,7 @@ def setup_parser(subparser):
# List # List
list_parser = sp.add_parser('list', help=repo_list.__doc__) list_parser = sp.add_parser('list', help=repo_list.__doc__)
list_parser.add_argument( list_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_list_scope(), default=spack.cmd.default_list_scope(),
help="configuration scope to read from") help="configuration scope to read from")
@ -61,7 +62,7 @@ def setup_parser(subparser):
add_parser.add_argument( add_parser.add_argument(
'path', help="path to a Spack package repository directory") 'path', help="path to a Spack package repository directory")
add_parser.add_argument( add_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
@ -72,7 +73,7 @@ def setup_parser(subparser):
'path_or_namespace', 'path_or_namespace',
help="path or namespace of a Spack package repository") help="path or namespace of a Spack package repository")
remove_parser.add_argument( remove_parser.add_argument(
'--scope', choices=scopes, metavar=spack.config.scopes_metavar, '--scope', choices=scopes, metavar=scopes_metavar,
default=spack.cmd.default_modify_scope(), default=spack.cmd.default_modify_scope(),
help="configuration scope to modify") help="configuration scope to modify")
@ -104,7 +105,7 @@ def repo_add(args):
repo = Repo(canon_path) repo = Repo(canon_path)
# If that succeeds, finally add it to the configuration. # If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope) repos = spack.config.get('repos', scope=args.scope)
if not repos: if not repos:
repos = [] repos = []
@ -112,13 +113,13 @@ def repo_add(args):
tty.die("Repository is already registered with Spack: %s" % path) tty.die("Repository is already registered with Spack: %s" % path)
repos.insert(0, canon_path) repos.insert(0, canon_path)
spack.config.update_config('repos', repos, args.scope) spack.config.set('repos', repos, args.scope)
tty.msg("Added repo with namespace '%s'." % repo.namespace) tty.msg("Added repo with namespace '%s'." % repo.namespace)
def repo_remove(args): def repo_remove(args):
"""Remove a repository from Spack's configuration.""" """Remove a repository from Spack's configuration."""
repos = spack.config.get_config('repos', args.scope) repos = spack.config.get('repos', scope=args.scope)
path_or_namespace = args.path_or_namespace path_or_namespace = args.path_or_namespace
# If the argument is a path, remove that repository from config. # If the argument is a path, remove that repository from config.
@ -127,7 +128,7 @@ def repo_remove(args):
repo_canon_path = canonicalize_path(repo_path) repo_canon_path = canonicalize_path(repo_path)
if canon_path == repo_canon_path: if canon_path == repo_canon_path:
repos.remove(repo_path) repos.remove(repo_path)
spack.config.update_config('repos', repos, args.scope) spack.config.set('repos', repos, args.scope)
tty.msg("Removed repository %s" % repo_path) tty.msg("Removed repository %s" % repo_path)
return return
@ -137,7 +138,7 @@ def repo_remove(args):
repo = Repo(path) repo = Repo(path)
if repo.namespace == path_or_namespace: if repo.namespace == path_or_namespace:
repos.remove(path) repos.remove(path)
spack.config.update_config('repos', repos, args.scope) spack.config.set('repos', repos, args.scope)
tty.msg("Removed repository %s with namespace '%s'." tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace)) % (repo.root, repo.namespace))
return return
@ -150,7 +151,7 @@ def repo_remove(args):
def repo_list(args): def repo_list(args):
"""Show registered repositories and their namespaces.""" """Show registered repositories and their namespaces."""
roots = spack.config.get_config('repos', args.scope) roots = spack.config.get('repos', scope=args.scope)
repos = [] repos = []
for r in roots: for r in roots:
try: try:

View File

@ -88,9 +88,9 @@ def init_compiler_config():
compilers_dict = [] compilers_dict = []
for compiler in compilers: for compiler in compilers:
compilers_dict.append(_to_dict(compiler)) compilers_dict.append(_to_dict(compiler))
spack.config.update_config('compilers', compilers_dict, scope=scope) spack.config.set('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope) config = spack.config.get('compilers', scope=scope)
# Update the configuration if there are currently no compilers # Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site # configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones. # compilers configured but no user ones.
@ -98,15 +98,15 @@ def init_compiler_config():
if scope is None: if scope is None:
# We know no compilers were configured in any scope. # We know no compilers were configured in any scope.
init_compiler_config() init_compiler_config()
config = spack.config.get_config('compilers', scope=scope) config = spack.config.get('compilers', scope=scope)
elif scope == 'user': elif scope == 'user':
# Check the site config and update the user config if # Check the site config and update the user config if
# nothing is configured at the site level. # nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site') site_config = spack.config.get('compilers', scope='site')
sys_config = spack.config.get_config('compilers', scope='system') sys_config = spack.config.get('compilers', scope='system')
if not site_config and not sys_config: if not site_config and not sys_config:
init_compiler_config() init_compiler_config()
config = spack.config.get_config('compilers', scope=scope) config = spack.config.get('compilers', scope=scope)
return config return config
elif config: elif config:
return config return config
@ -116,10 +116,10 @@ def init_compiler_config():
def compiler_config_files(): def compiler_config_files():
config_files = list() config_files = list()
config = spack.config.get_configuration() config = spack.config.config()
for scope in config.file_scopes: for scope in config.file_scopes:
name = scope.name name = scope.name
compiler_config = config.get_config('compilers', scope=name) compiler_config = config.get('compilers', scope=name)
if compiler_config: if compiler_config:
config_files.append(config.get_config_filename(name, 'compilers')) config_files.append(config.get_config_filename(name, 'compilers'))
return config_files return config_files
@ -137,7 +137,7 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
compiler_config.append(_to_dict(compiler)) compiler_config.append(_to_dict(compiler))
global _cache_config_file global _cache_config_file
_cache_config_file = compiler_config _cache_config_file = compiler_config
spack.config.update_config('compilers', compiler_config, scope) spack.config.set('compilers', compiler_config, scope=scope)
@_auto_compiler_spec @_auto_compiler_spec
@ -162,7 +162,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
_cache_config_file = filtered_compiler_config _cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec) CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.update_config('compilers', filtered_compiler_config, scope) spack.config.set('compilers', filtered_compiler_config, scope=scope)
def all_compilers_config(scope=None, init_config=True): def all_compilers_config(scope=None, init_config=True):
@ -339,7 +339,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
@_auto_compiler_spec @_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec): def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.get_configuration() config = spack.config.config()
scope_to_compilers = {} scope_to_compilers = {}
for scope in config.scopes: for scope in config.scopes:

View File

@ -109,10 +109,6 @@
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]' scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
#: config scopes only used by Spack internally
internal_scopes = ['commands']
def _extend_with_default(validator_class): def _extend_with_default(validator_class):
"""Add support for the 'default' attr for properties and patternProperties. """Add support for the 'default' attr for properties and patternProperties.
@ -379,6 +375,54 @@ def get_config(self, section, scope=None):
# take the top key off before returning. # take the top key off before returning.
return merged_section[section] return merged_section[section]
def get(self, path, default=None, scope=None):
"""Get a config section or a single value from one.
Accepts a path syntax that allows us to grab nested config map
entries. Getting the 'config' section would look like::
spack.config.get('config')
and the ``dirty`` section in the ``config`` scope would be::
spack.config.get('config:dirty')
We use ``:`` as the separator, like YAML objects.
"""
# TODO: Currently only handles maps. Think about lists if needed.
section, _, rest = path.partition(':')
value = self.get_config(section, scope=scope)
if not rest:
return value
parts = rest.split(':')
while parts:
key = parts.pop(0)
value = value.get(key, default)
return value
def set(self, path, value, scope=None):
"""Convenience function for setting single values in config files.
Accepts the path syntax described in ``get()``.
"""
section, _, rest = path.partition(':')
if not rest:
self.update_config(section, value, scope=scope)
else:
section_data = self.get_config(section, scope=scope)
parts = rest.split(':')
data = section_data
while len(parts) > 1:
key = parts.pop(0)
data = data[key]
data[parts[0]] = value
self.update_config(section, section_data, scope=scope)
def __iter__(self): def __iter__(self):
"""Iterate over scopes in this configuration.""" """Iterate over scopes in this configuration."""
for scope in self.scopes.values(): for scope in self.scopes.values():
@ -394,15 +438,15 @@ def print_section(self, section):
raise ConfigError("Error reading configuration: %s" % section) raise ConfigError("Error reading configuration: %s" % section)
def get_configuration(): def config():
"""This constructs Spack's standard configuration scopes """Singleton Configuration instance.
This is a singleton; it constructs one instance associated with this This constructs one instance associated with this module and returns
module and returns it. It is bundled inside a function so that it. It is bundled inside a function so that configuration can be
configuration can be initialized lazily. initialized lazily.
Return: Return:
Configuration: object for accessing spack configuration (Configuration): object for accessing spack configuration
""" """
global _configuration global _configuration
@ -423,7 +467,7 @@ def get_configuration():
# we make a special scope for spack commands so that they can # we make a special scope for spack commands so that they can
# override configuration options. # override configuration options.
_configuration.push_scope(InternalConfigScope('commands')) _configuration.push_scope(InternalConfigScope('command_line'))
return _configuration return _configuration
@ -433,17 +477,22 @@ def get_configuration():
_configuration = None _configuration = None
#: TODO: consider getting rid of these top-level wrapper functions. def get(path, default=None, scope=None):
def get_config(section, scope=None): """Module-level wrapper for ``Configuration.get()``."""
"""Module-level interface for ``Configuration.get_config()``.""" return config().get(path, default, scope)
config = get_configuration()
return config.get_config(section, scope)
def update_config(section, update_data, scope=None): def set(path, value, scope=None):
"""Module-level interface for ``Configuration.update_config()``.""" """Convenience function for getting single values in config files.
config = get_configuration()
return config.update_config(section, update_data, scope) Accepts the path syntax described in ``get()``.
"""
return config().set(path, value, scope)
def scopes():
"""Convenience function to get list of configuration scopes."""
return config().scopes
def _validate_section_name(section): def _validate_section_name(section):

View File

@ -27,7 +27,7 @@
import llnl.util.tty as tty import llnl.util.tty as tty
try: try:
enabled = spack.modules.common.configuration['enable'] enabled = spack.config.get('modules:enable')
except KeyError: except KeyError:
tty.debug('NO MODULE WRITTEN: list of enabled module files is empty') tty.debug('NO MODULE WRITTEN: list of enabled module files is empty')
enabled = [] enabled = []

View File

@ -36,7 +36,7 @@ def pre_run():
def check_compiler_yaml_version(): def check_compiler_yaml_version():
config = spack.config.get_configuration() config = spack.config.config()
for scope in config.file_scopes: for scope in config.file_scopes:
file_name = os.path.join(scope.path, 'compilers.yaml') file_name = os.path.join(scope.path, 'compilers.yaml')

View File

@ -65,14 +65,14 @@
import spack.util.environment import spack.util.environment
import spack.error import spack.error
#: Root folders where the various module files should be written #: config section for this file
roots = spack.config.get_config('config').get('module_roots', {}) configuration = spack.config.get('modules')
#: Merged modules.yaml as a dictionary #: Root folders where the various module files should be written
configuration = spack.config.get_config('modules') roots = spack.config.get('config:module_roots', {})
#: Inspections that needs to be done on spec prefixes #: Inspections that needs to be done on spec prefixes
prefix_inspections = configuration.get('prefix_inspections', {}) prefix_inspections = spack.config.get('modules:prefix_inspections', {})
#: Valid tokens for naming scheme and env variable names #: Valid tokens for naming scheme and env variable names
_valid_tokens = ( _valid_tokens = (

View File

@ -25,11 +25,12 @@
"""This module implements the classes necessary to generate dotkit modules.""" """This module implements the classes necessary to generate dotkit modules."""
import os.path import os.path
import spack.config
from .common import BaseConfiguration, BaseFileLayout from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration from .common import BaseContext, BaseModuleFileWriter
#: Dotkit specific part of the configuration #: Dotkit specific part of the configuration
configuration = configuration.get('dotkit', {}) configuration = spack.config.get('modules:dotkit', {})
#: Caches the configuration {spec_hash: configuration} #: Caches the configuration {spec_hash: configuration}
configuration_registry = {} configuration_registry = {}

View File

@ -22,21 +22,23 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import os.path import os.path
import llnl.util.lang as lang import llnl.util.lang as lang
import itertools
import collections
import spack.config
import spack.compilers import spack.compilers
import spack.spec import spack.spec
import spack.error import spack.error
import itertools
import collections
import spack.tengine as tengine import spack.tengine as tengine
from .common import BaseConfiguration, BaseFileLayout from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration from .common import BaseContext, BaseModuleFileWriter
#: LMOD specific part of the configuration #: TCL specific part of the configuration
configuration = configuration.get('lmod', {}) configuration = spack.config.get('modules:lmod', {})
#: Caches the configuration {spec_hash: configuration} #: Caches the configuration {spec_hash: configuration}
configuration_registry = {} configuration_registry = {}

View File

@ -27,15 +27,16 @@
""" """
import os.path import os.path
import string import string
import spack.tengine as tengine
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.config
import spack.tengine as tengine
from .common import BaseConfiguration, BaseFileLayout from .common import BaseConfiguration, BaseFileLayout
from .common import BaseContext, BaseModuleFileWriter, configuration from .common import BaseContext, BaseModuleFileWriter
#: TCL specific part of the configuration #: TCL specific part of the configuration
configuration = configuration.get('tcl', {}) configuration = spack.config.get('modules:tcl', {})
#: Caches the configuration {spec_hash: configuration} #: Caches the configuration {spec_hash: configuration}
configuration_registry = {} configuration_registry = {}

View File

@ -44,7 +44,7 @@ def _spec_type(component):
def get_packages_config(): def get_packages_config():
"""Wrapper around get_packages_config() to validate semantics.""" """Wrapper around get_packages_config() to validate semantics."""
config = spack.config.get_config('packages') config = spack.config.get('packages')
# Get a list of virtuals from packages.yaml. Note that because we # Get a list of virtuals from packages.yaml. Note that because we
# check spack.repo, this collects virtuals that are actually provided # check spack.repo, this collects virtuals that are actually provided

View File

@ -363,7 +363,7 @@ def __init__(self, *repo_dirs, **kwargs):
# If repo_dirs is empty, just use the configuration # If repo_dirs is empty, just use the configuration
if not repo_dirs: if not repo_dirs:
import spack.config import spack.config
repo_dirs = spack.config.get_config('repos') repo_dirs = spack.config.get('repos')
if not repo_dirs: if not repo_dirs:
raise NoRepoConfiguredError( raise NoRepoConfiguredError(
"Spack configuration contains no package repositories.") "Spack configuration contains no package repositories.")

View File

@ -84,8 +84,7 @@ def get_tmp_root():
return None return None
if _tmp_root is None: if _tmp_root is None:
config = spack.config.get_config('config') candidates = spack.config.get('config:build_stage')
candidates = config['build_stage']
if isinstance(candidates, string_types): if isinstance(candidates, string_types):
candidates = [candidates] candidates = [candidates]
@ -378,7 +377,7 @@ def fetch(self, mirror_only=False):
# TODO: CompositeFetchStrategy here. # TODO: CompositeFetchStrategy here.
self.skip_checksum_for_mirror = True self.skip_checksum_for_mirror = True
if self.mirror_path: if self.mirror_path:
mirrors = spack.config.get_config('mirrors') mirrors = spack.config.get('mirrors')
# Join URLs of mirror roots with mirror paths. Because # Join URLs of mirror roots with mirror paths. Because
# urljoin() will strip everything past the final '/' in # urljoin() will strip everything past the final '/' in
@ -652,7 +651,7 @@ def cache_local(self):
def _get_mirrors(): def _get_mirrors():
"""Get mirrors from spack configuration.""" """Get mirrors from spack configuration."""
config = spack.config.get_config('mirrors') config = spack.config.get('mirrors')
return [val for name, val in iteritems(config)] return [val for name, val in iteritems(config)]

View File

@ -53,16 +53,11 @@
__author__ = "Benedikt Hegner (CERN)" __author__ = "Benedikt Hegner (CERN)"
__all__ = ['db', 'extensions', 'layout', 'root'] __all__ = ['db', 'extensions', 'layout', 'root']
#
# Read in the config
#
config = spack.config.get_config("config")
# #
# Set up the install path # Set up the install path
# #
root = canonicalize_path( root = canonicalize_path(spack.config.get(
config.get('install_tree', os.path.join(spack.paths.opt_path, 'spack'))) 'config:install_tree', os.path.join(spack.paths.opt_path, 'spack')))
# #
# Set up the installed packages database # Set up the installed packages database
@ -73,8 +68,9 @@
# This controls how spack lays out install prefixes and # This controls how spack lays out install prefixes and
# stage directories. # stage directories.
# #
layout = YamlDirectoryLayout(root, layout = YamlDirectoryLayout(
hash_len=config.get('install_hash_length'), root,
path_scheme=config.get('install_path_scheme')) hash_len=spack.config.get('config:install_hash_length'),
path_scheme=spack.config.get('config:install_path_scheme'))
extensions = YamlExtensionsLayout(root, layout) extensions = YamlExtensionsLayout(root, layout)

View File

@ -51,7 +51,7 @@ def concretize(abstract_spec):
def update_packages(pkgname, section, value): def update_packages(pkgname, section, value):
"""Update config and reread package list""" """Update config and reread package list"""
conf = {pkgname: {section: value}} conf = {pkgname: {section: value}}
spack.config.update_config('packages', conf, 'concretize') spack.config.set('packages', conf, scope='concretize')
spack.package_prefs.PackagePrefs.clear_caches() spack.package_prefs.PackagePrefs.clear_caches()
@ -133,7 +133,7 @@ def test_no_virtuals_in_packages_yaml(self):
paths: paths:
mpi-with-lapack@2.1: /path/to/lapack mpi-with-lapack@2.1: /path/to/lapack
""") """)
spack.config.update_config('packages', conf, 'concretize') spack.config.set('packages', conf, scope='concretize')
# now when we get the packages.yaml config, there should be an error # now when we get the packages.yaml config, there should be an error
with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError): with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
@ -145,7 +145,7 @@ def test_all_is_not_a_virtual(self):
all: all:
variants: [+mpi] variants: [+mpi]
""") """)
spack.config.update_config('packages', conf, 'concretize') spack.config.set('packages', conf, scope='concretize')
# should be no error for 'all': # should be no error for 'all':
spack.package_prefs.PackagePrefs.clear_caches() spack.package_prefs.PackagePrefs.clear_caches()
@ -167,7 +167,7 @@ def test_external_mpi(self):
paths: paths:
mpich@3.0.4: /dummy/path mpich@3.0.4: /dummy/path
""") """)
spack.config.update_config('packages', conf, 'concretize') spack.config.set('packages', conf, scope='concretize')
# ensure that once config is in place, external is used # ensure that once config is in place, external is used
spec = Spec('mpi') spec = Spec('mpi')

View File

@ -85,7 +85,7 @@ def _write(config, data, scope):
def check_compiler_config(comps, *compiler_names): def check_compiler_config(comps, *compiler_names):
"""Check that named compilers in comps match Spack's config.""" """Check that named compilers in comps match Spack's config."""
config = spack.config.get_config('compilers') config = spack.config.get('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'fc'] compiler_list = ['cc', 'cxx', 'f77', 'fc']
flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags', flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
'ldflags', 'ldlibs'] 'ldflags', 'ldlibs']
@ -228,12 +228,8 @@ def compiler_specs():
def test_write_key_in_memory(config, compiler_specs): def test_write_key_in_memory(config, compiler_specs):
# Write b_comps "on top of" a_comps. # Write b_comps "on top of" a_comps.
spack.config.update_config( spack.config.set('compilers', a_comps['compilers'], scope='low')
'compilers', a_comps['compilers'], scope='low' spack.config.set('compilers', b_comps['compilers'], scope='high')
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='high'
)
# Make sure the config looks how we expect. # Make sure the config looks how we expect.
check_compiler_config(a_comps['compilers'], *compiler_specs.a) check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -242,15 +238,11 @@ def test_write_key_in_memory(config, compiler_specs):
def test_write_key_to_disk(config, compiler_specs): def test_write_key_to_disk(config, compiler_specs):
# Write b_comps "on top of" a_comps. # Write b_comps "on top of" a_comps.
spack.config.update_config( spack.config.set('compilers', a_comps['compilers'], scope='low')
'compilers', a_comps['compilers'], scope='low' spack.config.set('compilers', b_comps['compilers'], scope='high')
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='high'
)
# Clear caches so we're forced to read from disk. # Clear caches so we're forced to read from disk.
spack.config.get_configuration().clear_caches() spack.config.config().clear_caches()
# Same check again, to ensure consistency. # Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a) check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -259,15 +251,11 @@ def test_write_key_to_disk(config, compiler_specs):
def test_write_to_same_priority_file(config, compiler_specs): def test_write_to_same_priority_file(config, compiler_specs):
# Write b_comps in the same file as a_comps. # Write b_comps in the same file as a_comps.
spack.config.update_config( spack.config.set('compilers', a_comps['compilers'], scope='low')
'compilers', a_comps['compilers'], scope='low' spack.config.set('compilers', b_comps['compilers'], scope='low')
)
spack.config.update_config(
'compilers', b_comps['compilers'], scope='low'
)
# Clear caches so we're forced to read from disk. # Clear caches so we're forced to read from disk.
spack.config.get_configuration().clear_caches() spack.config.config().clear_caches()
# Same check again, to ensure consistency. # Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a) check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@ -283,10 +271,10 @@ def test_write_to_same_priority_file(config, compiler_specs):
# repos # repos
def test_write_list_in_memory(config): def test_write_list_in_memory(config):
spack.config.update_config('repos', repos_low['repos'], scope='low') spack.config.set('repos', repos_low['repos'], scope='low')
spack.config.update_config('repos', repos_high['repos'], scope='high') spack.config.set('repos', repos_high['repos'], scope='high')
config = spack.config.get_config('repos') config = spack.config.get('repos')
assert config == repos_high['repos'] + repos_low['repos'] assert config == repos_high['repos'] + repos_low['repos']
@ -359,7 +347,7 @@ def test_merge_with_defaults(config, write_config_file):
""" """
write_config_file('packages', packages_merge_low, 'low') write_config_file('packages', packages_merge_low, 'low')
write_config_file('packages', packages_merge_high, 'high') write_config_file('packages', packages_merge_high, 'high')
cfg = spack.config.get_config('packages') cfg = spack.config.get('packages')
assert cfg['foo']['version'] == ['a'] assert cfg['foo']['version'] == ['a']
assert cfg['bar']['version'] == ['b'] assert cfg['bar']['version'] == ['b']
@ -383,13 +371,13 @@ def test_substitute_tempdir(config):
def test_read_config(config, write_config_file): def test_read_config(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
assert spack.config.get_config('config') == config_low['config'] assert spack.config.get('config') == config_low['config']
def test_read_config_override_all(config, write_config_file): def test_read_config_override_all(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
write_config_file('config', config_override_all, 'high') write_config_file('config', config_override_all, 'high')
assert spack.config.get_config('config') == { assert spack.config.get('config') == {
'install_tree': 'override_all' 'install_tree': 'override_all'
} }
@ -397,7 +385,7 @@ def test_read_config_override_all(config, write_config_file):
def test_read_config_override_key(config, write_config_file): def test_read_config_override_key(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
write_config_file('config', config_override_key, 'high') write_config_file('config', config_override_key, 'high')
assert spack.config.get_config('config') == { assert spack.config.get('config') == {
'install_tree': 'override_key', 'install_tree': 'override_key',
'build_stage': ['path1', 'path2', 'path3'] 'build_stage': ['path1', 'path2', 'path3']
} }
@ -406,7 +394,7 @@ def test_read_config_override_key(config, write_config_file):
def test_read_config_merge_list(config, write_config_file): def test_read_config_merge_list(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
write_config_file('config', config_merge_list, 'high') write_config_file('config', config_merge_list, 'high')
assert spack.config.get_config('config') == { assert spack.config.get('config') == {
'install_tree': 'install_tree_path', 'install_tree': 'install_tree_path',
'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3'] 'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
} }
@ -415,7 +403,7 @@ def test_read_config_merge_list(config, write_config_file):
def test_read_config_override_list(config, write_config_file): def test_read_config_override_list(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
write_config_file('config', config_override_list, 'high') write_config_file('config', config_override_list, 'high')
assert spack.config.get_config('config') == { assert spack.config.get('config') == {
'install_tree': 'install_tree_path', 'install_tree': 'install_tree_path',
'build_stage': ['patha', 'pathb'] 'build_stage': ['patha', 'pathb']
} }
@ -424,30 +412,30 @@ def test_read_config_override_list(config, write_config_file):
def test_internal_config_update(config, write_config_file): def test_internal_config_update(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
before = config.get_config('config') before = config.get('config')
assert before['install_tree'] == 'install_tree_path' assert before['install_tree'] == 'install_tree_path'
# add an internal configuration scope # add an internal configuration scope
scope = spack.config.InternalConfigScope('commands') scope = spack.config.InternalConfigScope('command_line')
assert 'InternalConfigScope' in repr(scope) assert 'InternalConfigScope' in repr(scope)
config.push_scope(scope) config.push_scope(scope)
command_config = config.get_config('config', scope='commands') command_config = config.get('config', scope='command_line')
command_config['install_tree'] = 'foo/bar' command_config['install_tree'] = 'foo/bar'
config.update_config('config', command_config, scope='commands') config.set('config', command_config, scope='command_line')
after = config.get_config('config') after = config.get('config')
assert after['install_tree'] == 'foo/bar' assert after['install_tree'] == 'foo/bar'
def test_internal_config_filename(config, write_config_file): def test_internal_config_filename(config, write_config_file):
write_config_file('config', config_low, 'low') write_config_file('config', config_low, 'low')
config.push_scope(spack.config.InternalConfigScope('commands')) config.push_scope(spack.config.InternalConfigScope('command_line'))
with pytest.raises(NotImplementedError): with pytest.raises(NotImplementedError):
config.get_config_filename('commands', 'config') config.get_config_filename('command_line', 'config')
def test_keys_are_ordered(): def test_keys_are_ordered():

View File

@ -69,7 +69,7 @@ def check_mirror():
mirror_root = join_path(stage.path, 'test-mirror') mirror_root = join_path(stage.path, 'test-mirror')
# register mirror with spack config # register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_root} mirrors = {'spack-mirror-test': 'file://' + mirror_root}
spack.config.update_config('mirrors', mirrors) spack.config.set('mirrors', mirrors)
spack.mirror.create(mirror_root, repos, no_checksum=True) spack.mirror.create(mirror_root, repos, no_checksum=True)
# Stage directory exists # Stage directory exists

View File

@ -93,7 +93,7 @@ def _impl(spec_str):
@pytest.fixture() @pytest.fixture()
def update_template_dirs(config, monkeypatch): def update_template_dirs(config, monkeypatch):
"""Mocks the template directories for tests""" """Mocks the template directories for tests"""
dirs = spack.config.get_config('config')['template_dirs'] dirs = spack.config.get('config:template_dirs')
dirs = [spack.util.path.canonicalize_path(x) for x in dirs] dirs = [spack.util.path.canonicalize_path(x) for x in dirs]
monkeypatch.setattr(spack, 'template_dirs', dirs) monkeypatch.setattr(spack, 'template_dirs', dirs)

View File

@ -64,7 +64,7 @@ def test_all_versions_are_lowercase():
def test_all_virtual_packages_have_default_providers(): def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set.""" """All virtual packages must have a default provider explicitly set."""
defaults = spack.config.get_config('packages', scope='defaults') defaults = spack.config.get('packages', scope='defaults')
default_providers = defaults['all']['providers'] default_providers = defaults['all']['providers']
providers = spack.repo.provider_index.providers providers = spack.repo.provider_index.providers

View File

@ -114,7 +114,7 @@ def test_buildcache(mock_archive, tmpdir):
# register mirror with spack config # register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_path} mirrors = {'spack-mirror-test': 'file://' + mirror_path}
spack.config.update_config('mirrors', mirrors) spack.config.set('mirrors', mirrors)
stage = spack.stage.Stage( stage = spack.stage.Stage(
mirrors['spack-mirror-test'], name="build_cache", keep=True) mirrors['spack-mirror-test'], name="build_cache", keep=True)
@ -213,7 +213,7 @@ def test_buildcache(mock_archive, tmpdir):
# unregister mirror with spack config # unregister mirror with spack config
mirrors = {} mirrors = {}
spack.config.update_config('mirrors', mirrors) spack.config.set('mirrors', mirrors)
shutil.rmtree(mirror_path) shutil.rmtree(mirror_path)
stage.destroy() stage.destroy()

View File

@ -114,15 +114,12 @@ def get_stage_path(stage, stage_name):
def tmpdir_for_stage(mock_archive): def tmpdir_for_stage(mock_archive):
"""Uses a temporary directory for staging""" """Uses a temporary directory for staging"""
current = spack.paths.stage_path current = spack.paths.stage_path
spack.config.update_config( spack.config.set(
'config', 'config',
{'build_stage': [str(mock_archive.test_tmp_dir)]}, {'build_stage': [str(mock_archive.test_tmp_dir)]},
scope='user' scope='user')
)
yield yield
spack.config.update_config( spack.config.set('config', {'build_stage': [current]}, scope='user')
'config', {'build_stage': [current]}, scope='user'
)
@pytest.fixture() @pytest.fixture()
@ -138,9 +135,8 @@ def mock_archive(tmpdir, monkeypatch):
# #
test_tmp_path = tmpdir.join('tmp') test_tmp_path = tmpdir.join('tmp')
# set _test_tmp_path as the default test directory to use for stages. # set _test_tmp_path as the default test directory to use for stages.
spack.config.update_config( spack.config.set(
'config', {'build_stage': [str(test_tmp_path)]}, scope='user' 'config', {'build_stage': [str(test_tmp_path)]}, scope='user')
)
archive_dir = tmpdir.join('test-files') archive_dir = tmpdir.join('test-files')
archive_name = 'test-files.tar.gz' archive_name = 'test-files.tar.gz'

View File

@ -91,7 +91,7 @@ class TestTengineEnvironment(object):
def test_template_retrieval(self): def test_template_retrieval(self):
"""Tests the template retrieval mechanism hooked into config files""" """Tests the template retrieval mechanism hooked into config files"""
# Check the directories are correct # Check the directories are correct
template_dirs = spack.config.get_config('config')['template_dirs'] template_dirs = spack.config.get('config:template_dirs')
template_dirs = [canonicalize_path(x) for x in template_dirs] template_dirs = [canonicalize_path(x) for x in template_dirs]
assert len(template_dirs) == 3 assert len(template_dirs) == 3

View File

@ -41,8 +41,8 @@ if ($?SPACK_ROOT) then
# Shamelessly stolen from setup-env.sh # Shamelessly stolen from setup-env.sh
set _sp_sys_type = `$SPACK_ROOT/bin/spack python -c 'print(spack.architecture.sys_type())'` set _sp_sys_type = `$SPACK_ROOT/bin/spack python -c 'print(spack.architecture.sys_type())'`
set _sp_dotkit_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots').get('dotkit')))"` set _sp_dotkit_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get('config:module_roots', {}).get('dotkit')))"`
set _sp_tcl_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots').get('tcl')))"` set _sp_tcl_root = `$SPACK_ROOT/bin/spack python -c "print(spack.util.path.canonicalize_path(spack.config.get('config:module_roots', {}).get('tcl')))"`
# Set up modules and dotkit search paths in the user environment # Set up modules and dotkit search paths in the user environment
_spack_pathadd DK_NODE "$_sp_dotkit_root/$_sp_sys_type" _spack_pathadd DK_NODE "$_sp_dotkit_root/$_sp_sys_type"

View File

@ -239,8 +239,8 @@ fi;
_python_command=$(printf "%s\\\n%s\\\n%s" \ _python_command=$(printf "%s\\\n%s\\\n%s" \
"print(\'_sp_sys_type={0}\'.format(spack.architecture.sys_type()))" \ "print(\'_sp_sys_type={0}\'.format(spack.architecture.sys_type()))" \
"print(\'_sp_dotkit_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get_config(\'config\').get(\'module_roots\', {}).get(\'dotkit\'))))" \ "print(\'_sp_dotkit_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get(\'config:module_roots\', {}).get(\'dotkit\'))))" \
"print(\'_sp_tcl_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get_config(\'config\').get(\'module_roots\', {}).get(\'tcl\'))))" "print(\'_sp_tcl_root={0}\'.format(spack.util.path.canonicalize_path(spack.config.get(\'config:module_roots\', {}).get(\'tcl\'))))"
) )
_assignment_command=$(spack-python -c "exec('${_python_command}')") _assignment_command=$(spack-python -c "exec('${_python_command}')")