spack config: new subcommands add/remove (#13920)
spack config add <value>: add the nested value to the specified configuration scope
spack config remove/rm: remove the specified configuration from the relevant scope
This commit is contained in:
parent 089a21dd1d
commit b26e93af3d
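A quick sketch of the new workflow, using the option names exercised by the tests added in this commit (config:dirty, config:template_dirs); any valid colon-separated configuration path works the same way:

    $ spack config add config:dirty:true              # set a nested value in the default scope
    $ spack config add config:template_dirs:[test2]   # append to a list-valued option
    $ spack config add -f spack.yaml                   # merge every section found in a config file
    $ spack config --scope site add config:dirty:true  # target an explicit scope
    $ spack config remove config:dirty                 # remove a key ('rm' is an alias for 'remove')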
@@ -5,12 +5,14 @@
 from __future__ import print_function
 import os
+import re
 
 import llnl.util.tty as tty
 
 import spack.config
+import spack.schema.env
 import spack.environment as ev
+import spack.util.spack_yaml as syaml
 from spack.util.editor import editor
 
 description = "get and set configuration options"
@@ -58,24 +60,48 @@ def setup_parser(subparser):
 
     sp.add_parser('list', help='list configuration sections')
 
+    add_parser = sp.add_parser('add', help='add configuration parameters')
+    add_parser.add_argument(
+        'path', nargs='?',
+        help="colon-separated path to config that should be added,"
+             " e.g. 'config:default:true'")
+    add_parser.add_argument(
+        '-f', '--file',
+        help="file from which to set all config values"
+    )
+
+    remove_parser = sp.add_parser('remove', aliases=['rm'],
+                                  help='remove configuration parameters')
+    remove_parser.add_argument(
+        'path',
+        help="colon-separated path to config that should be removed,"
+             " e.g. 'config:default:true'")
+
+    # Make the add parser available later
+    setup_parser.add_parser = add_parser
 
 
 def _get_scope_and_section(args):
     """Extract config scope and section from arguments."""
     scope = args.scope
-    section = args.section
+    section = getattr(args, 'section', None)
+    path = getattr(args, 'path', None)
 
     # w/no args and an active environment, point to env manifest
-    if not args.section:
+    if not section:
         env = ev.get_env(args, 'config edit')
         if env:
             scope = env.env_file_config_scope_name()
 
     # set scope defaults
-    elif not args.scope:
-        if section == 'compilers':
-            scope = spack.config.default_modify_scope()
-        else:
-            scope = 'user'
+    elif not scope:
+        scope = spack.config.default_modify_scope(section)
+
+    # special handling for commands that take value instead of section
+    if path:
+        section = path[:path.find(':')] if ':' in path else path
+        if not scope:
+            scope = spack.config.default_modify_scope(section)
 
     return scope, section
@@ -135,11 +161,126 @@ def config_list(args):
     print(' '.join(list(spack.config.section_schemas)))
 
 
+def set_config(args, section, new, scope):
+    if re.match(r'env.*', scope):
+        e = ev.get_env(args, 'config add')
+        e.set_config(section, new)
+    else:
+        spack.config.set(section, new, scope=scope)
+
+
+def config_add(args):
+    """Add the given configuration to the specified config scope
+
+    This is a stateful operation that edits the config files."""
+    if not (args.file or args.path):
+        tty.error("No changes requested. Specify a file or value.")
+        setup_parser.add_parser.print_help()
+        exit(1)
+
+    scope, section = _get_scope_and_section(args)
+
+    # Updates from file
+    if args.file:
+        # Get file as config dict
+        data = spack.config.read_config_file(args.file)
+        if any(k in data for k in spack.schema.env.keys):
+            data = ev.config_dict(data)
+
+        # update all sections from config dict
+        # We have to iterate on keys to keep overrides from the file
+        for section in data.keys():
+            if section in spack.config.section_schemas.keys():
+                # Special handling for compiler scope difference
+                # Has to be handled after we choose a section
+                if scope is None:
+                    scope = spack.config.default_modify_scope(section)
+
+                value = data[section]
+                existing = spack.config.get(section, scope=scope)
+                new = spack.config.merge_yaml(existing, value)
+
+                set_config(args, section, new, scope)
+
+    if args.path:
+        components = spack.config.process_config_path(args.path)
+
+        has_existing_value = True
+        path = ''
+        override = False
+        for idx, name in enumerate(components[:-1]):
+            # First handle double colons in constructing path
+            colon = '::' if override else ':' if path else ''
+            path += colon + name
+            if getattr(name, 'override', False):
+                override = True
+            else:
+                override = False
+
+            # Test whether there is an existing value at this level
+            existing = spack.config.get(path, scope=scope)
+
+            if existing is None:
+                has_existing_value = False
+                # We've nested further than existing config, so we need the
+                # type information for validation to know how to handle bare
+                # values appended to lists.
+                existing = spack.config.get_valid_type(path)
+
+                # construct value from this point down
+                value = syaml.load_config(components[-1])
+                for component in reversed(components[idx + 1:-1]):
+                    value = {component: value}
+                break
+
+        if has_existing_value:
+            path, _, value = args.path.rpartition(':')
+            value = syaml.load_config(value)
+            existing = spack.config.get(path, scope=scope)
+
+        # append values to lists
+        if isinstance(existing, list) and not isinstance(value, list):
+            value = [value]
+
+        # merge value into existing
+        new = spack.config.merge_yaml(existing, value)
+        set_config(args, path, new, scope)
+
+
+def config_remove(args):
+    """Remove the given configuration from the specified config scope
+
+    This is a stateful operation that edits the config files."""
+    scope, _ = _get_scope_and_section(args)
+
+    path, _, value = args.path.rpartition(':')
+    existing = spack.config.get(path, scope=scope)
+
+    if not isinstance(existing, (list, dict)):
+        path, _, value = path.rpartition(':')
+        existing = spack.config.get(path, scope=scope)
+
+    value = syaml.load(value)
+
+    if isinstance(existing, list):
+        values = value if isinstance(value, list) else [value]
+        for v in values:
+            existing.remove(v)
+    elif isinstance(existing, dict):
+        existing.pop(value, None)
+    else:
+        # This should be impossible to reach
+        raise spack.config.ConfigError('Config has nested non-dict values')
+
+    set_config(args, path, existing, scope)
+
+
 def config(parser, args):
-    action = {
-        'get': config_get,
-        'blame': config_blame,
-        'edit': config_edit,
-        'list': config_list,
-    }
+    action = {'get': config_get,
+              'blame': config_blame,
+              'edit': config_edit,
+              'list': config_list,
+              'add': config_add,
+              'rm': config_remove,
+              'remove': config_remove}
     action[args.config_command](args)
@@ -186,7 +186,7 @@ def _update_pkg_config(pkg_to_entries, not_buildable):
     cfg_scope = spack.config.default_modify_scope()
     pkgs_cfg = spack.config.get('packages', scope=cfg_scope)
 
-    spack.config._merge_yaml(pkgs_cfg, pkg_to_cfg)
+    spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
     spack.config.set('packages', pkgs_cfg, scope=cfg_scope)
@@ -56,12 +56,12 @@
 import spack.schema.modules
 import spack.schema.config
 import spack.schema.upstreams
+import spack.schema.env
 from spack.error import SpackError
 
 # Hacked yaml for configuration files preserves line numbers.
 import spack.util.spack_yaml as syaml
 
 
 #: Dict from section names -> schema for that section
 section_schemas = {
     'compilers': spack.schema.compilers.schema,
@@ -70,9 +70,15 @@
     'packages': spack.schema.packages.schema,
     'modules': spack.schema.modules.schema,
     'config': spack.schema.config.schema,
-    'upstreams': spack.schema.upstreams.schema
+    'upstreams': spack.schema.upstreams.schema,
 }
 
+# Same as above, but including keys for environments
+# this allows us to unify config reading between configs and environments
+all_schemas = copy.deepcopy(section_schemas)
+all_schemas.update(dict((key, spack.schema.env.schema)
+                        for key in spack.schema.env.keys))
+
 #: Builtin paths to configuration files in Spack
 configuration_paths = (
     # Default configuration scope is the lowest-level scope. These are
@@ -142,19 +148,21 @@ def get_section(self, section):
         if section not in self.sections:
             path = self.get_section_filename(section)
             schema = section_schemas[section]
-            data = _read_config_file(path, schema)
+            data = read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]
 
     def write_section(self, section):
         filename = self.get_section_filename(section)
         data = self.get_section(section)
-        validate(data, section_schemas[section])
+
+        # We copy data here to avoid adding defaults at write time
+        validate_data = copy.deepcopy(data)
+        validate(validate_data, section_schemas[section])
+
         try:
             mkdirp(self.path)
             with open(filename, 'w') as f:
-                validate(data, section_schemas[section])
                 syaml.dump_config(data, stream=f, default_flow_style=False)
         except (yaml.YAMLError, IOError) as e:
             raise ConfigFileError(
@@ -217,7 +225,7 @@ def get_section(self, section):
         #     }
         # }
         if self._raw_data is None:
-            self._raw_data = _read_config_file(self.path, self.schema)
+            self._raw_data = read_config_file(self.path, self.schema)
             if self._raw_data is None:
                 return None
@@ -376,6 +384,16 @@ def highest_precedence_scope(self):
         """Non-internal scope with highest precedence."""
         return next(reversed(self.file_scopes), None)
 
+    def highest_precedence_non_platform_scope(self):
+        """Non-internal non-platform scope with highest precedence
+
+        Platform-specific scopes are of the form scope/platform"""
+        generator = reversed(self.file_scopes)
+        highest = next(generator, None)
+        while highest and '/' in highest.name:
+            highest = next(generator, None)
+        return highest
+
     def matching_scopes(self, reg_expr):
         """
         List of all scopes whose names match the provided regular expression.
@@ -435,8 +453,21 @@ def update_config(self, section, update_data, scope=None):
         _validate_section_name(section)  # validate section name
         scope = self._validate_scope(scope)  # get ConfigScope object
 
+        # manually preserve comments
+        need_comment_copy = (section in scope.sections and
+                             scope.sections[section] is not None)
+        if need_comment_copy:
+            comments = getattr(scope.sections[section][section],
+                               yaml.comments.Comment.attrib,
+                               None)
+
         # read only the requested section's data.
-        scope.sections[section] = {section: update_data}
+        scope.sections[section] = syaml.syaml_dict({section: update_data})
+        if need_comment_copy and comments:
+            setattr(scope.sections[section][section],
+                    yaml.comments.Comment.attrib,
+                    comments)
+
         scope.write_section(section)
 
     def get_config(self, section, scope=None):
@@ -483,14 +514,17 @@ def get_config(self, section, scope=None):
             if section not in data:
                 continue
 
-            merged_section = _merge_yaml(merged_section, data)
+            merged_section = merge_yaml(merged_section, data)
 
         # no config files -- empty config.
         if section not in merged_section:
-            return {}
+            return syaml.syaml_dict()
 
         # take the top key off before returning.
-        return merged_section[section]
+        ret = merged_section[section]
+        if isinstance(ret, dict):
+            ret = syaml.syaml_dict(ret)
+        return ret
 
     def get(self, path, default=None, scope=None):
         """Get a config section or a single value from one.
@@ -506,14 +540,12 @@ def get(self, path, default=None, scope=None):
 
         We use ``:`` as the separator, like YAML objects.
         """
-        # TODO: Currently only handles maps. Think about lists if neded.
-        section, _, rest = path.partition(':')
+        # TODO: Currently only handles maps. Think about lists if needed.
+        parts = process_config_path(path)
+        section = parts.pop(0)
 
         value = self.get_config(section, scope=scope)
-        if not rest:
-            return value
 
-        parts = rest.split(':')
         while parts:
             key = parts.pop(0)
             value = value.get(key, default)
@@ -525,21 +557,40 @@ def set(self, path, value, scope=None):
 
         Accepts the path syntax described in ``get()``.
         """
-        parts = _process_config_path(path)
+        if ':' not in path:
+            # handle bare section name as path
+            self.update_config(path, value, scope=scope)
+            return
+
+        parts = process_config_path(path)
         section = parts.pop(0)
 
-        if not parts:
-            self.update_config(section, value, scope=scope)
-        else:
-            section_data = self.get_config(section, scope=scope)
-
-            data = section_data
-            while len(parts) > 1:
-                key = parts.pop(0)
-                data = data[key]
-            data[parts[0]] = value
-
-            self.update_config(section, section_data, scope=scope)
+        section_data = self.get_config(section, scope=scope)
+
+        data = section_data
+        while len(parts) > 1:
+            key = parts.pop(0)
+            if _override(key):
+                new = type(data[key])()
+                del data[key]
+            else:
+                new = data[key]
+
+            if isinstance(new, dict):
+                # Make it an ordered dict
+                new = syaml.syaml_dict(new)
+                # reattach to parent object
+                data[key] = new
+            data = new
+
+        if _override(parts[0]):
+            data.pop(parts[0], None)
+
+        # update new value
+        data[parts[0]] = value
+
+        self.update_config(section, section_data, scope=scope)
 
     def __iter__(self):
         """Iterate over scopes in this configuration."""
@@ -692,26 +743,53 @@ def _validate_section_name(section):
           % (section, " ".join(section_schemas.keys())))
 
 
-def validate(data, schema, set_defaults=True):
+def validate(data, schema, filename=None):
     """Validate data read in from a Spack YAML file.
 
     Arguments:
         data (dict or list): data read from a Spack YAML file
         schema (dict or list): jsonschema to validate data
-        set_defaults (bool): whether to set defaults based on the schema
 
     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
     """
     import jsonschema
+    # validate a copy to avoid adding defaults
+    # This allows us to round-trip data without adding to it.
+    test_data = copy.deepcopy(data)
+
+    if isinstance(test_data, yaml.comments.CommentedMap):
+        # HACK to fully copy ruamel CommentedMap that doesn't provide copy
+        # method. Especially necessary for environments
+        setattr(test_data,
+                yaml.comments.Comment.attrib,
+                getattr(data,
+                        yaml.comments.Comment.attrib,
+                        yaml.comments.Comment()))
+
     try:
-        spack.schema.Validator(schema).validate(data)
+        spack.schema.Validator(schema).validate(test_data)
     except jsonschema.ValidationError as e:
-        raise ConfigFormatError(e, data)
+        if hasattr(e.instance, 'lc'):
+            line_number = e.instance.lc.line + 1
+        else:
+            line_number = None
+        raise ConfigFormatError(e, data, filename, line_number)
+    # return the validated data so that we can access the raw data
+    # mostly relevant for environments
+    return test_data
 
 
-def _read_config_file(filename, schema):
-    """Read a YAML configuration file."""
+def read_config_file(filename, schema=None):
+    """Read a YAML configuration file.
+
+    User can provide a schema for validation. If no schema is provided,
+    we will infer the schema from the top-level key."""
+    # Dev: Inferring schema and allowing it to be provided directly allows us
+    # to preserve flexibility in calling convention (don't need to provide
+    # schema when it's not necessary) while allowing us to validate against a
+    # known schema when the top-level key could be incorrect.
+
     # Ignore nonexisting files.
     if not os.path.exists(filename):
         return None
@@ -729,9 +807,16 @@ def _read_config_file(filename, schema):
             data = syaml.load_config(f)
 
         if data:
+            if not schema:
+                key = next(iter(data))
+                schema = all_schemas[key]
             validate(data, schema)
         return data
 
+    except StopIteration:
+        raise ConfigFileError(
+            "Config file is empty or is not a valid YAML dict: %s" % filename)
+
     except MarkedYAMLError as e:
         raise ConfigFileError(
             "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
@@ -772,13 +857,40 @@ def _mark_internal(data, name):
     return d
 
 
-def _merge_yaml(dest, source):
+def get_valid_type(path):
+    """Returns an instance of a type that will pass validation for path.
+
+    The instance is created by calling the constructor with no arguments.
+    If multiple types will satisfy validation for data at the configuration
+    path given, the priority order is ``list``, ``dict``, ``str``, ``bool``,
+    ``int``, ``float``.
+    """
+    components = process_config_path(path)
+    section = components[0]
+    for type in (list, syaml.syaml_dict, str, bool, int, float):
+        try:
+            ret = type()
+            test_data = ret
+            for component in reversed(components):
+                test_data = {component: test_data}
+            validate(test_data, section_schemas[section])
+            return ret
+        except (ConfigFormatError, AttributeError):
+            # This type won't validate, try the next one
+            # Except AttributeError because undefined behavior of dict ordering
+            # in python 3.5 can cause the validator to raise an AttributeError
+            # instead of a ConfigFormatError.
+            pass
+    raise ConfigError("Cannot determine valid type for path '%s'." % path)
+
+
+def merge_yaml(dest, source):
     """Merges source into dest; entries in source take precedence over dest.
 
     This routine may modify dest and should be assigned to dest, in
     case dest was None to begin with, e.g.:
 
-       dest = _merge_yaml(dest, source)
+       dest = merge_yaml(dest, source)
 
     Config file authors can optionally end any attribute in a dict
     with `::` instead of `:`, and the key will override that of the
@@ -793,6 +905,7 @@ def they_are(t):
 
     # Source list is prepended (for precedence)
     if they_are(list):
+        # Make sure to copy ruamel comments
         dest[:] = source + [x for x in dest if x not in source]
         return dest
@@ -805,9 +918,10 @@ def they_are(t):
         if _override(sk) or sk not in dest:
             # if sk ended with ::, or if it's new, completely override
             dest[sk] = copy.copy(sv)
+            # copy ruamel comments manually
         else:
             # otherwise, merge the YAML
-            dest[sk] = _merge_yaml(dest[sk], source[sk])
+            dest[sk] = merge_yaml(dest[sk], source[sk])
 
         # this seems unintuitive, but see below. We need this because
         # Python dicts do not overwrite keys on insert, and we want
@@ -837,7 +951,7 @@ def they_are(t):
 # Process a path argument to config.set() that may contain overrides ('::' or
 # trailing ':')
 #
-def _process_config_path(path):
+def process_config_path(path):
     result = []
     if path.startswith(':'):
         raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
@@ -861,13 +975,20 @@ def _process_config_path(path):
 #
 # Settings for commands that modify configuration
 #
-def default_modify_scope():
+def default_modify_scope(section='config'):
     """Return the config scope that commands should modify by default.
 
     Commands that modify configuration by default modify the *highest*
     priority scope.
+
+    Arguments:
+        section (boolean): Section for which to get the default scope.
+            If this is not 'compilers', a general (non-platform) scope is used.
     """
-    return spack.config.config.highest_precedence_scope().name
+    if section == 'compilers':
+        return spack.config.config.highest_precedence_scope().name
+    else:
+        return spack.config.config.highest_precedence_non_platform_scope().name
 
 
 def default_list_scope():
@@ -894,17 +1015,17 @@ class ConfigFormatError(ConfigError):
     """Raised when a configuration format does not match its schema."""
 
     def __init__(self, validation_error, data, filename=None, line=None):
+        # spack yaml has its own file/line marks -- try to find them
+        # we prioritize these over the inputs
+        mark = self._get_mark(validation_error, data)
+        if mark:
+            filename = mark.name
+            line = mark.line + 1
+
         self.filename = filename  # record this for ruamel.yaml
 
+        # construct location
         location = '<unknown file>'
-
-        # spack yaml has its own file/line marks -- try to find them
-        if not filename and not line:
-            mark = self._get_mark(validation_error, data)
-            if mark:
-                filename = mark.name
-                line = mark.line + 1
-
         if filename:
             location = '%s' % filename
             if line is not None:
@@ -80,9 +80,6 @@
 #: version of the lockfile format. Must increase monotonically.
 lockfile_format_version = 2
 
-#: legal first keys in the spack.yaml manifest file
-env_schema_keys = ('spack', 'env')
-
 # Magic names
 # The name of the standalone spec list in the manifest yaml
 user_speclist_name = 'specs'
@@ -366,7 +363,7 @@ def create(name, init_file=None, with_view=None):
 
 def config_dict(yaml_data):
     """Get the configuration scope section out of an spack.yaml"""
-    key = spack.config.first_existing(yaml_data, env_schema_keys)
+    key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
     return yaml_data[key]
@@ -392,42 +389,19 @@ def all_environments():
         yield read(name)
 
 
-def validate(data, filename=None):
-    # validating changes data by adding defaults. Return validated data
-    validate_data = copy.deepcopy(data)
-    # HACK to fully copy ruamel CommentedMap that doesn't provide copy method
-    import ruamel.yaml as yaml
-    setattr(
-        validate_data,
-        yaml.comments.Comment.attrib,
-        getattr(data, yaml.comments.Comment.attrib, yaml.comments.Comment())
-    )
-
-    import jsonschema
-    try:
-        spack.schema.Validator(spack.schema.env.schema).validate(validate_data)
-    except jsonschema.ValidationError as e:
-        if hasattr(e.instance, 'lc'):
-            line_number = e.instance.lc.line + 1
-        else:
-            line_number = None
-        raise spack.config.ConfigFormatError(
-            e, data, filename, line_number)
-    return validate_data
-
-
 def _read_yaml(str_or_file):
     """Read YAML from a file for round-trip parsing."""
     data = syaml.load_config(str_or_file)
     filename = getattr(str_or_file, 'name', None)
-    default_data = validate(data, filename)
+    default_data = spack.config.validate(
+        data, spack.schema.env.schema, filename)
     return (data, default_data)
 
 
 def _write_yaml(data, str_or_file):
     """Write YAML to a file preserving comments and dict order."""
     filename = getattr(str_or_file, 'name', None)
-    validate(data, filename)
+    spack.config.validate(data, spack.schema.env.schema, filename)
     syaml.dump_config(data, str_or_file, default_flow_style=False)
@@ -815,12 +789,21 @@ def env_file_config_scope(self):
         return spack.config.SingleFileScope(config_name,
                                             self.manifest_path,
                                             spack.schema.env.schema,
-                                            [env_schema_keys])
+                                            [spack.schema.env.keys])
 
     def config_scopes(self):
         """A list of all configuration scopes for this environment."""
         return self.included_config_scopes() + [self.env_file_config_scope()]
 
+    def set_config(self, path, value):
+        """Set configuration for this environment"""
+        yaml = config_dict(self.yaml)
+        keys = spack.config.process_config_path(path)
+        for key in keys[:-1]:
+            yaml = yaml[key]
+        yaml[keys[-1]] = value
+        self.write()
+
     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)
@@ -1501,8 +1484,12 @@ def write(self, regenerate_views=True):
                 del yaml_dict[key]
 
         # if all that worked, write out the manifest file at the top level
-        # Only actually write if it has changed or was never written
-        changed = self.yaml != self.raw_yaml
+        # (we used to check whether the yaml had changed and not write it out
+        # if it hadn't. We can't do that anymore because it could be the only
+        # thing that changed is the "override" attribute on a config dict,
+        # which would not show up in even a string comparison between the two
+        # keys).
+        changed = not yaml_equivalent(self.yaml, self.raw_yaml)
         written = os.path.exists(self.manifest_path)
         if changed or not written:
             self.raw_yaml = copy.deepcopy(self.yaml)
@@ -1529,6 +1516,39 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         activate(self._previous_active)
 
 
+def yaml_equivalent(first, second):
+    """Returns whether two spack yaml items are equivalent, including overrides
+    """
+    if isinstance(first, dict):
+        return isinstance(second, dict) and _equiv_dict(first, second)
+    elif isinstance(first, list):
+        return isinstance(second, list) and _equiv_list(first, second)
+    else:  # it's a string
+        return isinstance(second, six.string_types) and first == second
+
+
+def _equiv_list(first, second):
+    """Returns whether two spack yaml lists are equivalent, including overrides
+    """
+    if len(first) != len(second):
+        return False
+    return all(yaml_equivalent(f, s) for f, s in zip(first, second))
+
+
+def _equiv_dict(first, second):
+    """Returns whether two spack yaml dicts are equivalent, including overrides
+    """
+    if len(first) != len(second):
+        return False
+    same_values = all(yaml_equivalent(fv, sv)
+                      for fv, sv in zip(first.values(), second.values()))
+    same_keys_with_same_overrides = all(
+        fk == sk and getattr(fk, 'override', False) == getattr(sk, 'override',
+                                                               False)
+        for fk, sk in zip(first.keys(), second.keys()))
+    return same_values and same_keys_with_same_overrides
+
+
 def display_specs(concretized_specs):
     """Displays the list of specs returned by `Environment.concretize()`.
@@ -13,6 +13,8 @@
 import spack.schema.merged
 import spack.schema.projections
 
+#: legal first keys in the schema
+keys = ('spack', 'env')
+
 spec_list_schema = {
     'type': 'array',
@@ -2,7 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import pytest
 import os
 
 from llnl.util.filesystem import mkdirp
@@ -12,6 +12,7 @@
 from spack.main import SpackCommand
 
 config = SpackCommand('config')
+env = SpackCommand('env')
 
 
 def test_get_config_scope(mock_low_high_config):
@@ -46,7 +47,7 @@ def test_get_config_scope_merged(mock_low_high_config):
 
 def test_config_edit():
     """Ensure `spack config edit` edits the right paths."""
-    dms = spack.config.default_modify_scope()
+    dms = spack.config.default_modify_scope('compilers')
     dms_path = spack.config.config.scopes[dms].path
     user_path = spack.config.config.scopes['user'].path
@@ -97,3 +98,308 @@ def test_config_list():
     output = config('list')
     assert 'compilers' in output
     assert 'packages' in output
+
+
+def test_config_add(mutable_empty_config):
+    config('add', 'config:dirty:true')
+    output = config('get', 'config')
+
+    assert output == """config:
+  dirty: true
+"""
+
+
+def test_config_add_list(mutable_empty_config):
+    config('add', 'config:template_dirs:test1')
+    config('add', 'config:template_dirs:[test2]')
+    config('add', 'config:template_dirs:test3')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test3
+  - test2
+  - test1
+"""
+
+
+def test_config_add_override(mutable_empty_config):
+    config('--scope', 'site', 'add', 'config:template_dirs:test1')
+    config('add', 'config:template_dirs:[test2]')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test2
+  - test1
+"""
+
+    config('add', 'config::template_dirs:[test2]')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test2
+"""
+
+
+def test_config_add_override_leaf(mutable_empty_config):
+    config('--scope', 'site', 'add', 'config:template_dirs:test1')
+    config('add', 'config:template_dirs:[test2]')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test2
+  - test1
+"""
+
+    config('add', 'config:template_dirs::[test2]')
+    output = config('get', 'config')
+
+    assert output == """config:
+  'template_dirs:':
+  - test2
+"""
+
+
+def test_config_add_update_dict(mutable_empty_config):
+    config('add', 'packages:all:compiler:[gcc]')
+    config('add', 'packages:all:version:1.0.0')
+    output = config('get', 'packages')
+
+    expected = """packages:
+  all:
+    compiler: [gcc]
+    version:
+    - 1.0.0
+"""
+
+    assert output == expected
+
+
+def test_config_add_ordered_dict(mutable_empty_config):
+    config('add', 'mirrors:first:/path/to/first')
+    config('add', 'mirrors:second:/path/to/second')
+    output = config('get', 'mirrors')
+
+    assert output == """mirrors:
+  first: /path/to/first
+  second: /path/to/second
+"""
+
+
+def test_config_add_invalid_fails(mutable_empty_config):
+    config('add', 'packages:all:variants:+debug')
+    with pytest.raises(
+        (spack.config.ConfigFormatError, AttributeError)
+    ):
+        config('add', 'packages:all:True')
+
+
+def test_config_add_from_file(mutable_empty_config, tmpdir):
+    contents = """spack:
+  config:
+    dirty: true
+"""
+
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+    config('add', '-f', file)
+    output = config('get', 'config')
+
+    assert output == """config:
+  dirty: true
+"""
+
+
+def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
+    contents = """spack:
+  config:
+    dirty: true
+    template_dirs: [test1]
+"""
+
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+    config('add', '-f', file)
+    output = config('get', 'config')
+
+    assert output == """config:
+  dirty: true
+  template_dirs: [test1]
+"""
+
+
+def test_config_add_override_from_file(mutable_empty_config, tmpdir):
+    config('--scope', 'site', 'add', 'config:template_dirs:test1')
+    contents = """spack:
+  config::
+    template_dirs: [test2]
+"""
+
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+    config('add', '-f', file)
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs: [test2]
+"""
+
+
+def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
+    config('--scope', 'site', 'add', 'config:template_dirs:test1')
+    contents = """spack:
+  config:
+    template_dirs:: [test2]
+"""
+
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+    config('add', '-f', file)
+    output = config('get', 'config')
+
+    assert output == """config:
+  'template_dirs:': [test2]
+"""
+
+
+def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
+    config('add', 'packages:all:compiler:[gcc]')
+
+    # contents to add to file
+    contents = """spack:
+  packages:
+    all:
+      version:
+      - 1.0.0
+"""
+
+    # create temp file and add it to config
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+    config('add', '-f', file)
+
+    # get results
+    output = config('get', 'packages')
+
+    expected = """packages:
+  all:
+    compiler: [gcc]
+    version:
+    - 1.0.0
+"""
+
+    assert output == expected
+
+
+def test_config_add_invalid_file_fails(tmpdir):
+    # contents to add to file
+    # invalid because version requires a list
+    contents = """spack:
+  packages:
+    all:
+      version: 1.0.0
+"""
+
+    # create temp file and add it to config
+    file = str(tmpdir.join('spack.yaml'))
+    with open(file, 'w') as f:
+        f.write(contents)
+
+    with pytest.raises(
+        (spack.config.ConfigFormatError)
+    ):
+        config('add', '-f', file)
+
+
+def test_config_remove_value(mutable_empty_config):
+    config('add', 'config:dirty:true')
+    config('remove', 'config:dirty:true')
+    output = config('get', 'config')
+
+    assert output == """config: {}
+"""
+
+
+def test_config_remove_alias_rm(mutable_empty_config):
+    config('add', 'config:dirty:true')
+    config('rm', 'config:dirty:true')
+    output = config('get', 'config')
+
+    assert output == """config: {}
+"""
+
+
+def test_config_remove_dict(mutable_empty_config):
+    config('add', 'config:dirty:true')
+    config('rm', 'config:dirty')
+    output = config('get', 'config')
+
+    assert output == """config: {}
+"""
+
+
+def test_remove_from_list(mutable_empty_config):
+    config('add', 'config:template_dirs:test1')
+    config('add', 'config:template_dirs:[test2]')
+    config('add', 'config:template_dirs:test3')
+    config('remove', 'config:template_dirs:test2')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test3
+  - test1
+"""
+
+
+def test_remove_list(mutable_empty_config):
+    config('add', 'config:template_dirs:test1')
+    config('add', 'config:template_dirs:[test2]')
+    config('add', 'config:template_dirs:test3')
+    config('remove', 'config:template_dirs:[test2]')
+    output = config('get', 'config')
+
+    assert output == """config:
+  template_dirs:
+  - test3
+  - test1
+"""
+
+
+def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
+    env = ev.create('test')
+    with env:
+        config('add', 'config:dirty:true')
+        output = config('get')
+
+    expected = ev.default_manifest_yaml
+    expected += """  config:
+    dirty: true
+
+"""
+    assert output == expected
+
+
+def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
+    env('create', 'test')
+
+    with ev.read('test'):
+        config('add', 'config:dirty:true')
+
+    with ev.read('test'):
+        config('rm', 'config:dirty')
+        output = config('get')
+
+    expected = ev.default_manifest_yaml
+    expected += """  config: {}
+
+"""
+    assert output == expected
@@ -576,7 +576,7 @@ def get_config_error(filename, schema, yaml_string):
 
     # parse and return error, or fail.
     try:
-        spack.config._read_config_file(filename, schema)
+        spack.config.read_config_file(filename, schema)
     except spack.config.ConfigFormatError as e:
         return e
     else:
@@ -374,6 +374,19 @@ def linux_os():
     return LinuxOS(name=name, version=version)
 
 
+@pytest.fixture(scope='session')
+def default_config():
+    """Isolates the default configuration from the user configs.
+
+    This ensures we can test the real default configuration without having
+    tests fail when the user overrides the defaults that we test against."""
+    defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
+    defaults_scope = spack.config.ConfigScope('defaults', defaults_path)
+    defaults_config = spack.config.Configuration(defaults_scope)
+    with use_configuration(defaults_config):
+        yield defaults_config
+
+
 @pytest.fixture(scope='session')
 def configuration_dir(tmpdir_factory, linux_os):
     """Copies mock configuration files in a temporary directory. Returns the
@@ -436,6 +449,19 @@ def mutable_config(tmpdir_factory, configuration_dir):
         yield cfg
 
 
+@pytest.fixture(scope='function')
+def mutable_empty_config(tmpdir_factory, configuration_dir):
+    """Empty configuration that can be modified by the tests."""
+    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
+
+    cfg = spack.config.Configuration(
+        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
+          for name in ['site', 'system', 'user']])
+
+    with use_configuration(cfg):
+        yield cfg
+
+
 @pytest.fixture()
 def mock_low_high_config(tmpdir):
     """Mocks two configuration scopes: 'low' and 'high'."""
@@ -9,8 +9,8 @@
 containerize = spack.main.SpackCommand('containerize')
 
 
-def test_command(configuration_dir, capsys):
+def test_command(default_config, container_config_dir, capsys):
     with capsys.disabled():
-        with fs.working_dir(configuration_dir):
+        with fs.working_dir(container_config_dir):
             output = containerize()
     assert 'FROM spack/ubuntu-bionic' in output
@@ -39,5 +39,5 @@ def dumper(configuration):
 
 
 @pytest.fixture()
-def configuration_dir(minimal_configuration, config_dumper):
+def container_config_dir(minimal_configuration, config_dumper):
     return config_dumper(minimal_configuration)
@@ -42,7 +42,7 @@ def test_packages(minimal_configuration):
     assert p.list == pkgs
 
 
-def test_ensure_render_works(minimal_configuration):
+def test_ensure_render_works(minimal_configuration, default_config):
     # Here we just want to ensure that nothing is raised
     writer = writers.create(minimal_configuration)
     writer()
@@ -13,7 +13,7 @@ def singularity_configuration(minimal_configuration):
     return minimal_configuration
 
 
-def test_ensure_render_works(singularity_configuration):
+def test_ensure_render_works(default_config, singularity_configuration):
     container_config = singularity_configuration['spack']['container']
     assert container_config['format'] == 'singularity'
     # Here we just want to ensure that nothing is raised
@@ -579,7 +579,7 @@ _spack_config() {
     then
        SPACK_COMPREPLY="-h --help --scope"
     else
-       SPACK_COMPREPLY="get blame edit list"
+       SPACK_COMPREPLY="get blame edit list add remove rm"
     fi
 }
@@ -614,6 +614,33 @@ _spack_config_list() {
     SPACK_COMPREPLY="-h --help"
 }
 
+_spack_config_add() {
+    if $list_options
+    then
+        SPACK_COMPREPLY="-h --help -f --file"
+    else
+        SPACK_COMPREPLY=""
+    fi
+}
+
+_spack_config_remove() {
+    if $list_options
+    then
+        SPACK_COMPREPLY="-h --help"
+    else
+        SPACK_COMPREPLY=""
+    fi
+}
+
+_spack_config_rm() {
+    if $list_options
+    then
+        SPACK_COMPREPLY="-h --help"
+    else
+        SPACK_COMPREPLY=""
+    fi
+}
+
 _spack_configure() {
     if $list_options
     then