Make spack environment configurations writable from spack external and spack compiler find (#18165)

* spack config: default modification scope can be an environment

Previously, an active environment was the highest-priority config scope
for config reading operations, but it was not considered for config
writing operations. Now the active environment is the highest-priority
config scope for both reading and writing.

With an environment active, spack config add, spack external find, and
spack compiler find now write their configuration into the environment by
default. This is a change in default behavior for these commands, but it
better matches the mental model of an environment taking precedence over
the user's default config files.
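
For illustration, a minimal sketch of the new default behavior, written
with Spack's command wrappers from the test suite (the environment name
'demo' is made up; this is a sketch of the behavior, not code from this
commit):

    import spack.environment as ev
    from spack.main import SpackCommand

    config = SpackCommand('config')

    ev.create('demo')
    with ev.read('demo'):
        # With 'demo' active, the setting lands in the environment's
        # spack.yaml rather than the user-level config.yaml, because the
        # environment is now the default modification scope.
        config('add', 'config:dirty:true')
        assert 'dirty: true' in config('get')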

* add a --scope argument to 'spack external find' to choose a non-default scope (sketched below)

* Increase testing for config modifications on environments
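
A companion sketch of the new --scope override for 'spack external find',
continuing the example above ('user' and 'cmake' are placeholders for any
configured scope and package name):

    import spack.environment as ev
    from spack.main import SpackCommand

    external = SpackCommand('external')

    with ev.read('demo'):
        # Even with 'demo' active, --scope sends the detected externals
        # to the named scope instead of the environment's spack.yaml,
        # which would otherwise be the default.
        external('find', '--scope=user', 'cmake')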

Co-authored-by: Gregory Becker <becker33@llnl.gov>
Robert Blake authored on 2020-09-05 01:12:26 -07:00; committed by GitHub
parent 704fc475e3
commit ea57171712
8 changed files with 126 additions and 57 deletions


@@ -6,7 +6,6 @@
 import collections
 import os
-import re
 import shutil

 import llnl.util.filesystem as fs
@@ -178,14 +177,6 @@ def config_list(args):
    print(' '.join(list(spack.config.section_schemas)))


-def set_config(args, section, new, scope):
-    if re.match(r'env.*', scope):
-        e = ev.get_env(args, 'config add')
-        e.set_config(section, new)
-    else:
-        spack.config.set(section, new, scope=scope)


 def config_add(args):
    """Add the given configuration to the specified config scope
@@ -217,7 +208,7 @@ def config_add(args):
        existing = spack.config.get(section, scope=scope)
        new = spack.config.merge_yaml(existing, value)
-       set_config(args, section, new, scope)
+       spack.config.set(section, new, scope)

    if args.path:
        components = spack.config.process_config_path(args.path)
@@ -261,7 +252,7 @@ def config_add(args):
        # merge value into existing
        new = spack.config.merge_yaml(existing, value)
-       set_config(args, path, new, scope)
+       spack.config.set(path, new, scope)


 def config_remove(args):
@@ -289,7 +280,7 @@ def config_remove(args):
        # This should be impossible to reach
        raise spack.config.ConfigError('Config has nested non-dict values')

-    set_config(args, path, existing, scope)
+    spack.config.set(path, existing, scope)


 def _can_update_config_file(scope_dir, cfg_file):


@@ -29,12 +29,19 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='external_command')

+   scopes = spack.config.scopes()
+   scopes_metavar = spack.config.scopes_metavar
+
    find_parser = sp.add_parser(
        'find', help='add external packages to packages.yaml'
    )
    find_parser.add_argument(
        '--not-buildable', action='store_true', default=False,
        help="packages with detected externals won't be built with Spack")
+   find_parser.add_argument(
+       '--scope', choices=scopes, metavar=scopes_metavar,
+       default=spack.config.default_modify_scope('packages'),
+       help="configuration scope to modify")
    find_parser.add_argument('packages', nargs=argparse.REMAINDER)

    sp.add_parser(
@@ -147,11 +154,11 @@ def external_find(args):
        packages_to_check = spack.repo.path.all_packages()

    pkg_to_entries = _get_external_packages(packages_to_check)
-   new_entries, write_scope = _update_pkg_config(
-       pkg_to_entries, args.not_buildable
+   new_entries = _update_pkg_config(
+       args.scope, pkg_to_entries, args.not_buildable
    )
    if new_entries:
-       path = spack.config.config.get_config_filename(write_scope, 'packages')
+       path = spack.config.config.get_config_filename(args.scope, 'packages')
        msg = ('The following specs have been detected on this system '
               'and added to {0}')
        tty.msg(msg.format(path))
@@ -204,7 +211,7 @@ def _get_predefined_externals():
    return already_defined_specs


-def _update_pkg_config(pkg_to_entries, not_buildable):
+def _update_pkg_config(scope, pkg_to_entries, not_buildable):
    predefined_external_specs = _get_predefined_externals()

    pkg_to_cfg, all_new_specs = {}, []
@@ -221,13 +228,12 @@ def _update_pkg_config(pkg_to_entries, not_buildable):
            pkg_config['buildable'] = False
        pkg_to_cfg[pkg_name] = pkg_config

-   cfg_scope = spack.config.default_modify_scope()
-   pkgs_cfg = spack.config.get('packages', scope=cfg_scope)
+   pkgs_cfg = spack.config.get('packages', scope=scope)

    spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
-   spack.config.set('packages', pkgs_cfg, scope=cfg_scope)
+   spack.config.set('packages', pkgs_cfg, scope=scope)

-   return all_new_specs, cfg_scope
+   return all_new_specs


 def _get_external_packages(packages_to_check, system_path_to_exe=None):


@@ -141,6 +141,10 @@ def __init__(self, name, path):
        self.path = path           # path to directory containing configs.
        self.sections = syaml.syaml_dict()  # sections read from config files.

+   @property
+   def is_platform_dependent(self):
+       return '/' in self.name
+
    def get_section_filename(self, section):
        _validate_section_name(section)
        return os.path.join(self.path, "%s.yaml" % section)
@@ -184,18 +188,27 @@ def __init__(self, name, path, schema, yaml_path=None):
        Arguments:
            schema (dict): jsonschema for the file to read
-           yaml_path (list): list of dict keys in the schema where
-               config data can be found;
+           yaml_path (list): path in the schema where config data can be
+               found.

-               Elements of ``yaml_path`` can be tuples or lists to represent an
-               "or" of keys (e.g. "env" or "spack" is ``('env', 'spack')``)
+               If the schema accepts the following yaml data, the yaml_path
+               would be ['outer', 'inner']
+
+               .. code-block:: yaml
+
+                  outer:
+                    inner:
+                      config:
+                        install_tree: $spack/opt/spack
        """
        super(SingleFileScope, self).__init__(name, path)
        self._raw_data = None
        self.schema = schema
        self.yaml_path = yaml_path or []

+   @property
+   def is_platform_dependent(self):
+       return False
+
    def get_section_filename(self, section):
        return self.path
@@ -230,32 +243,54 @@ def get_section(self, section):
        if self._raw_data is None:
            return None

+       section_data = self._raw_data
        for key in self.yaml_path:
-           if self._raw_data is None:
+           if section_data is None:
                return None
+           section_data = section_data[key]

-           # support tuples as "or" in the yaml path
-           if isinstance(key, (list, tuple)):
-               key = first_existing(self._raw_data, key)
-
-           self._raw_data = self._raw_data[key]
-
-       for section_key, data in self._raw_data.items():
+       for section_key, data in section_data.items():
            self.sections[section_key] = {section_key: data}

        return self.sections.get(section, None)

    def write_section(self, section):
-       validate(self.sections, self.schema)
+       data_to_write = self._raw_data
+
+       # If there is no existing data, this section SingleFileScope has never
+       # been written to disk. We need to construct the portion of the data
+       # from the root of self._raw_data to the level at which the config
+       # sections are defined. That requires creating keys for every entry in
+       # self.yaml_path
+       if not data_to_write:
+           data_to_write = {}
+           # reverse because we construct it from the inside out
+           for key in reversed(self.yaml_path):
+               data_to_write = {key: data_to_write}
+
+       # data_update_pointer is a pointer to the part of data_to_write
+       # that we are currently updating.
+       # We start by traversing into the data to the point at which the
+       # config sections are defined. This means popping the keys from
+       # self.yaml_path
+       data_update_pointer = data_to_write
+       for key in self.yaml_path:
+           data_update_pointer = data_update_pointer[key]
+
+       # For each section, update the data at the level of our pointer
+       # with the data from the section
+       for key, data in self.sections.items():
+           data_update_pointer[key] = data[key]
+
+       validate(data_to_write, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

-           tmp = os.path.join(parent, '.%s.tmp' % self.path)
+           tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
-               syaml.dump_config(self.sections, stream=f,
+               syaml.dump_config(data_to_write, stream=f,
                                  default_flow_style=False)
-           os.path.move(tmp, self.path)
+           os.rename(tmp, self.path)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
@@ -380,7 +415,9 @@ def remove_scope(self, scope_name):
    @property
    def file_scopes(self):
        """List of writable scopes with an associated file."""
-       return [s for s in self.scopes.values() if type(s) == ConfigScope]
+       return [s for s in self.scopes.values()
+               if (type(s) == ConfigScope
+                   or type(s) == SingleFileScope)]

    def highest_precedence_scope(self):
        """Non-internal scope with highest precedence."""
@@ -392,7 +429,7 @@ def highest_precedence_non_platform_scope(self):
        Platform-specific scopes are of the form scope/platform"""
        generator = reversed(self.file_scopes)
        highest = next(generator, None)
-       while highest and '/' in highest.name:
+       while highest and highest.is_platform_dependent:
            highest = next(generator, None)
        return highest
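
For orientation on the write_section() change above: when the single-file
scope has never been written, it now rebuilds the nesting described by
yaml_path before dumping the sections. A standalone sketch of that nesting
step, with illustrative values for yaml_path and the stored sections:

    # Rebuild the outer keys named by yaml_path, then splice each config
    # section in at that level (values here are illustrative).
    yaml_path = ['spack']
    sections = {'config': {'config': {'dirty': True}}}

    data_to_write = {}
    for key in reversed(yaml_path):        # build outer keys, inside out
        data_to_write = {key: data_to_write}

    pointer = data_to_write
    for key in yaml_path:                  # descend to the section level
        pointer = pointer[key]

    for key, data in sections.items():     # splice each section in
        pointer[key] = data[key]

    assert data_to_write == {'spack': {'config': {'dirty': True}}}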


@@ -853,24 +853,17 @@ def env_file_config_scope_name(self):
    def env_file_config_scope(self):
        """Get the configuration scope for the environment's manifest file."""
        config_name = self.env_file_config_scope_name()
-       return spack.config.SingleFileScope(config_name,
-                                           self.manifest_path,
-                                           spack.schema.env.schema,
-                                           [spack.schema.env.keys])
+       return spack.config.SingleFileScope(
+           config_name,
+           self.manifest_path,
+           spack.schema.env.schema,
+           [spack.config.first_existing(self.raw_yaml,
+                                        spack.schema.env.keys)])

    def config_scopes(self):
        """A list of all configuration scopes for this environment."""
        return self.included_config_scopes() + [self.env_file_config_scope()]

-   def set_config(self, path, value):
-       """Set configuration for this environment"""
-       yaml = config_dict(self.yaml)
-       keys = spack.config.process_config_path(path)
-       for key in keys[:-1]:
-           yaml = yaml[key]
-       yaml[keys[-1]] = value
-       self.write()
-
    def destroy(self):
        """Remove this environment from Spack entirely."""
        shutil.rmtree(self.path)


@@ -398,12 +398,43 @@ def test_remove_list(mutable_empty_config):

 def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
-   env = ev.create('test')
+   ev.create('test')
+   with ev.read('test'):
+       config('add', 'config:dirty:true')
+       output = config('get')
+
+   expected = """  config:
+    dirty: true
+"""
+   assert expected in output
+
+
+def test_config_add_to_env_preserve_comments(mutable_empty_config,
+                                             mutable_mock_env_path,
+                                             tmpdir):
+   filepath = str(tmpdir.join('spack.yaml'))
+   manifest = """# comment
+spack:  # comment
+  # comment
+  specs:  # comment
+  - foo  # comment
+  # comment
+  view: true  # comment
+  packages:  # comment
+    # comment
+    all:  # comment
+      # comment
+      compiler: [gcc]  # comment
+"""
+   with open(filepath, 'w') as f:
+       f.write(manifest)
+
+   env = ev.Environment(str(tmpdir))
    with env:
        config('add', 'config:dirty:true')
        output = config('get')

-   expected = ev.default_manifest_yaml
+   expected = manifest
    expected += """  config:
    dirty: true


@@ -58,7 +58,8 @@ def test_find_external_update_config(mutable_config):
    ]
    pkg_to_entries = {'cmake': entries}

-   spack.cmd.external._update_pkg_config(pkg_to_entries, False)
+   scope = spack.config.default_modify_scope('packages')
+   spack.cmd.external._update_pkg_config(scope, pkg_to_entries, False)

    pkgs_cfg = spack.config.get('packages')
    cmake_cfg = pkgs_cfg['cmake']
@@ -154,7 +155,8 @@ def test_find_external_merge(mutable_config, mutable_mock_repo):
        )
    ]
    pkg_to_entries = {'find-externals1': entries}
-   spack.cmd.external._update_pkg_config(pkg_to_entries, False)
+   scope = spack.config.default_modify_scope('packages')
+   spack.cmd.external._update_pkg_config(scope, pkg_to_entries, False)

    pkgs_cfg = spack.config.get('packages')
    pkg_cfg = pkgs_cfg['find-externals1']


@@ -791,6 +791,15 @@ def test_single_file_scope_section_override(tmpdir, config):
        '/x/y/z', '$spack/var/spack/repos/builtin']


+def test_write_empty_single_file_scope(tmpdir):
+   env_schema = spack.schema.env.schema
+   scope = spack.config.SingleFileScope(
+       'test', str(tmpdir.ensure('config.yaml')), env_schema, ['spack'])
+   scope.write_section('config')
+   # confirm we can write empty config
+   assert not scope.get_section('config')
+
+
 def check_schema(name, file_contents):
    """Check a Spack YAML schema against some data"""
    f = StringIO(file_contents)


@@ -860,7 +860,7 @@ _spack_external() {
 _spack_external_find() {
    if $list_options
    then
-       SPACK_COMPREPLY="-h --help --not-buildable"
+       SPACK_COMPREPLY="-h --help --not-buildable --scope"
    else
        _all_packages
    fi