Circular import fix: spack.schema.config -> spack.config (#48059)

Fix by moving `merge_yaml` from `config.py` to `schema/__init__.py`.
Tamara Dahlgren 2024-12-12 01:07:53 -08:00 committed by GitHub
parent 9aefbb0e96
commit f018e0fe42
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 163 additions and 156 deletions
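
In short: `merge_yaml` and the override/append/prepend key helpers move out of `spack/config.py` into `spack/schema/__init__.py`, breaking the `spack.schema.config` -> `spack.config` import cycle, and call sites switch from `spack.config.merge_yaml` to `spack.schema.merge_yaml`. A minimal sketch of the new call pattern (illustrative only; the plain dicts below stand in for parsed config sections):

    import spack.schema

    existing = {"packages": {"all": {"compiler": ["gcc"]}}}
    incoming = {"packages": {"cmake": {"buildable": False}}}

    # merge_yaml(dest, source): entries in `source` take precedence over `dest`.
    merged = spack.schema.merge_yaml(existing, incoming)
    assert set(merged["packages"]) == {"cmake", "all"}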

View File

@@ -26,6 +26,7 @@
 import spack.environment as ev
 import spack.error
 import spack.mirrors.mirror
+import spack.schema
 import spack.spec
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
@@ -608,7 +609,7 @@ def __apply_submapping(self, dest, spec, section):
                 if "build-job-remove" in match_attrs:
                     spack.config.remove_yaml(dest, attrs["build-job-remove"])
                 if "build-job" in match_attrs:
-                    spack.config.merge_yaml(dest, attrs["build-job"])
+                    spack.schema.merge_yaml(dest, attrs["build-job"])
                 break
         if matched and only_first:
             break
@@ -679,7 +680,7 @@ def _apply_section(dest, src):
         if do_remove:
             dest = spack.config.remove_yaml(dest, src[remove_job_name])
         if do_merge:
-            dest = copy.copy(spack.config.merge_yaml(dest, src[merge_job_name]))
+            dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))
     if name == "build":
         # Apply attributes to all build jobs
@@ -808,7 +809,7 @@ def job_query(job):
         tty.warn(f"Response missing required keys: {missing_keys}")
     if clean_config:
-        job["attributes"] = spack.config.merge_yaml(
+        job["attributes"] = spack.schema.merge_yaml(
             job.get("attributes", {}), clean_config
         )

View File

@@ -15,6 +15,7 @@
 import spack.binary_distribution as bindist
 import spack.config as cfg
 import spack.mirrors.mirror
+import spack.schema
 import spack.spec
 import spack.util.spack_yaml as syaml
@@ -248,7 +249,7 @@ def main_script_replacements(cmd):
             build_stamp = options.cdash_handler.build_stamp
             job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
-            job_object["artifacts"] = cfg.merge_yaml(
+            job_object["artifacts"] = spack.schema.merge_yaml(
                 job_object.get("artifacts", {}),
                 {
                     "when": "always",

View File

@@ -14,6 +14,7 @@
 import spack.config
 import spack.environment as ev
 import spack.error
+import spack.schema
 import spack.schema.env
 import spack.spec
 import spack.store
@@ -566,7 +567,7 @@ def config_prefer_upstream(args):
     # Simply write the config to the specified file.
     existing = spack.config.get("packages", scope=scope)
-    new = spack.config.merge_yaml(existing, pkgs)
+    new = spack.schema.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
     config_file = spack.config.CONFIG.get_config_filename(scope, section)

View File

@@ -619,7 +619,7 @@ def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfig
                 if changed:
                     self.format_updates[section].append(scope)
-                merged_section = merge_yaml(merged_section, data)
+                merged_section = spack.schema.merge_yaml(merged_section, data)
         # no config files -- empty config.
         if section not in merged_section:
@@ -680,7 +680,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
         while len(parts) > 1:
             key = parts.pop(0)
-            if _override(key):
+            if spack.schema.override(key):
                 new = type(data[key])()
                 del data[key]
             else:
@@ -693,7 +693,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
             data[key] = new
             data = new
-        if _override(parts[0]):
+        if spack.schema.override(parts[0]):
             data.pop(parts[0], None)
         # update new value
@@ -894,7 +894,7 @@ def add_from_file(filename: str, scope: Optional[str] = None) -> None:
         value = data[section]
         existing = get(section, scope=scope)
-        new = merge_yaml(existing, value)
+        new = spack.schema.merge_yaml(existing, value)
         # We cannot call config.set directly (set is a type)
         CONFIG.set(section, new, scope)
@@ -946,7 +946,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
         value: List[str] = [value]  # type: ignore[no-redef]
     # merge value into existing
-    new = merge_yaml(existing, value)
+    new = spack.schema.merge_yaml(existing, value)
     CONFIG.set(path, new, scope)
@@ -1120,44 +1120,6 @@ def read_config_file(
         raise ConfigFileError(str(e)) from e

-def _override(string: str) -> bool:
-    """Test if a spack YAML string is an override.
-    See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
-    and if they do, their values completely replace lower-precedence
-    configs instead of merging into them.
-    """
-    return hasattr(string, "override") and string.override
-
-
-def _append(string: str) -> bool:
-    """Test if a spack YAML string is an override.
-    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
-    and if they do, their values append lower-precedence
-    configs.
-    str, str : concatenate strings.
-    [obj], [obj] : append lists.
-    """
-    return getattr(string, "append", False)
-
-
-def _prepend(string: str) -> bool:
-    """Test if a spack YAML string is an override.
-    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
-    and if they do, their values prepend lower-precedence
-    configs.
-    str, str : concatenate strings.
-    [obj], [obj] : prepend lists. (default behavior)
-    """
-    return getattr(string, "prepend", False)
-
-
 def _mark_internal(data, name):
     """Add a simple name mark to raw YAML/JSON data.
@@ -1260,7 +1222,7 @@ def they_are(t):
             unmerge = sk in dest
             old_dest_value = dest.pop(sk, None)
-            if unmerge and not _override(sk):
+            if unmerge and not spack.schema.override(sk):
                 dest[sk] = remove_yaml(old_dest_value, sv)
         return dest
@@ -1270,81 +1232,6 @@ def they_are(t):
     return dest

-def merge_yaml(dest, source, prepend=False, append=False):
-    """Merges source into dest; entries in source take precedence over dest.
-    This routine may modify dest and should be assigned to dest, in
-    case dest was None to begin with, e.g.:
-       dest = merge_yaml(dest, source)
-    In the result, elements from lists from ``source`` will appear before
-    elements of lists from ``dest``. Likewise, when iterating over keys
-    or items in merged ``OrderedDict`` objects, keys from ``source`` will
-    appear before keys from ``dest``.
-    Config file authors can optionally end any attribute in a dict
-    with `::` instead of `:`, and the key will override that of the
-    parent instead of merging.
-    `+:` will extend the default prepend merge strategy to include string concatenation
-    `-:` will change the merge strategy to append, it also includes string concatentation
-    """
-
-    def they_are(t):
-        return isinstance(dest, t) and isinstance(source, t)
-
-    # If source is None, overwrite with source.
-    if source is None:
-        return None
-
-    # Source list is prepended (for precedence)
-    if they_are(list):
-        if append:
-            # Make sure to copy ruamel comments
-            dest[:] = [x for x in dest if x not in source] + source
-        else:
-            # Make sure to copy ruamel comments
-            dest[:] = source + [x for x in dest if x not in source]
-        return dest
-
-    # Source dict is merged into dest.
-    elif they_are(dict):
-        # save dest keys to reinsert later -- this ensures that source items
-        # come *before* dest in OrderdDicts
-        dest_keys = [dk for dk in dest.keys() if dk not in source]
-
-        for sk, sv in source.items():
-            # always remove the dest items. Python dicts do not overwrite
-            # keys on insert, so this ensures that source keys are copied
-            # into dest along with mark provenance (i.e., file/line info).
-            merge = sk in dest
-            old_dest_value = dest.pop(sk, None)
-
-            if merge and not _override(sk):
-                dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
-            else:
-                # if sk ended with ::, or if it's new, completely override
-                dest[sk] = copy.deepcopy(sv)
-
-        # reinsert dest keys so they are last in the result
-        for dk in dest_keys:
-            dest[dk] = dest.pop(dk)
-
-        return dest
-
-    elif they_are(str):
-        # Concatenate strings in prepend mode
-        if prepend:
-            return source + dest
-        elif append:
-            return dest + source
-
-    # If we reach here source and dest are either different types or are
-    # not both lists or dicts: replace with source.
-    return copy.copy(source)
-
-
 class ConfigPath:
     quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
     unquoted_string = "[^:'\"]+"

View File

@@ -27,6 +27,7 @@
 import spack.config
 import spack.error
 import spack.operating_systems.windows_os as winOs
+import spack.schema
 import spack.spec
 import spack.util.environment
 import spack.util.spack_yaml
@@ -226,7 +227,7 @@ def update_configuration(
         pkg_to_cfg[package_name] = pkg_config
     pkgs_cfg = spack.config.get("packages", scope=scope)
-    pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
+    pkgs_cfg = spack.schema.merge_yaml(pkgs_cfg, pkg_to_cfg)
     spack.config.set("packages", pkgs_cfg, scope=scope)
     return all_new_specs
@@ -246,7 +247,7 @@ def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -
     # Update the provided scope
     spack.config.set(
         "packages",
-        spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
+        spack.schema.merge_yaml(spack.config.get("packages", scope=scope), new_config),
         scope=scope,
     )

View File

@@ -48,6 +48,7 @@
 import spack.error
 import spack.paths
 import spack.projections as proj
+import spack.schema
 import spack.schema.environment
 import spack.spec
 import spack.store
@@ -216,7 +217,7 @@ def root_path(name, module_set_name):
     roots = spack.config.get(f"modules:{module_set_name}:roots", {})
     # Merge config values into the defaults so we prefer configured values
-    roots = spack.config.merge_yaml(defaults, roots)
+    roots = spack.schema.merge_yaml(defaults, roots)
     path = roots.get(name, os.path.join(spack.paths.share_path, name))
     return spack.util.path.canonicalize_path(path)
@@ -624,10 +625,10 @@ def environment_modifications(self):
         """List of environment modifications to be processed."""
         # Modifications guessed by inspecting the spec prefix
         prefix_inspections = syaml.syaml_dict()
-        spack.config.merge_yaml(
+        spack.schema.merge_yaml(
            prefix_inspections, spack.config.get("modules:prefix_inspections", {})
        )
-        spack.config.merge_yaml(
+        spack.schema.merge_yaml(
            prefix_inspections,
            spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
        )

View File

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """This module contains jsonschema files for all of Spack's YAML formats."""
+import copy
 import typing
 import warnings
@@ -73,3 +74,116 @@ def _deprecated_properties(validator, deprecated, instance, schema):
 Validator = llnl.util.lang.Singleton(_make_validator)
+
+
+def _append(string: str) -> bool:
+    """Test if a spack YAML string is an append.
+    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+    and if they do, their values append lower-precedence
+    configs.
+    str, str : concatenate strings.
+    [obj], [obj] : append lists.
+    """
+    return getattr(string, "append", False)
+
+
+def _prepend(string: str) -> bool:
+    """Test if a spack YAML string is an prepend.
+    See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+    and if they do, their values prepend lower-precedence
+    configs.
+    str, str : concatenate strings.
+    [obj], [obj] : prepend lists. (default behavior)
+    """
+    return getattr(string, "prepend", False)
+
+
+def override(string: str) -> bool:
+    """Test if a spack YAML string is an override.
+    See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
+    and if they do, their values completely replace lower-precedence
+    configs instead of merging into them.
+    """
+    return hasattr(string, "override") and string.override
+
+
+def merge_yaml(dest, source, prepend=False, append=False):
+    """Merges source into dest; entries in source take precedence over dest.
+    This routine may modify dest and should be assigned to dest, in
+    case dest was None to begin with, e.g.:
+       dest = merge_yaml(dest, source)
+    In the result, elements from lists from ``source`` will appear before
+    elements of lists from ``dest``. Likewise, when iterating over keys
+    or items in merged ``OrderedDict`` objects, keys from ``source`` will
+    appear before keys from ``dest``.
+    Config file authors can optionally end any attribute in a dict
+    with `::` instead of `:`, and the key will override that of the
+    parent instead of merging.
+    `+:` will extend the default prepend merge strategy to include string concatenation
+    `-:` will change the merge strategy to append, it also includes string concatentation
+    """
+
+    def they_are(t):
+        return isinstance(dest, t) and isinstance(source, t)
+
+    # If source is None, overwrite with source.
+    if source is None:
+        return None
+
+    # Source list is prepended (for precedence)
+    if they_are(list):
+        if append:
+            # Make sure to copy ruamel comments
+            dest[:] = [x for x in dest if x not in source] + source
+        else:
+            # Make sure to copy ruamel comments
+            dest[:] = source + [x for x in dest if x not in source]
+        return dest
+
+    # Source dict is merged into dest.
+    elif they_are(dict):
+        # save dest keys to reinsert later -- this ensures that source items
+        # come *before* dest in OrderdDicts
+        dest_keys = [dk for dk in dest.keys() if dk not in source]
+
+        for sk, sv in source.items():
+            # always remove the dest items. Python dicts do not overwrite
+            # keys on insert, so this ensures that source keys are copied
+            # into dest along with mark provenance (i.e., file/line info).
+            merge = sk in dest
+            old_dest_value = dest.pop(sk, None)
+
+            if merge and not override(sk):
+                dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
+            else:
+                # if sk ended with ::, or if it's new, completely override
+                dest[sk] = copy.deepcopy(sv)
+
+        # reinsert dest keys so they are last in the result
+        for dk in dest_keys:
+            dest[dk] = dest.pop(dk)
+
+        return dest
+
+    elif they_are(str):
+        # Concatenate strings in prepend mode
+        if prepend:
+            return source + dest
+        elif append:
+            return dest + source
+
+    # If we reach here source and dest are either different types or are
+    # not both lists or dicts: replace with source.
+    return copy.copy(source)
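
For orientation, a short usage sketch of the relocated `merge_yaml` (not part of the commit; plain dicts, lists, and strings stand in for parsed spack_yaml objects, so no `::`/`+:`/`-:` key annotations are involved):

    import spack.schema

    # Dicts: entries from `source` (the second argument) win on conflicts,
    # and source keys are ordered before dest keys in the result.
    dest = {"b": 2, "shared": "old"}
    source = {"a": 1, "shared": "new"}
    merged = spack.schema.merge_yaml(dest, source)
    assert merged == {"a": 1, "shared": "new", "b": 2}
    assert list(merged) == ["a", "shared", "b"]

    # Lists: source elements are prepended; duplicates already in dest are dropped.
    assert spack.schema.merge_yaml(["d", "e"], ["a", "d"]) == ["a", "d", "e"]

    # Strings: without a prepend/append key annotation, source simply replaces dest.
    assert spack.schema.merge_yaml("old", "new") == "new"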

View File

@@ -11,7 +11,7 @@
 from llnl.util.lang import union_dicts
-import spack.config
+import spack.schema
 import spack.schema.projections
 #: Properties for inclusion in other schemas
@@ -157,7 +157,7 @@ def update(data):
         # whether install_tree was updated or not
         # we merge the yaml to ensure we don't invalidate other projections
         update_data = data.get("install_tree", {})
-        update_data = spack.config.merge_yaml(update_data, projections_data)
+        update_data = spack.schema.merge_yaml(update_data, projections_data)
         data["install_tree"] = update_data
         changed = True

View File

@@ -639,31 +639,6 @@ def test_read_config_override_list(mock_low_high_config, write_config_file):
     }

-def test_ordereddict_merge_order():
-    """ "Test that source keys come before dest keys in merge_yaml results."""
-    source = syaml.syaml_dict([("k1", "v1"), ("k2", "v2"), ("k3", "v3")])
-    dest = syaml.syaml_dict([("k4", "v4"), ("k3", "WRONG"), ("k5", "v5")])
-
-    result = spack.config.merge_yaml(dest, source)
-    assert "WRONG" not in result.values()
-
-    expected_keys = ["k1", "k2", "k3", "k4", "k5"]
-    expected_items = [("k1", "v1"), ("k2", "v2"), ("k3", "v3"), ("k4", "v4"), ("k5", "v5")]
-    assert expected_keys == list(result.keys())
-    assert expected_items == list(result.items())
-
-
-def test_list_merge_order():
-    """ "Test that source lists are prepended to dest."""
-    source = ["a", "b", "c"]
-    dest = ["d", "e", "f"]
-
-    result = spack.config.merge_yaml(dest, source)
-
-    assert ["a", "b", "c", "d", "e", "f"] == result
-
-
 def test_internal_config_update(mock_low_high_config, write_config_file):
     write_config_file("config", config_low, "low")

View File

@@ -11,6 +11,7 @@
 import spack.paths
 import spack.schema
+import spack.util.spack_yaml as syaml

 @pytest.fixture()
@@ -124,3 +125,28 @@ def test_deprecated_properties(module_suffixes_schema):
     data = {"tcl": {"all": {"suffixes": {"^python": "py"}}}}
     # The next validation doesn't raise anymore
     v.validate(data)
+
+
+def test_ordereddict_merge_order():
+    """ "Test that source keys come before dest keys in merge_yaml results."""
+    source = syaml.syaml_dict([("k1", "v1"), ("k2", "v2"), ("k3", "v3")])
+    dest = syaml.syaml_dict([("k4", "v4"), ("k3", "WRONG"), ("k5", "v5")])
+
+    result = spack.schema.merge_yaml(dest, source)
+    assert "WRONG" not in result.values()
+
+    expected_keys = ["k1", "k2", "k3", "k4", "k5"]
+    expected_items = [("k1", "v1"), ("k2", "v2"), ("k3", "v3"), ("k4", "v4"), ("k5", "v5")]
+    assert expected_keys == list(result.keys())
+    assert expected_items == list(result.items())
+
+
+def test_list_merge_order():
+    """ "Test that source lists are prepended to dest."""
+    source = ["a", "b", "c"]
+    dest = ["d", "e", "f"]
+
+    result = spack.schema.merge_yaml(dest, source)
+
+    assert ["a", "b", "c", "d", "e", "f"] == result