style.py: add spack style --spec-strings for compat with v1.0 (#49485)
* style.py: add spack style --spec-strings for compat with v1.0
* add --fix also, and avoid infinite recursion and too large files
* tests: check that issues are identified and that files are edited
parent 324d733bf9
commit 87926e40a9
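At a glance, the new check rewrites spec strings so that the compiler token comes last in each node, which is the order Spack v1.0 expects. Below is a minimal sketch of the intended behavior, using the private helper this diff adds to spack.cmd.style and the before/after pairs taken from the tests further down; it is an illustration, not an official API:

    # Hedged sketch: _spec_str_format returns the rewritten string, or None if no change is needed.
    from spack.cmd.style import _spec_str_format

    assert _spec_str_format("+foo %gcc +bar") == "+foo +bar %gcc"  # compiler rotated to the end
    assert _spec_str_format("%gcc +baz") == "+baz %gcc"
    assert _spec_str_format("+foo +bar %gcc") is None  # already in the new order, nothing to do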
@@ -6,8 +6,9 @@
 import os
 import re
 import sys
+import warnings
 from itertools import islice, zip_longest
-from typing import Dict, List, Optional
+from typing import Callable, Dict, List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.color as color
@@ -16,6 +17,9 @@
 import spack.paths
 import spack.repo
 import spack.util.git
+import spack.util.spack_yaml
+from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
+from spack.tokenize import Token
 from spack.util.executable import Executable, which

 description = "runs source code style checks on spack"
@@ -198,6 +202,13 @@ def setup_parser(subparser):
         action="append",
         help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
     )
+    subparser.add_argument(
+        "--spec-strings",
+        action="store_true",
+        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
+        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
+        "will be removed in Spack v1.0.",
+    )

     subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")

@@ -507,7 +518,196 @@ def _bootstrap_dev_dependencies():
     spack.bootstrap.ensure_environment_dependencies()


+IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
+
+
+def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
+    # only move the compiler to the back if it exists and is not already at the end
+    if not 0 <= idx < len(blocks) - 1:
+        return
+    # if there's only whitespace after the compiler, don't move it
+    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
+        return
+    # rotate left and always add at least one WS token between compiler and previous token
+    compiler_block = blocks.pop(idx)
+    if compiler_block[0].kind != SpecTokens.WS:
+        compiler_block.insert(0, Token(SpecTokens.WS, " "))
+    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
+    # WS tokens.
+    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
+        blocks[0].pop(0)
+    blocks.append(compiler_block)
+
+
+def _spec_str_format(spec_str: str) -> Optional[str]:
+    """Given any string, try to parse as spec string, and rotate the compiler token to the end
+    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
+    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
+    # the end when we hit a dependency ^... or the end of a string.
+    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
+    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
+
+    current_block: List[Token] = []
+    blocks: List[List[Token]] = []
+    compiler_block_idx = -1
+    in_edge_attr = False
+
+    for token in SPEC_TOKENIZER.tokenize(spec_str):
+        if token.kind == SpecTokens.UNEXPECTED:
+            # parsing error, we cannot fix this string.
+            return None
+        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
+            # multiple compilers are not supported in Spack v0.x, so early return
+            if compiler_block_idx != -1:
+                return None
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+            compiler_block_idx = len(blocks) - 1
+        elif token.kind in (
+            SpecTokens.START_EDGE_PROPERTIES,
+            SpecTokens.DEPENDENCY,
+            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
+            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
+        ):
+            _spec_str_reorder_compiler(compiler_block_idx, blocks)
+            compiler_block_idx = -1
+            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
+                in_edge_attr = True
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
+            in_edge_attr = False
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif in_edge_attr:
+            current_block.append(token)
+        elif token.kind in (
+            SpecTokens.VERSION_HASH_PAIR,
+            SpecTokens.GIT_VERSION,
+            SpecTokens.VERSION,
+            SpecTokens.PROPAGATED_BOOL_VARIANT,
+            SpecTokens.BOOL_VARIANT,
+            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
+            SpecTokens.KEY_VALUE_PAIR,
+            SpecTokens.DAG_HASH,
+        ):
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif token.kind == SpecTokens.WS:
+            current_block.append(token)
+        else:
+            raise ValueError(f"unexpected token {token}")
+
+    if current_block:
+        blocks.append(current_block)
+    _spec_str_reorder_compiler(compiler_block_idx, blocks)
+
+    new_spec_str = "".join(token.value for block in blocks for token in block)
+    return new_spec_str if spec_str != new_spec_str else None
+
+
+SpecStrHandler = Callable[[str, int, int, str, str], None]
+
+
+def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
+    """A SpecStrHandler that prints formatted spec strings and their locations."""
+    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
+
+
+def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
+    """A SpecStrHandler that updates formatted spec strings in files."""
+    with open(path, "r", encoding="utf-8") as f:
+        lines = f.readlines()
+    new_line = lines[line - 1].replace(old, new)
+    if new_line == lines[line - 1]:
+        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
+        return
+    lines[line - 1] = new_line
+    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
+    with open(path, "w", encoding="utf-8") as f:
+        f.writelines(lines)
+
+
+def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
+    """Walk the AST of a Python file and apply handler to formatted spec strings."""
+    has_constant = sys.version_info >= (3, 8)
+    for node in ast.walk(tree):
+        if has_constant and isinstance(node, ast.Constant):
+            current_str = node.value
+        elif not has_constant and isinstance(node, ast.Str):
+            current_str = node.s
+        else:
+            continue
+        if not IS_PROBABLY_COMPILER.search(current_str):
+            continue
+        new = _spec_str_format(current_str)
+        if new is not None:
+            handler(path, node.lineno, node.col_offset, current_str, new)
+
+
+def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
+    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
+    queue = [data]
+    seen = set()
+
+    while queue:
+        current = queue.pop(0)
+        if id(current) in seen:
+            continue
+        seen.add(id(current))
+        if isinstance(current, dict):
+            queue.extend(current.values())
+            queue.extend(current.keys())
+        elif isinstance(current, list):
+            queue.extend(current)
+        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
+            new = _spec_str_format(current)
+            if new is not None:
+                mark = getattr(current, "_start_mark", None)
+                if mark:
+                    line, col = mark.line + 1, mark.column + 1
+                else:
+                    line, col = 0, 0
+                handler(path, line, col, current, new)
+
+
+def _check_spec_strings(
+    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
+) -> None:
+    """Open Python, JSON and YAML files, and format their string literals that look like spec
+    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
+    for path in paths:
+        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
+        is_python = path.endswith(".py")
+        if not is_json_or_yaml and not is_python:
+            continue
+
+        try:
+            with open(path, "r", encoding="utf-8") as f:
+                # skip files that are likely too large to be user code or config
+                if os.fstat(f.fileno()).st_size > 1024 * 1024:
+                    warnings.warn(f"skipping {path}: too large.")
+                    continue
+                if is_json_or_yaml:
+                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
+                elif is_python:
+                    _spec_str_ast(path, ast.parse(f.read()), handler)
+        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
+            warnings.warn(f"skipping {path}")
+            continue
+
+
 def style(parser, args):
+    if args.spec_strings:
+        if not args.files:
+            tty.die("No files provided to check spec strings.")
+        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
+        return _check_spec_strings(args.files, handler)
+
     # save initial working directory for relativizing paths later
     args.initial_working_dir = os.getcwd()

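For reference, a hedged sketch of how the helpers above compose: it mirrors what style() now does for --spec-strings with and without --fix, using a made-up file list as input:

    from spack.cmd.style import (
        _check_spec_strings,
        _spec_str_default_handler,
        _spec_str_fix_handler,
    )

    files = ["example.py", "packages.yaml"]  # hypothetical paths; other extensions are skipped

    # report-only: prints path:line:col and the old -> new spec string for each match
    _check_spec_strings(files, _spec_str_default_handler)

    # equivalent of --fix: rewrites the matching lines in place
    _check_spec_strings(files, _spec_str_fix_handler)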
@@ -409,3 +409,108 @@ def test_case_sensitive_imports(tmp_path: pathlib.Path):
 def test_pkg_imports():
     assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg.builtin.boost") is None
     assert spack.cmd.style._module_part(spack.paths.prefix, "spack.pkg") is None
+
+
+def test_spec_strings(tmp_path):
+    (tmp_path / "example.py").write_text(
+        """\
+def func(x):
+    print("dont fix %s me" % x)
+    return x.satisfies("+foo %gcc +bar") and x.satisfies("%gcc +baz")
+"""
+    )
+    (tmp_path / "example.json").write_text(
+        """\
+{
+    "spec": [
+        "+foo %gcc +bar~nope ^dep %clang +yup @3.2 target=x86_64 /abcdef ^another %gcc ",
+        "%gcc +baz"
+    ],
+    "%gcc x=y": 2
+}
+"""
+    )
+    (tmp_path / "example.yaml").write_text(
+        """\
+spec:
+  - "+foo %gcc +bar"
+  - "%gcc +baz"
+  - "this is fine %clang"
+"%gcc x=y": 2
+"""
+    )
+
+    issues = set()
+
+    def collect_issues(path: str, line: int, col: int, old: str, new: str):
+        issues.add((path, line, col, old, new))
+
+    # check for issues with custom handler
+    spack.cmd.style._check_spec_strings(
+        [
+            str(tmp_path / "nonexistent.py"),
+            str(tmp_path / "example.py"),
+            str(tmp_path / "example.json"),
+            str(tmp_path / "example.yaml"),
+        ],
+        handler=collect_issues,
+    )
+
+    assert issues == {
+        (
+            str(tmp_path / "example.json"),
+            3,
+            9,
+            "+foo %gcc +bar~nope ^dep %clang +yup @3.2 target=x86_64 /abcdef ^another %gcc ",
+            "+foo +bar~nope %gcc ^dep +yup @3.2 target=x86_64 /abcdef %clang ^another %gcc ",
+        ),
+        (str(tmp_path / "example.json"), 4, 9, "%gcc +baz", "+baz %gcc"),
+        (str(tmp_path / "example.json"), 6, 5, "%gcc x=y", "x=y %gcc"),
+        (str(tmp_path / "example.py"), 3, 23, "+foo %gcc +bar", "+foo +bar %gcc"),
+        (str(tmp_path / "example.py"), 3, 57, "%gcc +baz", "+baz %gcc"),
+        (str(tmp_path / "example.yaml"), 2, 5, "+foo %gcc +bar", "+foo +bar %gcc"),
+        (str(tmp_path / "example.yaml"), 3, 5, "%gcc +baz", "+baz %gcc"),
+        (str(tmp_path / "example.yaml"), 5, 1, "%gcc x=y", "x=y %gcc"),
+    }
+
+    # fix the issues in the files
+    spack.cmd.style._check_spec_strings(
+        [
+            str(tmp_path / "nonexistent.py"),
+            str(tmp_path / "example.py"),
+            str(tmp_path / "example.json"),
+            str(tmp_path / "example.yaml"),
+        ],
+        handler=spack.cmd.style._spec_str_fix_handler,
+    )
+
+    assert (
+        (tmp_path / "example.json").read_text()
+        == """\
+{
+    "spec": [
+        "+foo +bar~nope %gcc ^dep +yup @3.2 target=x86_64 /abcdef %clang ^another %gcc ",
+        "+baz %gcc"
+    ],
+    "x=y %gcc": 2
+}
+"""
+    )
+    assert (
+        (tmp_path / "example.py").read_text()
+        == """\
+def func(x):
+    print("dont fix %s me" % x)
+    return x.satisfies("+foo +bar %gcc") and x.satisfies("+baz %gcc")
+"""
+    )
+    assert (
+        (tmp_path / "example.yaml").read_text()
+        == """\
+spec:
+  - "+foo +bar %gcc"
+  - "+baz %gcc"
+  - "this is fine %clang"
+"x=y %gcc": 2
+"""
+    )
@@ -1867,7 +1867,7 @@ _spack_stage() {
 _spack_style() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -b --base -a --all -r --root-relative -U --no-untracked -f --fix --root -t --tool -s --skip"
+        SPACK_COMPREPLY="-h --help -b --base -a --all -r --root-relative -U --no-untracked -f --fix --root -t --tool -s --skip --spec-strings"
     else
         SPACK_COMPREPLY=""
     fi
@@ -2896,7 +2896,7 @@ complete -c spack -n '__fish_spack_using_command stage' -l deprecated -f -a conf
 complete -c spack -n '__fish_spack_using_command stage' -l deprecated -d 'allow concretizer to select deprecated versions'

 # spack style
-set -g __fish_spack_optspecs_spack_style h/help b/base= a/all r/root-relative U/no-untracked f/fix root= t/tool= s/skip=
+set -g __fish_spack_optspecs_spack_style h/help b/base= a/all r/root-relative U/no-untracked f/fix root= t/tool= s/skip= spec-strings

 complete -c spack -n '__fish_spack_using_command style' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command style' -s h -l help -d 'show this help message and exit'
@@ -2916,6 +2916,8 @@ complete -c spack -n '__fish_spack_using_command style' -s t -l tool -r -f -a to
 complete -c spack -n '__fish_spack_using_command style' -s t -l tool -r -d 'specify which tools to run (default: import, isort, black, flake8, mypy)'
 complete -c spack -n '__fish_spack_using_command style' -s s -l skip -r -f -a skip
 complete -c spack -n '__fish_spack_using_command style' -s s -l skip -r -d 'specify tools to skip (choose from import, isort, black, flake8, mypy)'
+complete -c spack -n '__fish_spack_using_command style' -l spec-strings -f -a spec_strings
+complete -c spack -n '__fish_spack_using_command style' -l spec-strings -d 'upgrade spec strings in Python, JSON and YAML files for compatibility with Spack v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag will be removed in Spack v1.0.'

 # spack tags
 set -g __fish_spack_optspecs_spack_tags h/help i/installed a/all