fix %compiler satisfaction with specs v4 format (#50140)
This PR improves compatibility with specs installed before #45189, and with externals specifying a compiler, by using the annotated compiler to "satisfy" a spec query. On top of that, the PR adds a new flag for: ```console $ spack find --specfile-format -I %gcc -- linux-ubuntu20.04-icelake / gcc@10.5.0 ----------------------- [+] [v4] ca-certificates-mozilla@2023-05-30 [e] [v4] cmake@3.31.6 [+] [v4] gcc-runtime@10.5.0 [e] [v4] glibc@2.31 [+] [v4] gmake@4.4.1 [+] [v4] hdf5@1.14.5 [+] [v4] pkgconf@2.2.0 [+] [v4] zlib-ng@2.2.1 ==> 8 installed packages ``` which shows the specfile format of the specs being retrieved.
This commit is contained in: parent 8e2caa2b83, commit ecc3752ee9
```diff
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
         all_headers (bool): show headers even when arch/compiler aren't defined
         status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``
+        specfile_format (bool): specfile format of the current spec
     """

     def get_arg(name, default=None):
@@ -458,6 +458,7 @@ def get_arg(name, default=None):
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
     status_fn = get_arg("status_fn", None)
+    specfile_format = get_arg("specfile_format", False)

     decorator = get_arg("decorator", None)
     if decorator is None:
@@ -479,6 +480,9 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + vfmt + ffmt

+    if specfile_format:
+        format_string = "[{specfile_version}] " + format_string
+
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
```
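The `{specfile_version}` token prepended above is the spec format attribute introduced further down in this PR (see the `format_attribute` change). As a rough usage sketch, assuming the existing `--format` option of `spack find`, the same annotation can also be requested explicitly:

```console
$ spack find --format "[{specfile_version}] {name}{@version}"
```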
```diff
@@ -51,6 +51,12 @@ def setup_parser(subparser):
         "-I", "--install-status", action="store_true", help="show install status of packages"
     )

+    subparser.add_argument(
+        "--specfile-format",
+        action="store_true",
+        help="show the specfile format for installed deps ",
+    )
+
     subparser.add_argument(
         "-d", "--deps", action="store_true", help="output dependencies along with found specs"
     )
@@ -280,6 +286,7 @@ def root_decorator(spec, string):
         show_flags=True,
         decorator=root_decorator,
         variants=True,
+        specfile_format=args.specfile_format,
     )

     print()
@@ -301,6 +308,7 @@ def root_decorator(spec, string):
         namespace=True,
         show_flags=True,
         variants=True,
+        specfile_format=args.specfile_format,
     )
     print()

@@ -390,7 +398,12 @@ def find(parser, args):
     if args.show_concretized:
         display_results += concretized_but_not_installed
     cmd.display_specs(
-        display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
+        display_results,
+        args,
+        decorator=decorator,
+        all_headers=True,
+        status_fn=status_fn,
+        specfile_format=args.specfile_format,
     )

     # print number of installed packages last (as the list may be long)
```
```diff
@@ -3862,6 +3862,17 @@ def external_spec_selected(self, node, idx):
         )
         self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

+        # Annotate compiler specs from externals
+        external_spec = spack.spec.Spec(spec_info["spec"])
+        external_spec_deps = external_spec.dependencies()
+        if len(external_spec_deps) > 1:
+            raise InvalidExternalError(
+                f"external spec {spec_info['spec']} cannot have more than one dependency"
+            )
+        elif len(external_spec_deps) == 1:
+            compiler_str = external_spec_deps[0]
+            self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))
+
         # If this is an extension, update the dependencies to include the extendee
         package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
         extendee_spec = package.extendee_spec
@@ -4765,3 +4776,7 @@ class InvalidSpliceError(spack.error.SpackError):

 class NoCompilerFoundError(spack.error.SpackError):
     """Raised when there is no possible compiler"""
+
+
+class InvalidExternalError(spack.error.SpackError):
+    """Raised when there is no possible compiler"""
```
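For context, the annotation above targets external entries whose spec names a compiler directly. A minimal sketch of such an entry, adapted from the tests later in this diff (the user-scope config path and the install prefix are placeholders):

```console
$ cat ~/.spack/packages.yaml
packages:
  libelf:
    buildable: false
    externals:
    - spec: libelf@0.8.12 %gcc@10   # the single %gcc dependency becomes the compiler annotation
      prefix: /usr                  # placeholder install prefix
```

An external spec with more than one dependency would instead raise the new `InvalidExternalError`.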
```diff
@@ -184,6 +184,7 @@ literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root)
 1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
   literal_node(Root, LiteralNode),
   build(LiteralNode),
+  not external(LiteralNode),
   attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).

 condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
@@ -490,6 +491,7 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
   build(node(X, Parent)),
   not external(node(X, Parent)).

+% Concrete nodes
 :- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
   concrete(ParentNode),
   not attr("concrete_build_dependency", ParentNode, BuildDependency, _).
@@ -503,6 +505,23 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
   attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
   not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

+% External nodes
+:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
+  external(ParentNode),
+  not attr("external_build_requirement", ParentNode, build_requirement("node", BuildDependency)).
+
+candidate_external_version(Constraint, BuildDependency, Version)
+  :- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
+  external(ParentNode),
+  pkg_fact(BuildDependency, version_satisfies(Constraint, Version)).
+
+error(100, "External {0} cannot satisfy both {1} and {2}", BuildDependency, LiteralConstraint, ExternalConstraint)
+  :- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, LiteralConstraint)),
+  external(ParentNode),
+  attr("external_build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, ExternalConstraint)),
+  not 1 { pkg_fact(BuildDependency, version_satisfies(ExternalConstraint, Version)) : candidate_external_version(LiteralConstraint, BuildDependency, Version) }.
+
+
 % Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
 % we have on externals
 :- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
```
```diff
@@ -1429,7 +1429,7 @@ def with_compiler(self, compiler: "Spec") -> "SpecAnnotations":
     def __repr__(self) -> str:
         result = f"SpecAnnotations().with_spec_format({self.original_spec_format})"
         if self.compiler_node_attribute:
-            result += f"with_compiler({str(self.compiler_node_attribute)})"
+            result += f".with_compiler({str(self.compiler_node_attribute)})"
         return result


@@ -3394,7 +3394,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
             return True

         # If we have no dependencies, we can't satisfy any constraints.
-        if not self._dependencies:
+        if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
             return False

         # If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
@@ -3405,6 +3405,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
         # verify the edge properties, cause everything is encoded in the hash of the nodes that
         # will be verified later.
         lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
+        mock_nodes_from_old_specfiles = set()
         for rhs_edge in other.traverse_edges(root=False, cover="edges"):
             # If we are checking for ^mpi we need to verify if there is any edge
             if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
@@ -3426,13 +3427,27 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 except KeyError:
                     return False

-                candidates = current_node.dependencies(
-                    name=rhs_edge.spec.name,
-                    deptype=rhs_edge.depflag,
-                    virtuals=rhs_edge.virtuals or None,
-                )
-                if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
-                    return False
+                if current_node.original_spec_format() < 5 or (
+                    current_node.original_spec_format() >= 5 and current_node.external
+                ):
+                    compiler_spec = current_node.annotations.compiler_node_attribute
+                    if compiler_spec is None:
+                        return False
+
+                    mock_nodes_from_old_specfiles.add(compiler_spec)
+                    # This checks that the single node compiler spec satisfies the request
+                    # of a direct dependency. The check is not perfect, but based on heuristic.
+                    if not compiler_spec.satisfies(rhs_edge.spec):
+                        return False
+
+                else:
+                    candidates = current_node.dependencies(
+                        name=rhs_edge.spec.name,
+                        deptype=rhs_edge.depflag,
+                        virtuals=rhs_edge.virtuals or None,
+                    )
+                    if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
+                        return False

                 continue

@@ -3472,8 +3487,9 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 return False

         # Edges have been checked above already, hence deps=False
+        lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
         return all(
-            any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
+            any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
             for rhs in other.traverse(root=False)
         )

@@ -3947,6 +3963,8 @@ def format_attribute(match_object: Match) -> str:
             except AttributeError:
                 if part == "compiler":
                     return "none"
+                elif part == "specfile_version":
+                    return f"v{current.original_spec_format()}"

                 raise SpecFormatStringError(
                     f"Attempted to format attribute {attribute}. "
```
```diff
@@ -3380,3 +3380,63 @@ def test_input_analysis_and_conditional_requirements(default_mock_concretization
     libceed = default_mock_concretization("libceed")
     assert libceed["libxsmm"].satisfies("@main")
     assert libceed["libxsmm"].satisfies("platform=test")
+
+
+@pytest.mark.parametrize(
+    "compiler_str,expected,not_expected",
+    [
+        # Compiler queries are as specific as the constraint on the external
+        ("gcc@10", ["%gcc", "%gcc@10"], ["%clang", "%gcc@9"]),
+        ("gcc", ["%gcc"], ["%clang", "%gcc@9", "%gcc@10"]),
+    ],
+)
+@pytest.mark.regression("49841")
+def test_installing_external_with_compilers_directly(
+    compiler_str, expected, not_expected, mutable_config, mock_packages, tmp_path
+):
+    """Tests that version constraints are taken into account for compiler annotations
+    on externals
+    """
+    spec_str = f"libelf@0.8.12 %{compiler_str}"
+    packages_yaml = syaml.load_config(
+        f"""
+    packages:
+      libelf:
+        buildable: false
+        externals:
+        - spec: {spec_str}
+          prefix: {tmp_path / 'libelf'}
+    """
+    )
+    mutable_config.set("packages", packages_yaml["packages"])
+    s = spack.concretize.concretize_one(spec_str)
+
+    assert s.external
+    assert all(s.satisfies(c) for c in expected)
+    assert all(not s.satisfies(c) for c in not_expected)
+
+
+@pytest.mark.regression("49841")
+def test_using_externals_with_compilers(mutable_config, mock_packages, tmp_path):
+    """Tests that version constraints are taken into account for compiler annotations
+    on externals, even imposed as transitive deps.
+    """
+    packages_yaml = syaml.load_config(
+        f"""
+    packages:
+      libelf:
+        buildable: false
+        externals:
+        - spec: libelf@0.8.12 %gcc@10
+          prefix: {tmp_path / 'libelf'}
+    """
+    )
+    mutable_config.set("packages", packages_yaml["packages"])
+
+    with pytest.raises(spack.error.SpackError):
+        spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@:9")
+
+    s = spack.concretize.concretize_one("dyninst%gcc@10.2.1 ^libelf@0.8.12 %gcc@10:")
+
+    libelf = s["libelf"]
+    assert libelf.external and libelf.satisfies("%gcc")
```
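Read as a CLI analogue (a sketch only, assuming the `libelf` external above is configured and using the standard `spack spec` command), the behavior these tests exercise looks roughly like:

```console
$ spack spec libelf@0.8.12 %gcc@10                         # resolves to the external; satisfies %gcc and %gcc@10
$ spack spec dyninst %gcc@10.2.1 ^libelf@0.8.12 %gcc@:9    # expected to fail: the external is annotated with gcc@10
```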
BIN lib/spack/spack/test/data/database/index.json.v7_v8.json.gz (new file, binary file not shown)
```diff
@@ -5,6 +5,7 @@
 import contextlib
 import datetime
 import functools
+import gzip
 import json
 import os
 import pathlib
@@ -32,6 +33,7 @@
 import spack.database
 import spack.deptypes as dt
 import spack.package_base
+import spack.paths
 import spack.repo
 import spack.spec
 import spack.store
@@ -1243,3 +1245,26 @@ def test_query_with_predicate_fn(database):

     specs = database.query(predicate_fn=lambda x: not spack.repo.PATH.exists(x.spec.name))
     assert not specs
+
+
+@pytest.mark.regression("49964")
+def test_querying_reindexed_database_specfilev5(tmp_path):
+    """Tests that we can query a reindexed database from before compilers as dependencies,
+    and get appropriate results for %<compiler> and similar selections.
+    """
+    test_path = pathlib.Path(spack.paths.test_path)
+    zipfile = test_path / "data" / "database" / "index.json.v7_v8.json.gz"
+    with gzip.open(str(zipfile), "rt", encoding="utf-8") as f:
+        data = json.load(f)
+
+    index_json = tmp_path / spack.database._DB_DIRNAME / spack.database.INDEX_JSON_FILE
+    index_json.parent.mkdir(parents=True)
+    index_json.write_text(json.dumps(data))
+
+    db = spack.database.Database(str(tmp_path))
+
+    specs = db.query("%gcc")
+
+    assert len(specs) == 8
+    assert len([x for x in specs if x.external]) == 2
+    assert len([x for x in specs if x.original_spec_format() < 5]) == 8
```
```diff
@@ -433,6 +433,10 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
     assert s2.format("{compiler.name}") == "gcc"
     assert s2.format("{compiler.version}") != "none"

+    # Ensure satisfies still works with compilers
+    assert s2.satisfies("%gcc")
+    assert s2.satisfies("%gcc@9.4.0")
+

 def test_anchorify_1():
     """Test that anchorify replaces duplicate values with references to a single instance, and
```
|
@ -1214,7 +1214,7 @@ _spack_fetch() {
|
|||||||
_spack_find() {
|
_spack_find() {
|
||||||
if $list_options
|
if $list_options
|
||||||
then
|
then
|
||||||
SPACK_COMPREPLY="-h --help --format -H --hashes --json -I --install-status -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -N --namespaces -r --only-roots -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --only-deprecated --deprecated --install-tree --start-date --end-date"
|
SPACK_COMPREPLY="-h --help --format -H --hashes --json -I --install-status --specfile-format -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -N --namespaces -r --only-roots -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --only-deprecated --deprecated --install-tree --start-date --end-date"
|
||||||
else
|
else
|
||||||
_installed_packages
|
_installed_packages
|
||||||
fi
|
fi
|
||||||
|
```diff
@@ -1786,7 +1786,7 @@ complete -c spack -n '__fish_spack_using_command fetch' -l deprecated -f -a conf
 complete -c spack -n '__fish_spack_using_command fetch' -l deprecated -d 'allow concretizer to select deprecated versions'

 # spack find
-set -g __fish_spack_optspecs_spack_find h/help format= H/hashes json I/install-status d/deps p/paths groups no-groups l/long L/very-long t/tag= N/namespaces r/only-roots c/show-concretized f/show-flags show-full-compiler x/explicit X/implicit u/unknown m/missing v/variants loaded M/only-missing only-deprecated deprecated install-tree= start-date= end-date=
+set -g __fish_spack_optspecs_spack_find h/help format= H/hashes json I/install-status specfile-format d/deps p/paths groups no-groups l/long L/very-long t/tag= N/namespaces r/only-roots c/show-concretized f/show-flags show-full-compiler x/explicit X/implicit u/unknown m/missing v/variants loaded M/only-missing only-deprecated deprecated install-tree= start-date= end-date=
 complete -c spack -n '__fish_spack_using_command_pos_remainder 0 find' -f -a '(__fish_spack_installed_specs)'
 complete -c spack -n '__fish_spack_using_command find' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command find' -s h -l help -d 'show this help message and exit'
@@ -1798,6 +1798,8 @@ complete -c spack -n '__fish_spack_using_command find' -l json -f -a json
 complete -c spack -n '__fish_spack_using_command find' -l json -d 'output specs as machine-readable json records'
 complete -c spack -n '__fish_spack_using_command find' -s I -l install-status -f -a install_status
 complete -c spack -n '__fish_spack_using_command find' -s I -l install-status -d 'show install status of packages'
+complete -c spack -n '__fish_spack_using_command find' -l specfile-format -f -a specfile_format
+complete -c spack -n '__fish_spack_using_command find' -l specfile-format -d 'show the specfile format for installed deps '
 complete -c spack -n '__fish_spack_using_command find' -s d -l deps -f -a deps
 complete -c spack -n '__fish_spack_using_command find' -s d -l deps -d 'output dependencies along with found specs'
 complete -c spack -n '__fish_spack_using_command find' -s p -l paths -f -a paths
```