feature: add "spack tags" command (#26136)
This PR adds a "spack tags" command to output package tags or (available) packages with those tags. It also ensures each package is listed in the tag cache ONLY ONCE per tag.
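For orientation, here is a minimal sketch of driving the new command programmatically, using the same `spack.main.SpackCommand` wrapper the new tests below rely on; the tag name passed in is illustrative only, and the actual output depends on the tag index of the configured repositories:

    # Sketch: exercise the new 'spack tags' entry point from Python, mirroring
    # lib/spack/spack/test/cmd/tags.py. Assumes a Spack checkout with lib/spack
    # on sys.path.
    import spack.main

    tags_cmd = spack.main.SpackCommand('tags')

    print(tags_cmd())          # report all available package tags
    print(tags_cmd('mcmc'))    # packages carrying a given tag ('mcmc' is illustrative)
    print(tags_cmd('-i'))      # restrict the report to installed packages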
parent b56f464c29 · commit d4cecd9ab2
@@ -16,7 +16,6 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.dependency
import spack.repo
from spack.version import VersionList
@@ -57,8 +56,6 @@ def setup_parser(subparser):
        '-v', '--virtuals', action='store_true', default=False,
        help='include virtual packages in list')

    arguments.add_common_arguments(subparser, ['tags'])


def filter_by_name(pkgs, args):
    """
@@ -277,13 +274,6 @@ def list(parser, args):
    # Filter the set appropriately
    sorted_packages = filter_by_name(pkgs, args)

    # Filter by tags
    if args.tags:
        packages_with_tags = set(
            spack.repo.path.packages_with_tags(*args.tags))
        sorted_packages = set(sorted_packages) & packages_with_tags
        sorted_packages = sorted(sorted_packages)

    if args.update:
        # change output stream if user asked for update
        if os.path.exists(args.update):
lib/spack/spack/cmd/tags.py (new file, 107 lines)
@@ -0,0 +1,107 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import six

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack.environment
import spack.repo
import spack.store
import spack.tag
import spack.util.string

description = "Show package tags and associated packages"
section = "basic"
level = "long"


def report_tags(category, tags):
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    if isatty:
        num = len(tags)
        fmt = '{0} package tag'.format(category)
        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))

    if tags:
        colify.colify(tags, output=buffer, tty=isatty, indent=4)
    else:
        buffer.write(" None\n")
    print(buffer.getvalue())


def setup_parser(subparser):
    subparser.epilog = (
        "Tags from known packages will be used if no tags are provided on "
        "the command\nline. If tags are provided, packages with at least one "
        "will be reported.\n\nYou are not allowed to provide tags and use "
        "'--all' at the same time."
    )
    subparser.add_argument(
        '-i', '--installed', action='store_true', default=False,
        help="show information for installed packages only"
    )
    subparser.add_argument(
        '-a', '--all', action='store_true', default=False,
        help="show packages for all available tags"
    )
    subparser.add_argument(
        'tag',
        nargs='*',
        help="show packages with the specified tag"
    )


def tags(parser, args):
    # Disallow combining all option with (positional) tags to avoid confusion
    if args.all and args.tag:
        tty.die("Use the '--all' option OR provide tag(s) on the command line")

    # Provide a nice, simple message if database is empty
    if args.installed and not spack.environment.installed_specs():
        tty.msg("No installed packages")
        return

    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return

    show_packages = args.tag or args.all

    # Only report relevant, available tags if no packages are to be shown
    if not show_packages:
        if not args.installed:
            report_tags("available", available_tags)
        else:
            tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
            tags = tag_pkgs.keys() if tag_pkgs else []
            report_tags("installed", tags)
        return

    # Report packages associated with tags
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    tags = args.tag if args.tag else available_tags
    tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
    missing = 'No installed packages' if args.installed else 'None'
    for tag in sorted(tag_pkgs):
        # TODO: Remove the sorting once we're sure no one has an old
        # TODO: tag cache since it can accumulate duplicates.
        packages = sorted(list(set(tag_pkgs[tag])))
        if isatty:
            buffer.write("{0}:\n".format(tag))

        if packages:
            colify.colify(packages, output=buffer, tty=isatty, indent=4)
        else:
            buffer.write(" {0}\n".format(missing))
        buffer.write("\n")
    print(buffer.getvalue())
@@ -17,6 +17,7 @@
    default_view_name,
    display_specs,
    exists,
    installed_specs,
    is_env_dir,
    is_latest_format,
    lockfile_name,
@@ -44,6 +45,7 @@
    'default_view_name',
    'display_specs',
    'exists',
    'installed_specs',
    'is_env_dir',
    'is_latest_format',
    'lockfile_name',
@@ -102,6 +102,16 @@
default_view_link = 'all'


def installed_specs():
    """
    Returns the specs of packages installed in the active environment or None
    if no packages are installed.
    """
    env = spack.environment.active_environment()
    hashes = env.all_hashes() if env else None
    return spack.store.db.query(hashes=hashes)


def valid_env_name(name):
    return re.match(valid_environment_name_re, name)
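The `installed_specs()` helper added above is what lets `spack tags -i` limit its report to what is actually installed; a rough sketch of calling it, assuming it is re-exported from `spack.environment` as the quoted-name hunk above (apparently the module's `__all__` list) indicates:

    # Sketch: collect the names of installed specs, scoped to the active
    # environment when one is active, otherwise the whole store
    # (hashes=None queries everything).
    import spack.environment

    specs = spack.environment.installed_specs()
    installed_names = sorted({spec.name for spec in specs})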
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import abc
import collections
import contextlib
import errno
import functools
@@ -38,10 +37,10 @@
import spack.patch
import spack.provider_index
import spack.spec
import spack.tag
import spack.util.imp as simp
import spack.util.naming as nm
import spack.util.path
import spack.util.spack_json as sjson

#: Super-namespace for all packages.
#: Package modules are imported as spack.pkg.<namespace>.<pkg-name>.
@@ -219,55 +218,6 @@ def __len__(self):
        return len(self._packages_to_stats)


class TagIndex(Mapping):
    """Maps tags to list of packages."""

    def __init__(self):
        self._tag_dict = collections.defaultdict(list)

    def to_json(self, stream):
        sjson.dump({'tags': self._tag_dict}, stream)

    @staticmethod
    def from_json(stream):
        d = sjson.load(stream)

        r = TagIndex()

        for tag, list in d['tags'].items():
            r[tag].extend(list)

        return r

    def __getitem__(self, item):
        return self._tag_dict[item]

    def __iter__(self):
        return iter(self._tag_dict)

    def __len__(self):
        return len(self._tag_dict)

    def update_package(self, pkg_name):
        """Updates a package in the tag index.

        Args:
            pkg_name (str): name of the package to be removed from the index

        """
        package = path.get(pkg_name)

        # Remove the package from the list of packages, if present
        for pkg_list in self._tag_dict.values():
            if pkg_name in pkg_list:
                pkg_list.remove(pkg_name)

        # Add it again under the appropriate tags
        for tag in getattr(package, 'tags', []):
            tag = tag.lower()
            self._tag_dict[tag].append(package.name)


@six.add_metaclass(abc.ABCMeta)
class Indexer(object):
    """Adaptor for indexes that need to be generated when repos are updated."""
@@ -311,10 +261,10 @@ def write(self, stream):
class TagIndexer(Indexer):
    """Lifecycle methods for a TagIndex on a Repo."""
    def _create(self):
        return TagIndex()
        return spack.tag.TagIndex()

    def read(self, stream):
        self.index = TagIndex.from_json(stream)
        self.index = spack.tag.TagIndex.from_json(stream)

    def update(self, pkg_fullname):
        self.index.update_package(pkg_fullname)
@@ -475,6 +425,7 @@ def __init__(self, *repos):

        self._provider_index = None
        self._patch_index = None
        self._tag_index = None

        # Add each repo to this path.
        for repo in repos:
@@ -579,6 +530,16 @@ def provider_index(self):

        return self._provider_index

    @property
    def tag_index(self):
        """Merged TagIndex from all Repos in the RepoPath."""
        if self._tag_index is None:
            self._tag_index = spack.tag.TagIndex()
            for repo in reversed(self.repos):
                self._tag_index.merge(repo.tag_index)

        return self._tag_index

    @property
    def patch_index(self):
        """Merged PatchIndex from all Repos in the RepoPath."""
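With the `tag_index` property above in place, callers can read the merged index straight off the repository path; a small sketch follows (which tags and packages appear depends on the repositories that are registered):

    # Sketch: walk the merged TagIndex exposed by RepoPath.tag_index.
    import spack.repo

    index = spack.repo.path.tag_index
    for tag in sorted(index):
        print(tag, sorted(index[tag]))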
lib/spack/spack/tag.py (new file, 135 lines)
@@ -0,0 +1,135 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage package tags"""
import collections
import copy
import sys

if sys.version_info >= (3, 5):
    from collections.abc import Mapping  # novm
else:
    from collections import Mapping

import spack.error
import spack.util.spack_json as sjson


def _get_installed_package_names():
    """Returns names of packages installed in the active environment."""
    specs = spack.environment.installed_specs()
    return [spec.name for spec in specs]


def packages_with_tags(tags, installed, skip_empty):
    """
    Returns a dict, indexed by tag, containing lists of names of packages
    containing the tag or, if no tags, for all available tags.

    Arguments:
        tags (list or None): list of tags of interest or None for all
        installed (bool): True to restrict the results to installed packages;
            False to include every package with the tag
        skip_empty (bool): True to omit tags with no associated packages;
            False to keep an entry for every tag even when no packages
            carry it
    """
    tag_pkgs = collections.defaultdict(list)
    spec_names = _get_installed_package_names() if installed else []
    keys = spack.repo.path.tag_index if tags is None else tags
    for tag in keys:
        packages = [name for name in spack.repo.path.tag_index[tag] if
                    not installed or name in spec_names]
        if packages or not skip_empty:
            tag_pkgs[tag] = packages
    return tag_pkgs


class TagIndex(Mapping):
    """Maps tags to list of packages."""

    def __init__(self):
        self._tag_dict = collections.defaultdict(list)

    @property
    def tags(self):
        return self._tag_dict

    def to_json(self, stream):
        sjson.dump({'tags': self._tag_dict}, stream)

    @staticmethod
    def from_json(stream):
        d = sjson.load(stream)

        if not isinstance(d, dict):
            raise TagIndexError("TagIndex data was not a dict.")

        if 'tags' not in d:
            raise TagIndexError("TagIndex data does not start with 'tags'")

        r = TagIndex()

        for tag, packages in d['tags'].items():
            r[tag].extend(packages)

        return r

    def __getitem__(self, item):
        return self._tag_dict[item]

    def __iter__(self):
        return iter(self._tag_dict)

    def __len__(self):
        return len(self._tag_dict)

    def copy(self):
        """Return a deep copy of this index."""
        clone = TagIndex()
        clone._tag_dict = copy.deepcopy(self._tag_dict)
        return clone

    def get_packages(self, tag):
        """Returns all packages associated with the tag."""
        return self.tags[tag] if tag in self.tags else []

    def merge(self, other):
        """Merge another tag index into this one.

        Args:
            other (TagIndex): tag index to be merged
        """
        other = other.copy()  # defensive copy.

        for tag in other.tags:
            if tag not in self.tags:
                self.tags[tag] = other.tags[tag]
                continue

            spkgs, opkgs = self.tags[tag], other.tags[tag]
            self.tags[tag] = sorted(list(set(spkgs + opkgs)))

    def update_package(self, pkg_name):
        """Updates a package in the tag index.

        Args:
            pkg_name (str): name of the package to be updated in the index

        """
        package = spack.repo.path.get(pkg_name)

        # Remove the package from the list of packages, if present
        for pkg_list in self._tag_dict.values():
            if pkg_name in pkg_list:
                pkg_list.remove(pkg_name)

        # Add it again under the appropriate tags
        for tag in getattr(package, 'tags', []):
            tag = tag.lower()
            self._tag_dict[tag].append(package.name)


class TagIndexError(spack.error.SpackError):
    """Raised when there is a problem with a TagIndex."""
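To make the new module concrete, here is a short sketch of the JSON round trip and merge behavior defined above, mirroring what lib/spack/spack/test/tag.py exercises below; the tag and package names are only illustrative:

    # Sketch: build two TagIndex objects from the JSON cache format and merge them.
    from six import StringIO

    import spack.tag

    first = spack.tag.TagIndex.from_json(StringIO('{"tags": {"mcmc": ["jags"]}}'))
    second = spack.tag.TagIndex.from_json(StringIO('{"tags": {"mcmc": ["r-rjags"]}}'))

    first.merge(second)
    assert first.get_packages('mcmc') == ['jags', 'r-rjags']

    out = StringIO()
    first.to_json(out)  # serialize back to the on-disk cache format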
@@ -35,20 +35,6 @@ def test_list_search_description(mock_packages):
    assert 'depb' in output


def test_list_tags(mock_packages):
    output = list('--tag', 'tag1')
    assert 'mpich' in output
    assert 'mpich2' in output

    output = list('--tag', 'tag2')
    assert 'mpich\n' in output
    assert 'mpich2' not in output

    output = list('--tag', 'tag3')
    assert 'mpich\n' not in output
    assert 'mpich2' in output


def test_list_format_name_only(mock_packages):
    output = list('--format', 'name_only')
    assert 'zmpi' in output
lib/spack/spack/test/cmd/tags.py (new file, 62 lines)
@@ -0,0 +1,62 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.main
import spack.repo
import spack.spec

tags = spack.main.SpackCommand('tags')


def test_tags_bad_options():
    out = tags('-a', 'tag1', fail_on_error=False)
    assert "option OR provide" in out


def test_tags_no_installed(install_mockery, mock_fetch):
    out = tags('-i')
    assert 'No installed' in out


def test_tags_invalid_tag(mock_packages):
    out = tags('nosuchtag')
    assert 'None' in out


def test_tags_all_mock_tags(mock_packages):
    out = tags()
    for tag in ['tag1', 'tag2', 'tag3']:
        assert tag in out


def test_tags_all_mock_tag_packages(mock_packages):
    out = tags('-a')
    for pkg in ['mpich\n', 'mpich2\n']:
        assert pkg in out


def test_tags_no_tags(monkeypatch):
    class tag_path():
        tag_index = dict()

    monkeypatch.setattr(spack.repo, 'path', tag_path)
    out = tags()
    assert "No tagged" in out


def test_tags_installed(install_mockery, mock_fetch):
    spec = spack.spec.Spec('mpich').concretized()
    pkg = spack.repo.get(spec)
    pkg.do_install()

    out = tags('-i')
    for tag in ['tag1', 'tag2']:
        assert tag in out

    out = tags('-i', 'tag1')
    assert 'mpich' in out

    out = tags('-i', 'tag3')
    assert 'No installed' in out
lib/spack/spack/test/tag.py (new file, 160 lines)
@@ -0,0 +1,160 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tests for tag index cache files."""

import pytest
from six import StringIO

import spack.cmd.install
import spack.tag
from spack.main import SpackCommand

install = SpackCommand('install')

# Alternate representation
tags_json = \
    """
    {
        "tags": {
            "no-version": [
                "noversion",
                "noversion-bundle"
            ],
            "no-source": [
                "nosource"
            ]
        }
    }
    """

more_tags_json = \
    """
    {
        "tags": {
            "merge": [
                "check"
            ]
        }
    }
    """


def test_tag_copy(mock_packages):
    index = spack.tag.TagIndex.from_json(StringIO(tags_json))
    new_index = index.copy()

    assert index.tags == new_index.tags


def test_tag_get_all_available(mock_packages):
    for skip in [False, True]:
        all_pkgs = spack.tag.packages_with_tags(None, False, skip)
        assert sorted(all_pkgs['tag1']) == ['mpich', 'mpich2']
        assert all_pkgs['tag2'] == ['mpich']
        assert all_pkgs['tag3'] == ['mpich2']


def ensure_tags_results_equal(results, expected):
    if expected:
        assert sorted(results.keys()) == sorted(expected.keys())
        for tag in results:
            assert sorted(results[tag]) == sorted(expected[tag])
    else:
        assert results == expected


@pytest.mark.parametrize('tags,expected', [
    (['tag1'], {'tag1': ['mpich', 'mpich2']}),
    (['tag2'], {'tag2': ['mpich']}),
    (['tag3'], {'tag3': ['mpich2']}),
    (['nosuchpackage'], {'nosuchpackage': {}}),
])
def test_tag_get_available(tags, expected, mock_packages):
    # Ensure results for all tags
    all_tag_pkgs = spack.tag.packages_with_tags(tags, False, False)
    ensure_tags_results_equal(all_tag_pkgs, expected)

    # Ensure results for tags expecting results since skipping otherwise
    only_pkgs = spack.tag.packages_with_tags(tags, False, True)
    if expected[tags[0]]:
        ensure_tags_results_equal(only_pkgs, expected)
    else:
        assert not only_pkgs


def test_tag_get_installed_packages(
        mock_packages, mock_archive, mock_fetch, install_mockery):
    install('mpich')

    for skip in [False, True]:
        all_pkgs = spack.tag.packages_with_tags(None, True, skip)
        assert sorted(all_pkgs['tag1']) == ['mpich']
        assert all_pkgs['tag2'] == ['mpich']
        assert skip or all_pkgs['tag3'] == []


def test_tag_index_round_trip(mock_packages):
    # Assumes at least two packages -- mpich and mpich2 -- have tags
    mock_index = spack.repo.path.tag_index
    assert mock_index.tags

    ostream = StringIO()
    mock_index.to_json(ostream)

    istream = StringIO(ostream.getvalue())
    new_index = spack.tag.TagIndex.from_json(istream)

    assert mock_index == new_index


def test_tag_equal():
    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json))
    second_index = spack.tag.TagIndex.from_json(StringIO(tags_json))

    assert first_index == second_index


def test_tag_merge():
    first_index = spack.tag.TagIndex.from_json(StringIO(tags_json))
    second_index = spack.tag.TagIndex.from_json(StringIO(more_tags_json))

    assert first_index != second_index

    tags1 = list(first_index.tags.keys())
    tags2 = list(second_index.tags.keys())
    all_tags = sorted(list(set(tags1 + tags2)))

    first_index.merge(second_index)
    tag_keys = sorted(first_index.tags.keys())
    assert tag_keys == all_tags

    # Merge again to make sure the index does not retain duplicates
    first_index.merge(second_index)
    tag_keys = sorted(first_index.tags.keys())
    assert tag_keys == all_tags


def test_tag_not_dict():
    list_json = "[]"
    with pytest.raises(spack.tag.TagIndexError) as e:
        spack.tag.TagIndex.from_json(StringIO(list_json))
    assert "not a dict" in str(e)


def test_tag_no_tags():
    pkg_json = "{\"packages\": []}"
    with pytest.raises(spack.tag.TagIndexError) as e:
        spack.tag.TagIndex.from_json(StringIO(pkg_json))
    assert "does not start with" in str(e)


def test_tag_update_package(mock_packages):
    mock_index = spack.repo.path.tag_index

    index = spack.tag.TagIndex()
    for name in spack.repo.all_package_names():
        index.update_package(name)

    ensure_tags_results_equal(mock_index.tags, index.tags)
@@ -337,7 +337,7 @@ _spack() {
    then
        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
    else
        SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
        SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
    fi
}

@@ -1206,7 +1206,7 @@ _spack_license_update_copyright_year() {
_spack_list() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -d --search-description --format --update -v --virtuals -t --tag"
        SPACK_COMPREPLY="-h --help -d --search-description --format --update -v --virtuals"
    else
        _all_packages
    fi
@@ -1668,6 +1668,15 @@ _spack_style() {
    fi
}

_spack_tags() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -i --installed -a --all"
    else
        SPACK_COMPREPLY=""
    fi
}

_spack_test() {
    if $list_options
    then
@@ -11,7 +11,7 @@ class Jags(AutotoolsPackage):
    Bayesian hierarchical models using Markov Chain Monte Carlo (MCMC)
    simulation not wholly unlike BUGS"""

    tags = ['mcmc', 'Gibbs sampler']
    tags = ['mcmc', 'Gibbs-sampler']

    homepage = "http://mcmc-jags.sourceforge.net/"
    url = "https://downloads.sourceforge.net/project/mcmc-jags/JAGS/4.x/Source/JAGS-4.2.0.tar.gz"