Rename spack.db -> spack.repo
Make the distinction between the install database and package repositories clearer.
parent e6d232bfef
commit 04f032d6e3
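As context for the diff below, here is a minimal sketch (not part of this commit; the helper name and package name are illustrative only) of the distinction the rename is meant to highlight: spack.repo reaches the package repositories (package definitions and their files), while spack.installed_db tracks what is actually installed.

    import spack

    def describe(pkg_name):
        # Package *definitions* come from the package repositories,
        # now reached through spack.repo (formerly spack.db).
        pkg = spack.repo.get(pkg_name)
        print 'package file:', spack.repo.filename_for_package_name(pkg_name)

        # *Installed* packages are tracked separately, in the install database.
        for spec in spack.installed_db.query(installed=True):
            if spec.name == pkg_name:
                print 'installed:', spec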
@@ -58,8 +58,8 @@
 _repo_paths = spack.config.get_repos_config()
 if not _repo_paths:
     tty.die("Spack configuration contains no package repositories.")
-db = spack.repository.RepoPath(*_repo_paths)
-sys.meta_path.append(db)
+repo = spack.repository.RepoPath(*_repo_paths)
+sys.meta_path.append(repo)

 #
 # Set up the installed packages database

@@ -146,7 +146,7 @@
 # When packages call 'from spack import *', this extra stuff is brought in.
 #
 # Spack internal code should call 'import spack' and accesses other
-# variables (spack.db, paths, etc.) directly.
+# variables (spack.repo, paths, etc.) directly.
 #
 # TODO: maybe this should be separated out and should go in build_environment.py?
 # TODO: it's not clear where all the stuff that needs to be included in packages
@@ -81,7 +81,7 @@ def get_checksums(versions, urls, **kwargs):

 def checksum(parser, args):
     # get the package we're going to generate checksums for
-    pkg = spack.db.get(args.package)
+    pkg = spack.repo.get(args.package)

     # If the user asked for specific versions, use those.
     if args.versions:

@@ -42,5 +42,5 @@ def clean(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_clean()

@@ -172,7 +172,7 @@ def create(parser, args):
     tty.msg("Creating template for package %s" % name)

     # Create a directory for the new package.
-    pkg_path = spack.db.filename_for_package_name(name, package_repo)
+    pkg_path = spack.repo.filename_for_package_name(name, package_repo)
     if os.path.exists(pkg_path) and not args.force:
         tty.die("%s already exists." % pkg_path)
     else:

@@ -214,7 +214,7 @@ def create(parser, args):
         name = 'py-%s' % name

     # Create a directory for the new package.
-    pkg_path = spack.db.filename_for_package_name(name)
+    pkg_path = spack.repo.filename_for_package_name(name)
     if os.path.exists(pkg_path) and not args.force:
         tty.die("%s already exists." % pkg_path)
     else:

@@ -61,7 +61,7 @@ def diy(self, args):
     # Take a write lock before checking for existence.
     with spack.installed_db.write_transaction():
         spec = specs[0]
-        if not spack.db.exists(spec.name):
+        if not spack.repo.exists(spec.name):
             tty.warn("No such package: %s" % spec.name)
             create = tty.get_yes_or_no("Create this package?", default=False)
             if not create:

@@ -76,7 +76,7 @@ def diy(self, args):
             tty.die("spack diy spec must have a single, concrete version.")

         spec.concretize()
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)

         if package.installed:
             tty.error("Already installed in %s" % package.prefix)

@@ -54,7 +54,7 @@ def install(self, spec, prefix):


 def edit_package(name, force=False):
-    path = spack.db.filename_for_package_name(name)
+    path = spack.repo.filename_for_package_name(name)

     if os.path.exists(path):
         if not os.path.isfile(path):
@@ -71,7 +71,7 @@ def extensions(parser, args):
         args.mode = 'short'

     # List package names of extensions
-    extensions = spack.db.extensions_for(spec)
+    extensions = spack.repo.extensions_for(spec)
     if not extensions:
         tty.msg("%s has no extensions." % spec.cshort_spec)
         return

@@ -46,5 +46,5 @@ def fetch(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_fetch()

@@ -137,7 +137,7 @@ def find(parser, args):
     # Filter out specs that don't exist.
     query_specs = spack.cmd.parse_specs(args.query_specs)
     query_specs, nonexisting = partition_list(
-        query_specs, lambda s: spack.db.exists(s.name))
+        query_specs, lambda s: spack.repo.exists(s.name))

     if nonexisting:
         msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')

@@ -105,5 +105,5 @@ def print_text_info(pkg):


 def info(parser, args):
-    pkg = spack.db.get(args.name)
+    pkg = spack.repo.get(args.name)
     print_text_info(pkg)

@@ -70,7 +70,7 @@ def install(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         with spack.installed_db.write_transaction():
             package.do_install(
                 keep_prefix=args.keep_prefix,

@@ -43,7 +43,7 @@ def setup_parser(subparser):

 def list(parser, args):
     # Start with all package names.
-    pkgs = spack.db.all_package_names()
+    pkgs = spack.repo.all_package_names()

     # filter if a filter arg was provided
     if args.filter:
@@ -72,7 +72,7 @@ def location(parser, args):
         print spack.prefix

     elif args.packages:
-        print spack.db.root
+        print spack.repo.root

     elif args.stages:
         print spack.stage_path

@@ -94,12 +94,12 @@ def location(parser, args):

         if args.package_dir:
             # This one just needs the spec name.
-            print join_path(spack.db.root, spec.name)
+            print join_path(spack.repo.root, spec.name)

         else:
             # These versions need concretized specs.
             spec.concretize()
-            pkg = spack.db.get(spec)
+            pkg = spack.repo.get(spec)

             if args.stage_dir:
                 print pkg.stage.path

@@ -128,7 +128,7 @@ def mirror_create(args):

     # If nothing is passed, use all packages.
     if not specs:
-        specs = [Spec(n) for n in spack.db.all_package_names()]
+        specs = [Spec(n) for n in spack.repo.all_package_names()]
         specs.sort(key=lambda s: s.format("$_$@").lower())

     # Default name for directory is spack-mirror-<DATESTAMP>

@@ -48,7 +48,7 @@ def rst_table(elts):

 def print_rst_package_list():
     """Print out information on all packages in restructured text."""
-    pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
+    pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())

     print ".. _package-list:"
     print

@@ -47,5 +47,5 @@ def patch(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_patch()
@@ -85,7 +85,7 @@ def list_packages(rev):

 def pkg_add(args):
     for pkg_name in args.packages:
-        filename = spack.db.filename_for_package_name(pkg_name)
+        filename = spack.repo.filename_for_package_name(pkg_name)
         if not os.path.isfile(filename):
             tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)

@@ -39,4 +39,4 @@ def setup_parser(subparser):

 def providers(parser, args):
     for spec in spack.cmd.parse_specs(args.vpkg_spec):
-        colify(sorted(spack.db.providers_for(spec)), indent=4)
+        colify(sorted(spack.repo.providers_for(spec)), indent=4)

@@ -108,7 +108,7 @@ def repo_remove(args):

 def repo_list(args):
     """List package sources and their mnemoics"""
-    root_names = spack.db.repos
+    root_names = spack.repo.repos
     max_len = max(len(s[0]) for s in root_names)
     fmt = "%%-%ds%%s" % (max_len + 4)
     for root in root_names:

@@ -42,5 +42,5 @@ def restage(parser, args):

     specs = spack.cmd.parse_specs(args.packages, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_restage()

@@ -49,5 +49,5 @@ def stage(parser, args):

     specs = spack.cmd.parse_specs(args.specs, concretize=True)
     for spec in specs:
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         package.do_stage()
@@ -115,7 +115,7 @@ def fetch_log(path):

 def failed_dependencies(spec):
     return set(childSpec for childSpec in spec.dependencies.itervalues() if not
-        spack.db.get(childSpec).installed)
+        spack.repo.get(childSpec).installed)


 def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):

@@ -126,7 +126,7 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
             continue

         failedDeps = failed_dependencies(spec)
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         if failedDeps:
             result = TestResult.SKIPPED
             dep = iter(failedDeps).next()

@@ -171,7 +171,7 @@ def test_install(parser, args):

     newInstalls = set()
     for spec in topSpec.traverse():
-        package = spack.db.get(spec)
+        package = spack.repo.get(spec)
         if not package.installed:
             newInstalls.add(spec)

@@ -188,7 +188,7 @@ def test_install(parser, args):
             # Calling do_install for the top-level package would be sufficient but
             # this attempts to keep going if any package fails (other packages which
             # are not dependents may succeed)
-            package = spack.db.get(spec)
+            package = spack.repo.get(spec)
             if (not failed_dependencies(spec)) and (not package.installed):
                 try:
                     package.do_install(

@@ -41,7 +41,7 @@ def setup_parser(subparser):

 def urls(parser, args):
     urls = set()
-    for pkg in spack.db.all_packages():
+    for pkg in spack.repo.all_packages():
         url = getattr(pkg.__class__, 'url', None)
         if url:
             urls.add(url)

@@ -34,7 +34,7 @@ def setup_parser(subparser):


 def versions(parser, args):
-    pkg = spack.db.get(args.package)
+    pkg = spack.repo.get(args.package)

     safe_versions = pkg.versions
     fetched_versions = pkg.fetch_remote_versions()
@@ -549,7 +549,7 @@ def query(self, query_spec=any, known=any, installed=True):
             for key, rec in self._data.items():
                 if installed is not any and rec.installed != installed:
                     continue
-                if known is not any and spack.db.exists(rec.spec.name) != known:
+                if known is not any and spack.repo.exists(rec.spec.name) != known:
                     continue
                 if query_spec is any or rec.spec.satisfies(query_spec):
                     results.append(rec.spec)

@@ -523,7 +523,7 @@ def quote(string):
         return '"%s"' % string

     if not specs:
-        specs = [p.name for p in spack.db.all_packages()]
+        specs = [p.name for p in spack.repo.all_packages()]
     else:
         roots = specs
         specs = set()

@@ -372,7 +372,7 @@ def __init__(self, spec):
         self._total_time = 0.0

         if self.is_extension:
-            spack.db.get(self.extendee_spec)._check_extendable()
+            spack.repo.get(self.extendee_spec)._check_extendable()


     @property

@@ -548,7 +548,7 @@ def preorder_traversal(self, visited=None, **kwargs):
                 yield spec
                 continue

-            for pkg in spack.db.get(name).preorder_traversal(visited, **kwargs):
+            for pkg in spack.repo.get(name).preorder_traversal(visited, **kwargs):
                 yield pkg

@@ -54,7 +54,7 @@ def __init__(self, pkg, path_or_url, level):
         if '://' in path_or_url:
             self.url = path_or_url
         else:
-            pkg_dir = spack.db.dirname_for_package_name(self.pkg_name)
+            pkg_dir = spack.repo.dirname_for_package_name(self.pkg_name)
             self.path = join_path(pkg_dir, path_or_url)
             if not os.path.isfile(self.path):
                 raise NoSuchPatchFileError(pkg_name, self.path)
@@ -487,7 +487,7 @@ def root(self):

     @property
     def package(self):
-        return spack.db.get(self)
+        return spack.repo.get(self)


     @property

@@ -505,7 +505,7 @@ def virtual(self):
     @staticmethod
     def is_virtual(name):
         """Test if a name is virtual without requiring a Spec."""
-        return not spack.db.exists(name)
+        return not spack.repo.exists(name)


     @property

@@ -798,7 +798,7 @@ def _expand_virtual_packages(self):
             return changed

         for spec in virtuals:
-            providers = spack.db.providers_for(spec)
+            providers = spack.repo.providers_for(spec)
             concrete = spack.concretizer.choose_provider(spec, providers)
             concrete = concrete.copy()
             spec._replace_with(concrete)

@@ -909,7 +909,7 @@ def _evaluate_dependency_conditions(self, name):
            the dependency. If no conditions are True (and we don't
            depend on it), return None.
         """
-        pkg = spack.db.get(self.name)
+        pkg = spack.repo.get(self.name)
         conditions = pkg.dependencies[name]

         # evaluate when specs to figure out constraints on the dependency.

@@ -1037,7 +1037,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
         any_change = False
         changed = True

-        pkg = spack.db.get(self.name)
+        pkg = spack.repo.get(self.name)
         while changed:
             changed = False
             for dep_name in pkg.dependencies:

@@ -1115,7 +1115,7 @@ def validate_names(self):
         for spec in self.traverse():
             # Don't get a package for a virtual name.
             if not spec.virtual:
-                spack.db.get(spec.name)
+                spack.repo.get(spec.name)

             # validate compiler in addition to the package name.
             if spec.compiler:

@@ -1247,7 +1247,7 @@ def satisfies(self, other, deps=True, strict=False):

         # A concrete provider can satisfy a virtual dependency.
         if not self.virtual and other.virtual:
-            pkg = spack.db.get(self.name)
+            pkg = spack.repo.get(self.name)
             if pkg.provides(other.name):
                 for provided, when_spec in pkg.provided.items():
                     if self.satisfies(when_spec, deps=False, strict=strict):
@@ -125,22 +125,22 @@ def test_concretize_with_provides_when(self):
            we ask for some advanced version.
         """
         self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
-                                for spec in spack.db.providers_for('mpi@2.1')))
+                                for spec in spack.repo.providers_for('mpi@2.1')))

         self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
-                                for spec in spack.db.providers_for('mpi@2.2')))
+                                for spec in spack.repo.providers_for('mpi@2.2')))

         self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
-                                for spec in spack.db.providers_for('mpi@2.2')))
+                                for spec in spack.repo.providers_for('mpi@2.2')))

         self.assertTrue(not any(spec.satisfies('mpich@:1')
-                                for spec in spack.db.providers_for('mpi@2')))
+                                for spec in spack.repo.providers_for('mpi@2')))

         self.assertTrue(not any(spec.satisfies('mpich@:1')
-                                for spec in spack.db.providers_for('mpi@3')))
+                                for spec in spack.repo.providers_for('mpi@3')))

         self.assertTrue(not any(spec.satisfies('mpich2')
-                                for spec in spack.db.providers_for('mpi@3')))
+                                for spec in spack.repo.providers_for('mpi@3')))


     def test_virtual_is_fully_expanded_for_callpath(self):

@@ -79,7 +79,7 @@ class DatabaseTest(MockPackagesTest):

     def _mock_install(self, spec):
         s = Spec(spec)
-        pkg = spack.db.get(s.concretized())
+        pkg = spack.repo.get(s.concretized())
         pkg.do_install(fake=True)

@@ -62,7 +62,7 @@ def test_read_and_write_spec(self):
            finally that the directory can be removed by the directory
            layout.
         """
-        packages = list(spack.db.all_packages())[:max_packages]
+        packages = list(spack.repo.all_packages())[:max_packages]

         for pkg in packages:
             spec = pkg.spec

@@ -126,14 +126,14 @@ def test_handle_unknown_package(self):
         mock_db = RepoPath(spack.mock_packages_path)

         not_in_mock = set.difference(
-            set(spack.db.all_package_names()),
+            set(spack.repo.all_package_names()),
             set(mock_db.all_package_names()))
         packages = list(not_in_mock)[:max_packages]

         # Create all the packages that are not in mock.
         installed_specs = {}
         for pkg_name in packages:
-            spec = spack.db.get(pkg_name).spec
+            spec = spack.repo.get(pkg_name).spec

             # If a spec fails to concretize, just skip it. If it is a
             # real error, it will be caught by concretization tests.

@@ -145,7 +145,7 @@ def test_handle_unknown_package(self):
             self.layout.create_install_directory(spec)
             installed_specs[spec] = self.layout.path_for_spec(spec)

-        spack.db.swap(mock_db)
+        spack.repo.swap(mock_db)

         # Now check that even without the package files, we know
         # enough to read a spec from the spec file.

@@ -160,12 +160,12 @@ def test_handle_unknown_package(self):
             self.assertTrue(spec.eq_dag(spec_from_file))
             self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())

-        spack.db.swap(mock_db)
+        spack.repo.swap(mock_db)


     def test_find(self):
         """Test that finding specs within an install layout works."""
-        packages = list(spack.db.all_packages())[:max_packages]
+        packages = list(spack.repo.all_packages())[:max_packages]

         # Create install prefixes for all packages in the list
         installed_specs = {}

@@ -50,7 +50,7 @@ def setUp(self):

         spec = Spec('git-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)


     def tearDown(self):

@@ -47,7 +47,7 @@ def setUp(self):

         spec = Spec('hg-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)


     def tearDown(self):

@@ -78,7 +78,7 @@ def test_install_and_uninstall(self):
         self.assertTrue(spec.concrete)

         # Get the package
-        pkg = spack.db.get(spec)
+        pkg = spack.repo.get(spec)

         # Fake the URL for the package so it downloads from a file.
         pkg.fetcher = URLFetchStrategy(self.repo.url)

@@ -55,7 +55,7 @@ def set_up_package(self, name, MockRepoClass, url_attr):
         spec.concretize()

         # Get the package and fix its fetch args to point to a mock repo
-        pkg = spack.db.get(spec)
+        pkg = spack.repo.get(spec)
         repo = MockRepoClass()
         self.repos[name] = repo

@@ -37,7 +37,7 @@ def initmock(self):
         # us to set up contrived packages that don't interfere with
         # real ones.
         self.db = RepoPath(spack.mock_packages_path)
-        spack.db.swap(self.db)
+        spack.repo.swap(self.db)

         spack.config.clear_config_caches()
         self.real_scopes = spack.config.config_scopes

@@ -59,7 +59,7 @@ def set_pkg_dep(self, pkg_name, spec):
         spec = Spec(spec)

         # Save original dependencies before making any changes.
-        pkg = spack.db.get(pkg_name)
+        pkg = spack.repo.get(pkg_name)
         if pkg_name not in self.saved_deps:
             self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())

@@ -69,7 +69,7 @@ def set_pkg_dep(self, pkg_name, spec):

     def cleanmock(self):
         """Restore the real packages path after any test."""
-        spack.db.swap(self.db)
+        spack.repo.swap(self.db)
         spack.config.config_scopes = self.real_scopes
         spack.config.clear_config_caches()
@@ -38,92 +38,92 @@
 class MultiMethodTest(MockPackagesTest):

     def test_no_version_match(self):
-        pkg = spack.db.get('multimethod@2.0')
+        pkg = spack.repo.get('multimethod@2.0')
         self.assertRaises(NoSuchMethodError, pkg.no_version_2)


     def test_one_version_match(self):
-        pkg = spack.db.get('multimethod@1.0')
+        pkg = spack.repo.get('multimethod@1.0')
         self.assertEqual(pkg.no_version_2(), 1)

-        pkg = spack.db.get('multimethod@3.0')
+        pkg = spack.repo.get('multimethod@3.0')
         self.assertEqual(pkg.no_version_2(), 3)

-        pkg = spack.db.get('multimethod@4.0')
+        pkg = spack.repo.get('multimethod@4.0')
         self.assertEqual(pkg.no_version_2(), 4)


     def test_version_overlap(self):
-        pkg = spack.db.get('multimethod@2.0')
+        pkg = spack.repo.get('multimethod@2.0')
         self.assertEqual(pkg.version_overlap(), 1)

-        pkg = spack.db.get('multimethod@5.0')
+        pkg = spack.repo.get('multimethod@5.0')
         self.assertEqual(pkg.version_overlap(), 2)


     def test_mpi_version(self):
-        pkg = spack.db.get('multimethod^mpich@3.0.4')
+        pkg = spack.repo.get('multimethod^mpich@3.0.4')
         self.assertEqual(pkg.mpi_version(), 3)

-        pkg = spack.db.get('multimethod^mpich2@1.2')
+        pkg = spack.repo.get('multimethod^mpich2@1.2')
         self.assertEqual(pkg.mpi_version(), 2)

-        pkg = spack.db.get('multimethod^mpich@1.0')
+        pkg = spack.repo.get('multimethod^mpich@1.0')
         self.assertEqual(pkg.mpi_version(), 1)


     def test_undefined_mpi_version(self):
-        pkg = spack.db.get('multimethod^mpich@0.4')
+        pkg = spack.repo.get('multimethod^mpich@0.4')
         self.assertEqual(pkg.mpi_version(), 1)

-        pkg = spack.db.get('multimethod^mpich@1.4')
+        pkg = spack.repo.get('multimethod^mpich@1.4')
         self.assertEqual(pkg.mpi_version(), 1)


     def test_default_works(self):
-        pkg = spack.db.get('multimethod%gcc')
+        pkg = spack.repo.get('multimethod%gcc')
         self.assertEqual(pkg.has_a_default(), 'gcc')

-        pkg = spack.db.get('multimethod%intel')
+        pkg = spack.repo.get('multimethod%intel')
         self.assertEqual(pkg.has_a_default(), 'intel')

-        pkg = spack.db.get('multimethod%pgi')
+        pkg = spack.repo.get('multimethod%pgi')
         self.assertEqual(pkg.has_a_default(), 'default')


     def test_architecture_match(self):
-        pkg = spack.db.get('multimethod=x86_64')
+        pkg = spack.repo.get('multimethod=x86_64')
         self.assertEqual(pkg.different_by_architecture(), 'x86_64')

-        pkg = spack.db.get('multimethod=ppc64')
+        pkg = spack.repo.get('multimethod=ppc64')
         self.assertEqual(pkg.different_by_architecture(), 'ppc64')

-        pkg = spack.db.get('multimethod=ppc32')
+        pkg = spack.repo.get('multimethod=ppc32')
         self.assertEqual(pkg.different_by_architecture(), 'ppc32')

-        pkg = spack.db.get('multimethod=arm64')
+        pkg = spack.repo.get('multimethod=arm64')
         self.assertEqual(pkg.different_by_architecture(), 'arm64')

-        pkg = spack.db.get('multimethod=macos')
+        pkg = spack.repo.get('multimethod=macos')
         self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)


     def test_dependency_match(self):
-        pkg = spack.db.get('multimethod^zmpi')
+        pkg = spack.repo.get('multimethod^zmpi')
         self.assertEqual(pkg.different_by_dep(), 'zmpi')

-        pkg = spack.db.get('multimethod^mpich')
+        pkg = spack.repo.get('multimethod^mpich')
         self.assertEqual(pkg.different_by_dep(), 'mpich')

         # If we try to switch on some entirely different dep, it's ambiguous,
         # but should take the first option
-        pkg = spack.db.get('multimethod^foobar')
+        pkg = spack.repo.get('multimethod^foobar')
         self.assertEqual(pkg.different_by_dep(), 'mpich')


     def test_virtual_dep_match(self):
-        pkg = spack.db.get('multimethod^mpich2')
+        pkg = spack.repo.get('multimethod^mpich2')
         self.assertEqual(pkg.different_by_virtual_dep(), 2)

-        pkg = spack.db.get('multimethod^mpich@1.0')
+        pkg = spack.repo.get('multimethod^mpich@1.0')
         self.assertEqual(pkg.different_by_virtual_dep(), 1)
@@ -35,8 +35,8 @@ class PackageSanityTest(unittest.TestCase):

     def check_db(self):
         """Get all packages in a DB to make sure they work."""
-        for name in spack.db.all_package_names():
-            spack.db.get(name)
+        for name in spack.repo.all_package_names():
+            spack.repo.get(name)


     def test_get_all_packages(self):

@@ -47,14 +47,14 @@ def test_get_all_packages(self):
     def ztest_get_all_mock_packages(self):
         """Get the mock packages once each too."""
         db = RepoPath(spack.mock_packages_path)
-        spack.db.swap(db)
+        spack.repo.swap(db)
         self.check_db()
-        spack.db.swap(db)
+        spack.repo.swap(db)


     def ztest_url_versions(self):
         """Check URLs for regular packages, if they are explicitly defined."""
-        for pkg in spack.db.all_packages():
+        for pkg in spack.repo.all_packages():
             for v, vattrs in pkg.versions.items():
                 if 'url' in vattrs:
                     # If there is a url for the version check it.

@@ -35,11 +35,11 @@
 class PackagesTest(MockPackagesTest):

     def test_load_package(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')


     def test_package_name(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')
         self.assertEqual(pkg.name, 'mpich')

@@ -50,7 +50,7 @@ def test_package_filename(self):


     def test_package_name(self):
-        pkg = spack.db.get('mpich')
+        pkg = spack.repo.get('mpich')
         self.assertEqual(pkg.name, 'mpich')

@@ -54,8 +54,8 @@ def pyfiles(self, *search_paths):


     def package_py_files(self):
-        for name in spack.db.all_package_names():
-            yield spack.db.filename_for_package_name(name)
+        for name in spack.repo.all_package_names():
+            yield spack.repo.filename_for_package_name(name)


     def check_python_versions(self, *files):

@@ -49,7 +49,7 @@ def setUp(self):

         spec = Spec('svn-test')
         spec.concretize()
-        self.pkg = spack.db.get(spec, new=True)
+        self.pkg = spack.repo.get(spec, new=True)


     def tearDown(self):

@@ -79,15 +79,15 @@ def setUp(self):
         pkgX.installed = False
         pkgY.installed = False

-        self.saved_db = spack.db
+        self.saved_db = spack.repo
         pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
-        spack.db = pkgDb
+        spack.repo = pkgDb


     def tearDown(self):
         super(UnitInstallTest, self).tearDown()

-        spack.db = self.saved_db
+        spack.repo = self.saved_db

     def test_installing_both(self):
         mo = MockOutput()