Convert lazy singleton functions to Singleton object
- simplify the singleton pattern across the codebase
- reduce lines of code needed for crufty initialization
- reduce functions that need to mess with a global
- Singletons whose semantics changed:
  - spack.store.store() -> spack.store
  - spack.repo.path() -> spack.repo.path
  - spack.config.config() -> spack.config.config
  - spack.caches.fetch_cache() -> spack.caches.fetch_cache
  - spack.caches.misc_cache() -> spack.caches.misc_cache
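The shape of the change at call sites, sketched as comments (illustrative, drawn from the hunks below):

    # before: factory functions lazily built and cached each global
    #     matching = spack.store.store().db.query(spec)
    #     repo = spack.repo.path().get_repo(spec.namespace)
    #
    # after: the module attribute is a llnl.util.lang.Singleton wrapper,
    # so plain attribute access triggers the same lazy construction
    #     matching = spack.store.db.query(spec)
    #     repo = spack.repo.path.get_repo(spec.namespace)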
@@ -531,3 +531,48 @@ def __init__(self, wrapped_object):
         self.__class__ = type(wrapped_name, (wrapped_cls,), {})

         self.__dict__ = wrapped_object.__dict__
+
+
+class Singleton(object):
+    """Simple wrapper for lazily initialized singleton objects."""
+
+    def __init__(self, factory):
+        """Create a new singleton to be inited with the factory function.
+
+        Args:
+            factory (function): function taking no arguments that
+                creates the singleton instance.
+        """
+        self.factory = factory
+        self._instance = None
+
+    @property
+    def instance(self):
+        if self._instance is None:
+            self._instance = self.factory()
+        return self._instance
+
+    def __getattr__(self, name):
+        return getattr(self.instance, name)
+
+    def __str__(self):
+        return str(self.instance)
+
+    def __repr__(self):
+        return repr(self.instance)
+
+
+class LazyReference(object):
+    """Lazily evaluated reference to part of a singleton."""
+
+    def __init__(self, ref_function):
+        self.ref_function = ref_function
+
+    def __getattr__(self, name):
+        return getattr(self.ref_function(), name)
+
+    def __str__(self):
+        return str(self.ref_function())
+
+    def __repr__(self):
+        return repr(self.ref_function())
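To see how these wrappers behave, here is a small self-contained demo (toy classes, not Spack code): the factory runs only on first attribute access, and later accesses reuse the cached instance.

    class Expensive(object):
        def __init__(self):
            print('building the real object now')
            self.scopes = ['defaults', 'site', 'user']


    # copy of llnl.util.lang.Singleton from the hunk above, trimmed of
    # docstrings and the str/repr forwarding
    class Singleton(object):
        def __init__(self, factory):
            self.factory = factory
            self._instance = None

        @property
        def instance(self):
            if self._instance is None:
                self._instance = self.factory()
            return self._instance

        def __getattr__(self, name):
            return getattr(self.instance, name)


    obj = Singleton(Expensive)   # nothing is constructed yet
    print(obj.scopes)            # prints 'building...' once, then the list
    print(obj.scopes)            # cached: the factory does not run again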
@@ -149,7 +149,7 @@ def write_buildinfo_file(prefix, workdir, rel=False):
             # This cuts down on the number of files added to the list
             # of files potentially needing relocation
             if relocate.strings_contains_installroot(
-                    path_name, spack.store.store().layout.root):
+                    path_name, spack.store.layout.root):
                 filetype = relocate.get_filetype(path_name)
                 if relocate.needs_binary_relocation(filetype, os_id):
                     rel_path_name = os.path.relpath(path_name, prefix)

@@ -161,9 +161,9 @@ def write_buildinfo_file(prefix, workdir, rel=False):
     # Create buildinfo data and write it to disk
     buildinfo = {}
     buildinfo['relative_rpaths'] = rel
-    buildinfo['buildpath'] = spack.store.store().layout.root
+    buildinfo['buildpath'] = spack.store.layout.root
     buildinfo['relative_prefix'] = os.path.relpath(
-        prefix, spack.store.store().layout.root)
+        prefix, spack.store.layout.root)
     buildinfo['relocate_textfiles'] = text_to_relocate
     buildinfo['relocate_binaries'] = binary_to_relocate
     filename = buildinfo_file_name(workdir)

@@ -334,7 +334,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
     # This will be used to determine is the directory layout has changed.
     buildinfo = {}
     buildinfo['relative_prefix'] = os.path.relpath(
-        spec.prefix, spack.store.store().layout.root)
+        spec.prefix, spack.store.layout.root)
     spec_dict['buildinfo'] = buildinfo
     with open(specfile_path, 'w') as outfile:
         outfile.write(yaml.dump(spec_dict))

@@ -414,7 +414,7 @@ def relocate_package(workdir, allow_root):
     Relocate the given package
     """
     buildinfo = read_buildinfo_file(workdir)
-    new_path = spack.store.store().layout.root
+    new_path = spack.store.layout.root
     old_path = buildinfo['buildpath']
     rel = buildinfo.get('relative_rpaths', False)
     if rel:

@@ -493,7 +493,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                 "It cannot be installed.")

     new_relative_prefix = str(os.path.relpath(spec.prefix,
-                              spack.store.store().layout.root))
+                              spack.store.layout.root))
     # if the original relative prefix is in the spec file use it
     buildinfo = spec_dict.get('buildinfo', {})
     old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)

@@ -256,7 +256,7 @@ def set_build_environment_variables(pkg, env, dirty):
     env.set(SPACK_PREFIX, pkg.prefix)

     # Install root prefix
-    env.set(SPACK_INSTALL, spack.store.store().root)
+    env.set(SPACK_INSTALL, spack.store.root)

     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere.
@@ -24,48 +24,47 @@
 ##############################################################################
 """Caches used by Spack to store data"""
 import os

+import llnl.util.lang
+
 import spack.paths
 import spack.config
+import spack.fetch_strategy
+import spack.util.file_cache
 from spack.util.path import canonicalize_path
-from spack.util.file_cache import FileCache


-_misc_cache = None
-_fetch_cache = None
-
-
-def misc_cache():
+def _misc_cache():
     """The ``misc_cache`` is Spack's cache for small data.

     Currently the ``misc_cache`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
-    global _misc_cache
+    path = spack.config.get('config:misc_cache')
+    if not path:
+        path = os.path.join(spack.paths.user_config_path, 'cache')
+    path = canonicalize_path(path)

-    if _misc_cache is None:
-        path = spack.config.get('config:misc_cache')
-        if not path:
-            path = os.path.join(spack.paths.user_config_path, 'cache')
-        path = canonicalize_path(path)
-        _misc_cache = FileCache(path)
-
-    return _misc_cache
+    return spack.util.file_cache.FileCache(path)


-def fetch_cache():
+#: Spack's cache for small data
+misc_cache = llnl.util.lang.Singleton(_misc_cache)
+
+
+def _fetch_cache():
     """Filesystem cache of downloaded archives.

     This prevents Spack from repeatedly fetch the same files when
     building the same package different ways or multiple times.
     """
-    global _fetch_cache
+    path = spack.config.get('config:source_cache')
+    if not path:
+        path = os.path.join(spack.paths.var_path, "cache")
+    path = canonicalize_path(path)

-    if _fetch_cache is None:
-        path = spack.config.get('config:source_cache')
-        if not path:
-            path = os.path.join(spack.paths.var_path, "cache")
-        path = canonicalize_path(path)
-        _fetch_cache = spack.fetch_strategy.FsCache(path)
+    return spack.fetch_strategy.FsCache(path)

-    return _fetch_cache
+
+#: Spack's local cache for downloaded source archives
+fetch_cache = llnl.util.lang.Singleton(_fetch_cache)
@@ -49,7 +49,7 @@ def default_modify_scope():
     Commands that modify configuration by default modify the *highest*
     priority scope.
     """
-    return spack.config.config().highest_precedence_scope().name
+    return spack.config.config.highest_precedence_scope().name


 def default_list_scope():

@@ -197,7 +197,7 @@ def elide_list(line_list, max_num=10):


 def disambiguate_spec(spec):
-    matching_specs = spack.store.store().db.query(spec)
+    matching_specs = spack.store.db.query(spec)
     if not matching_specs:
         tty.die("Spec '%s' matches no installed packages." % spec)

@@ -54,10 +54,10 @@ def activate(parser, args):
     if not spec.package.is_extension:
         tty.die("%s is not an extension." % spec.name)

-    layout = spack.store.store().extensions
+    layout = spack.store.extensions
     if args.view is not None:
         layout = YamlViewExtensionsLayout(
-            args.view, spack.store.store().layout)
+            args.view, spack.store.layout)

     if spec.package.is_activated(extensions_layout=layout):
         tty.msg("Package %s is already activated." % specs[0].short_spec)

@@ -68,7 +68,7 @@ def bootstrap(parser, args, **kwargs):
     requirement_dict = {'environment-modules': 'environment-modules~X'}

     for requirement in requirement_dict:
-        installed_specs = spack.store.store().db.query(requirement)
+        installed_specs = spack.store.db.query(requirement)
         if(len(installed_specs) > 0):
             tty.msg("Requirement %s is satisfied with installed "
                     "package %s" % (requirement, installed_specs[0]))

@@ -116,7 +116,7 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, force=False):
     has_errors = False
     specs = spack.cmd.parse_specs(pkgs)
     for spec in specs:
-        matching = spack.store.store().db.query(spec)
+        matching = spack.store.db.query(spec)
         # For each spec provided, make sure it refers to only one package.
         # Fail and ask user to be unambiguous if it doesn't
         if not allow_multiple_matches and len(matching) > 1:

@@ -254,7 +254,7 @@ def install_tarball(spec, args):
         tty.msg('Installing buildcache for spec %s' % spec.format())
         bindist.extract_tarball(spec, tarball, args.allow_root,
                                 args.unsigned, args.force)
-        spack.store.store().reindex()
+        spack.store.store.reindex()
     else:
         tty.die('Download of binary cache file for spec %s failed.' %
                 spec.format())

@@ -83,8 +83,8 @@ def clean(parser, args):

     if args.downloads:
         tty.msg('Removing cached downloads')
-        spack.caches.fetch_cache().destroy()
+        spack.caches.fetch_cache.destroy()

     if args.misc_cache:
         tty.msg('Removing cached information on repositories')
-        spack.caches.misc_cache().destroy()
+        spack.caches.misc_cache.destroy()

@@ -73,12 +73,12 @@ def _specs(self, **kwargs):

         # return everything for an empty query.
         if not qspecs:
-            return spack.store.store().db.query(**kwargs)
+            return spack.store.db.query(**kwargs)

         # Return only matching stuff otherwise.
         specs = set()
         for spec in qspecs:
-            for s in spack.store.store().db.query(spec, **kwargs):
+            for s in spack.store.db.query(spec, **kwargs):
                 specs.add(s)
         return sorted(specs)

@@ -115,7 +115,7 @@ def compiler_find(args):
         n = len(new_compilers)
         s = 's' if n > 1 else ''

-        config = spack.config.config()
+        config = spack.config.config
         filename = config.get_config_filename(args.scope, 'compilers')
         tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
         colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)

@@ -56,7 +56,7 @@ def setup_parser(subparser):


 def config_get(args):
-    spack.config.config().print_section(args.section)
+    spack.config.config.print_section(args.section)


 def config_edit(args):

@@ -68,7 +68,7 @@ def config_edit(args):
     if not args.section:
         args.section = None

-    config = spack.config.config()
+    config = spack.config.config
     config_file = config.get_config_filename(args.scope, args.section)
     spack.editor(config_file)

@@ -654,11 +654,11 @@ def get_repository(args, name):
                 "namespace {1}".format(spec.namespace, repo.namespace))
     else:
         if spec.namespace:
-            repo = spack.repo.path().get_repo(spec.namespace, None)
+            repo = spack.repo.path.get_repo(spec.namespace, None)
             if not repo:
                 tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
-            repo = spack.repo.path().first_repo()
+            repo = spack.repo.path.first_repo()

     # Set the namespace on the spec if it's not there already
     if not spec.namespace:

@@ -60,15 +60,15 @@ def deactivate(parser, args):
     spec = spack.cmd.disambiguate_spec(specs[0])
     pkg = spec.package

-    layout = spack.store.store().extensions
+    layout = spack.store.extensions
     if args.view is not None:
         layout = YamlViewExtensionsLayout(
-            args.view, spack.store.store().layout)
+            args.view, spack.store.layout)

     if args.all:
         if pkg.extendable:
             tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
-            ext_pkgs = spack.store.store().db.activated_extensions_for(
+            ext_pkgs = spack.store.db.activated_extensions_for(
                 spec, extensions_layout=layout)

             for ext_pkg in ext_pkgs:

@@ -76,16 +76,16 @@ def create_db_tarball(args):
     tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
     tarball_path = os.path.abspath(tarball_name)

-    base = os.path.basename(spack.store.store().root)
+    base = os.path.basename(spack.store.root)
     transform_args = []
     if 'GNU' in tar('--version', output=str):
         transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
     else:
         transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]

-    wd = os.path.dirname(spack.store.store().root)
+    wd = os.path.dirname(spack.store.root)
     with working_dir(wd):
-        files = [spack.store.store().db._index_path]
+        files = [spack.store.db._index_path]
         files += glob('%s/*/*/*/.spack/spec.yaml' % base)
         files = [os.path.relpath(f) for f in files]

@@ -60,7 +60,7 @@ def dependencies(parser, args):
         spec = spack.cmd.disambiguate_spec(specs[0])

         tty.msg("Dependencies of %s" % spec.format('$_$@$%@$/', color=True))
-        deps = spack.store.store().db.installed_relatives(
+        deps = spack.store.db.installed_relatives(
             spec, 'children', args.transitive)
         if deps:
             spack.cmd.display_specs(deps, long=True)

@@ -75,7 +75,7 @@ def dependencies(parser, args):
         else:
             packages = [
                 spack.repo.get(s.name)
-                for s in spack.repo.path().providers_for(spec)]
+                for s in spack.repo.path.providers_for(spec)]

         dependencies = set()
         for pkg in packages:

@@ -57,14 +57,14 @@ def inverted_dependencies():
         actual dependents.
     """
     dag = {}
-    for pkg in spack.repo.path().all_packages():
+    for pkg in spack.repo.path.all_packages():
         dag.setdefault(pkg.name, set())
         for dep in pkg.dependencies:
             deps = [dep]

             # expand virtuals if necessary
-            if spack.repo.path().is_virtual(dep):
-                deps += [s.name for s in spack.repo.path().providers_for(dep)]
+            if spack.repo.path.is_virtual(dep):
+                deps += [s.name for s in spack.repo.path.providers_for(dep)]

             for d in deps:
                 dag.setdefault(d, set()).add(pkg.name)

@@ -103,7 +103,7 @@ def dependents(parser, args):
         spec = spack.cmd.disambiguate_spec(specs[0])

         tty.msg("Dependents of %s" % spec.cformat('$_$@$%@$/'))
-        deps = spack.store.store().db.installed_relatives(
+        deps = spack.store.db.installed_relatives(
             spec, 'parents', args.transitive)
         if deps:
             spack.cmd.display_specs(deps, long=True)
@@ -78,7 +78,7 @@ def diy(self, args):
         tty.die("spack diy only takes one spec.")

     spec = specs[0]
-    if not spack.repo.path().exists(spec.name):
+    if not spack.repo.path.exists(spec.name):
         tty.die("No package for '{0}' was found.".format(spec.name),
                 " Use `spack create` to create a new package")

@@ -50,9 +50,9 @@ def edit_package(name, repo_path, namespace):
     if repo_path:
         repo = spack.repo.Repo(repo_path)
     elif namespace:
-        repo = spack.repo.path().get_repo(namespace)
+        repo = spack.repo.path.get_repo(namespace)
     else:
-        repo = spack.repo.path()
+        repo = spack.repo.path
     path = repo.filename_for_package_name(name)

     spec = Spec(name)

@@ -105,7 +105,7 @@ def extensions(parser, args):
     if show_packages:
         #
         # List package names of extensions
-        extensions = spack.repo.path().extensions_for(spec)
+        extensions = spack.repo.path.extensions_for(spec)
         if not extensions:
             tty.msg("%s has no extensions." % spec.cshort_spec)
         else:

@@ -113,17 +113,17 @@ def extensions(parser, args):
             tty.msg("%d extensions:" % len(extensions))
             colify(ext.name for ext in extensions)

-    layout = spack.store.store().extensions
+    layout = spack.store.extensions
     if args.view is not None:
         layout = YamlViewExtensionsLayout(
-            args.view, spack.store.store().layout)
+            args.view, spack.store.layout)

     if show_installed:
         #
         # List specs of installed extensions.
         #
-        store = spack.store.store()
-        installed = [s.spec for s in store.db.installed_extensions_for(spec)]
+        installed = [
+            s.spec for s in spack.store.db.installed_extensions_for(spec)]

         if show_all:
             print

@@ -147,7 +147,7 @@ def find(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path().packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
         query_specs = [x for x in query_specs if x.name in packages_with_tags]

     # Display the result

@@ -77,7 +77,7 @@ def graph(parser, args):
         if args.specs:
             tty.die("Can't specify specs with --installed")
         args.dot = True
-        specs = spack.store.store().db.query()
+        specs = spack.store.db.query()

     else:
         specs = spack.cmd.parse_specs(

@@ -244,7 +244,7 @@ def install(parser, args, **kwargs):
             "only one spec is allowed when overwriting an installation"

         spec = specs[0]
-        t = spack.store.store().db.query(spec)
+        t = spack.store.db.query(spec)
         assert len(t) == 1, "to overwrite a spec you must install it first"

         # Give the user a last chance to think about overwriting an already

@@ -303,7 +303,7 @@ def list(parser, args):
     # Filter by tags
     if args.tags:
         packages_with_tags = set(
-            spack.repo.path().packages_with_tags(*args.tags))
+            spack.repo.path.packages_with_tags(*args.tags))
         sorted_packages = set(sorted_packages) & packages_with_tags
         sorted_packages = sorted(sorted_packages)

@@ -80,7 +80,7 @@ def location(parser, args):
         print(spack.paths.prefix)

     elif args.packages:
-        print(spack.repo.path().first_repo().root)
+        print(spack.repo.path.first_repo().root)

     elif args.stages:
         print(spack.paths.stage_path)

@@ -102,7 +102,7 @@ def location(parser, args):

         if args.package_dir:
             # This one just needs the spec name.
-            print(spack.repo.path().dirname_for_package_name(spec.name))
+            print(spack.repo.path.dirname_for_package_name(spec.name))

         else:
             # These versions need concretized specs.

@@ -283,7 +283,7 @@ def refresh(module_types, specs, args):
     # skip unknown packages.
     writers = [
         cls(spec) for spec in specs
-        if spack.repo.path().exists(spec.name)]
+        if spack.repo.path.exists(spec.name)]

     # Filter blacklisted packages early
     writers = [x for x in writers if not x.conf.blacklisted]

@@ -91,7 +91,7 @@ def list_packages(rev):

 def pkg_add(args):
     for pkg_name in args.packages:
-        filename = spack.repo.path().filename_for_package_name(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
         if not os.path.isfile(filename):
             tty.die("No such package: %s. Path does not exist:" %
                     pkg_name, filename)

@@ -46,7 +46,7 @@ def setup_parser(subparser):


 def providers(parser, args):
-    valid_virtuals = sorted(spack.repo.path().provider_index.providers.keys())
+    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())

     buffer = six.StringIO()
     isatty = sys.stdout.isatty()

@@ -77,5 +77,5 @@ def providers(parser, args):
     for spec in specs:
         if sys.stdout.isatty():
             print("{0}:".format(spec))
-        spack.cmd.display_specs(sorted(spack.repo.path().providers_for(spec)))
+        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
         print('')

@@ -31,4 +31,4 @@


 def reindex(parser, args):
-    spack.store.store().reindex()
+    spack.store.store.reindex()

@@ -135,9 +135,9 @@ def setup(self, args):
         tty.die("spack setup only takes one spec.")

     # Take a write lock before checking for existence.
-    with spack.store.store().db.write_transaction():
+    with spack.store.db.write_transaction():
         spec = specs[0]
-        if not spack.repo.path().exists(spec.name):
+        if not spack.repo.path.exists(spec.name):
             tty.die("No package for '{0}' was found.".format(spec.name),
                     " Use `spack create` to create a new package")
         if not spec.versions.concrete:

@@ -93,7 +93,7 @@ def find_matching_specs(specs, allow_multiple_matches=False, force=False):
     specs_from_cli = []
     has_errors = False
     for spec in specs:
-        matching = spack.store.store().db.query(spec)
+        matching = spack.store.db.query(spec)
         # For each spec provided, make sure it refers to only one package.
         # Fail and ask user to be unambiguous if it doesn't
         if not allow_multiple_matches and len(matching) > 1:

@@ -128,7 +128,7 @@ def installed_dependents(specs):
     """
     dependents = {}
     for item in specs:
-        installed = spack.store.store().db.installed_relatives(
+        installed = spack.store.db.installed_relatives(
            item, 'parents', True)
        lst = [x for x in installed if x not in specs]
        if lst:

@@ -158,7 +158,7 @@ def do_uninstall(specs, force):
    # Sort packages to be uninstalled by the number of installed dependents
    # This ensures we do things in the right order
    def num_installed_deps(pkg):
-        dependents = spack.store.store().db.installed_relatives(
+        dependents = spack.store.db.installed_relatives(
            pkg.spec, 'parents', True)
        return len(dependents)

@@ -144,7 +144,7 @@ def url_list(args):
     urls = set()

     # Gather set of URLs from all packages
-    for pkg in spack.repo.path().all_packages():
+    for pkg in spack.repo.path.all_packages():
         url = getattr(pkg.__class__, 'url', None)
         urls = url_list_parsing(args, urls, url, pkg)

@@ -178,7 +178,7 @@ def url_summary(args):
     tty.msg('Generating a summary of URL parsing in Spack...')

     # Loop through all packages
-    for pkg in spack.repo.path().all_packages():
+    for pkg in spack.repo.path.all_packages():
         urls = set()

         url = getattr(pkg.__class__, 'url', None)

@@ -93,7 +93,7 @@ def squash(matching_specs):
         return matching_specs[0]

     # make function always return a list to keep consistency between py2/3
-    return list(map(squash, map(spack.store.store().db.query, specs)))
+    return list(map(squash, map(spack.store.db.query, specs)))


 def setup_parser(sp):

@@ -176,7 +176,7 @@ def view(parser, args):
     path = args.path[0]

     view = YamlFilesystemView(
-        path, spack.store.store().layout,
+        path, spack.store.layout,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
         link=os.link if args.action in ["hardlink", "hard"]
         else os.symlink,
@@ -116,7 +116,7 @@ def init_compiler_config():

 def compiler_config_files():
     config_files = list()
-    config = spack.config.config()
+    config = spack.config.config
     for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get('compilers', scope=name)

@@ -339,7 +339,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config()
+    config = spack.config.config

     scope_to_compilers = {}
     for scope in config.scopes:

@@ -39,6 +39,8 @@
 from contextlib import contextmanager
 from six import iteritems

+import llnl.util.lang
+
 import spack.repo
 import spack.abi
 import spack.spec

@@ -54,27 +56,11 @@


 #: Concretizer singleton
-_concretizer = None
+concretizer = llnl.util.lang.Singleton(lambda: Concretizer())


 #: impements rudimentary logic for ABI compatibility
-_abi_checker = None
-
-
-def _abi():
-    """Get an ABI checker object."""
-    global _abi_checker
-    if _abi_checker is None:
-        _abi_checker = spack.abi.ABI()
-    return _abi_checker
-
-
-def concretizer():
-    """Get concretizer singleton."""
-    global _concretizer
-    if _concretizer is None:
-        _concretizer = Concretizer()
-    return _concretizer
+_abi = llnl.util.lang.Singleton(lambda: spack.abi.ABI())


 @contextmanager

@@ -102,7 +88,7 @@ def _valid_virtuals_and_externals(self, spec):
             pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = spack.repo.path().providers_for(spec)
+            candidates = spack.repo.path.providers_for(spec)
             if not candidates:
                 raise spack.spec.UnsatisfiableProviderSpecError(
                     candidates[0], spec)

@@ -163,8 +149,8 @@ def choose_virtual_or_external(self, spec):
         return sorted(candidates,
                       reverse=True,
                       key=lambda spec: (
-                          _abi().compatible(spec, abi_exemplar, loose=True),
-                          _abi().compatible(spec, abi_exemplar)))
+                          _abi.compatible(spec, abi_exemplar, loose=True),
+                          _abi.compatible(spec, abi_exemplar)))

     def concretize_version(self, spec):
         """If the spec is already concrete, return.  Otherwise take
@@ -61,6 +61,7 @@
 import yaml
 from yaml.error import MarkedYAMLError

+import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

@@ -456,17 +457,16 @@ def override(path, value):
     """Simple way to override config settings within a context."""
     overrides = InternalConfigScope('overrides')

-    cfg = config()
-    cfg.push_scope(overrides)
-    cfg.set(path, value, scope='overrides')
+    config.push_scope(overrides)
+    config.set(path, value, scope='overrides')

-    yield cfg
+    yield config

-    scope = cfg.pop_scope()
+    scope = config.pop_scope()
     assert scope is overrides


-def config():
+def _config():
     """Singleton Configuration instance.

     This constructs one instance associated with this module and returns

@@ -477,40 +477,37 @@ def config():
         (Configuration): object for accessing spack configuration

     """
-    global _configuration
-    if not _configuration:
-        _configuration = Configuration()
+    cfg = Configuration()

-        # first do the builtin, hardcoded defaults
-        defaults = InternalConfigScope('_builtin', config_defaults)
-        _configuration.push_scope(defaults)
+    # first do the builtin, hardcoded defaults
+    defaults = InternalConfigScope('_builtin', config_defaults)
+    cfg.push_scope(defaults)

-        # Each scope can have per-platfom overrides in subdirectories
-        platform = spack.architecture.platform().name
+    # Each scope can have per-platfom overrides in subdirectories
+    platform = spack.architecture.platform().name

-        # add each scope and its platform-specific directory
-        for name, path in configuration_paths:
-            _configuration.push_scope(ConfigScope(name, path))
+    # add each scope and its platform-specific directory
+    for name, path in configuration_paths:
+        cfg.push_scope(ConfigScope(name, path))

-            plat_name = '%s/%s' % (name, platform)
-            plat_path = os.path.join(path, platform)
-            _configuration.push_scope(ConfigScope(plat_name, plat_path))
+        plat_name = '%s/%s' % (name, platform)
+        plat_path = os.path.join(path, platform)
+        cfg.push_scope(ConfigScope(plat_name, plat_path))

-        # we make a special scope for spack commands so that they can
-        # override configuration options.
-        _configuration.push_scope(InternalConfigScope('command_line'))
+    # we make a special scope for spack commands so that they can
+    # override configuration options.
+    cfg.push_scope(InternalConfigScope('command_line'))

-    return _configuration
+    return cfg


-#: This is the global singleton configuration for Spack.
-#: TODO: consider making this NOT global and associate it with a spack instance
-_configuration = None
+#: This is the singleton configuration instance for Spack.
+config = llnl.util.lang.Singleton(_config)


 def get(path, default=None, scope=None):
     """Module-level wrapper for ``Configuration.get()``."""
-    return config().get(path, default, scope)
+    return config.get(path, default, scope)


 def set(path, value, scope=None):

@@ -518,12 +515,12 @@ def set(path, value, scope=None):

     Accepts the path syntax described in ``get()``.
     """
-    return config().set(path, value, scope)
+    return config.set(path, value, scope)


 def scopes():
     """Convenience function to get list of configuration scopes."""
-    return config().scopes
+    return config.scopes


 def _validate_section_name(section):
@@ -223,7 +223,7 @@ def prefix_lock(self, spec):

         Prefix lock is a byte range lock on the nth byte of a file.

-        The lock file is ``spack.store.store().db.prefix_lock`` -- the DB
+        The lock file is ``spack.store.db.prefix_lock`` -- the DB
         tells us what to call it and it lives alongside the install DB.

         n is the sys.maxsize-bit prefix of the DAG hash.  This makes

@@ -366,7 +366,7 @@ def check(cond, msg):
         if version > _db_version:
             raise InvalidDatabaseVersionError(_db_version, version)
         elif version < _db_version:
-            self.reindex(spack.store.store().layout)
+            self.reindex(spack.store.layout)
             installs = dict((k, v.to_dict()) for k, v in self._data.items())

         def invalid_record(hash_key, error):

@@ -499,7 +499,7 @@ def _read_suppress_error():
                 tty.debug(
                     'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
                 try:
-                    layout = spack.store.store().layout
+                    layout = spack.store.layout
                     if entry.spec.external:
                         layout = None
                         install_check = True

@@ -609,7 +609,7 @@ def _read(self):
             # reindex() takes its own write lock, so no lock here.
             with WriteTransaction(self.lock, timeout=_db_lock_timeout):
                 self._write(None, None, None)
-            self.reindex(spack.store.store().layout)
+            self.reindex(spack.store.layout)

     def _add(
             self,

@@ -823,7 +823,7 @@ def activated_extensions_for(self, extendee_spec, extensions_layout=None):
             the given spec
         """
         if extensions_layout is None:
-            extensions_layout = spack.store.store().extensions
+            extensions_layout = spack.store.extensions
         for spec in self.query():
             try:
                 extensions_layout.check_activated(extendee_spec, spec)

@@ -903,7 +903,7 @@ def query(
             if explicit is not any and rec.explicit != explicit:
                 continue

-            if known is not any and spack.repo.path().exists(
+            if known is not any and spack.repo.path.exists(
                     rec.spec.name) != known:
                 continue

@@ -267,7 +267,7 @@ def add_standalone(self, spec):
             # Check for globally activated extensions in the extendee that
             # we're looking at.
             activated = [p.spec for p in
-                         spack.store.store().db.activated_extensions_for(spec)]
+                         spack.store.db.activated_extensions_for(spec)]
             if activated:
                 tty.error("Globally activated extensions cannot be used in "
                           "conjunction with filesystem views. "

@@ -391,7 +391,7 @@ def remove_standalone(self, spec):

     def get_all_specs(self):
         dotspack = join_path(self.root,
-                             spack.store.store().layout.metadata_dir)
+                             spack.store.layout.metadata_dir)
         if os.path.exists(dotspack):
             return list(filter(None, map(self.get_spec, os.listdir(dotspack))))
         else:

@@ -409,13 +409,13 @@ def get_conflicts(self, *specs):
     def get_path_meta_folder(self, spec):
         "Get path to meta folder for either spec or spec name."
         return join_path(self.root,
-                         spack.store.store().layout.metadata_dir,
+                         spack.store.layout.metadata_dir,
                          getattr(spec, "name", spec))

     def get_spec(self, spec):
         dotspack = self.get_path_meta_folder(spec)
         filename = join_path(dotspack,
-                             spack.store.store().layout.spec_file_name)
+                             spack.store.layout.spec_file_name)

         try:
             with open(filename, "r") as f:

@@ -424,7 +424,7 @@ def get_spec(self, spec):
             return None

     def link_meta_folder(self, spec):
-        src = spack.store.store().layout.metadata_path(spec)
+        src = spack.store.layout.metadata_path(spec)
         tgt = self.get_path_meta_folder(spec)

         tree = LinkTree(src)

@@ -550,4 +550,4 @@ def get_dependencies(specs):


 def ignore_metadata_dir(f):
-    return f in spack.store.store().layout.hidden_file_paths
+    return f in spack.store.layout.hidden_file_paths

@@ -36,7 +36,7 @@ def pre_run():


 def check_compiler_yaml_version():
-    config = spack.config.config()
+    config = spack.config.config

     for scope in config.file_scopes:
         file_name = os.path.join(scope.path, 'compilers.yaml')
@@ -661,9 +661,9 @@ def possible_dependencies(
             visited = set([self.name])

         for i, name in enumerate(self.dependencies):
-            if spack.repo.path().is_virtual(name):
+            if spack.repo.path.is_virtual(name):
                 if expand_virtuals:
-                    providers = spack.repo.path().providers_for(name)
+                    providers = spack.repo.path.providers_for(name)
                     dep_names = [spec.name for spec in providers]
                 else:
                     visited.add(name)

@@ -949,7 +949,7 @@ def is_activated(self, extensions_layout=None):
             raise ValueError(
                 "is_extension called on package that is not an extension.")
         if extensions_layout is None:
-            extensions_layout = spack.store.store().extensions
+            extensions_layout = spack.store.extensions
         exts = extensions_layout.extension_map(self.extendee_spec)
         return (self.name in exts) and (exts[self.name] == self.spec)

@@ -1003,7 +1003,7 @@ def remove_prefix(self):
        Removes the prefix for a package along with any empty parent
        directories
        """
-        spack.store.store().layout.remove_install_directory(self.spec)
+        spack.store.layout.remove_install_directory(self.spec)

     def do_fetch(self, mirror_only=False):
         """

@@ -1241,8 +1241,7 @@ def do_fake_install(self):
         # Install fake man page
         mkdirp(self.prefix.man.man1)

-        store = spack.store.store()
-        packages_dir = store.layout.build_packages_path(self.spec)
+        packages_dir = spack.store.layout.build_packages_path(self.spec)
         dump_packages(self.spec, packages_dir)

     def _if_make_target_execute(self, target):

@@ -1305,7 +1304,7 @@ def _resource_stage(self, resource):
     def _stage_and_write_lock(self):
         """Prefix lock nested in a stage."""
         with self.stage:
-            with spack.store.store().db.prefix_write_lock(self.spec):
+            with spack.store.db.prefix_write_lock(self.spec):
                 yield

     def _process_external_package(self, explicit):

@@ -1329,7 +1328,7 @@ def _process_external_package(self, explicit):
         try:
             # Check if the package was already registered in the DB
             # If this is the case, then just exit
-            rec = spack.store.store().db.get_record(self.spec)
+            rec = spack.store.db.get_record(self.spec)
             message = '{s.name}@{s.version} : already registered in DB'
             tty.msg(message.format(s=self))
             # Update the value of rec.explicit if it is necessary

@@ -1345,12 +1344,12 @@ def _process_external_package(self, explicit):
             # Add to the DB
             message = '{s.name}@{s.version} : registering into DB'
             tty.msg(message.format(s=self))
-            spack.store.store().db.add(self.spec, None, explicit=explicit)
+            spack.store.db.add(self.spec, None, explicit=explicit)

     def _update_explicit_entry_in_db(self, rec, explicit):
         if explicit and not rec.explicit:
-            with spack.store.store().db.write_transaction():
-                rec = spack.store.store().db.get_record(self.spec)
+            with spack.store.db.write_transaction():
+                rec = spack.store.db.get_record(self.spec)
                 rec.explicit = True
                 message = '{s.name}@{s.version} : marking the package explicit'
                 tty.msg(message.format(s=self))

@@ -1367,7 +1366,7 @@ def try_install_from_binary_cache(self, explicit):
             binary_distribution.extract_tarball(
                 binary_spec, tarball, allow_root=False,
                 unsigned=False, force=False)
-            spack.store.store().db.add(
+            spack.store.db.add(
                 self.spec, spack.store.layout, explicit=explicit)
             return True

@@ -1425,15 +1424,15 @@ def do_install(self,
         partial = self.check_for_unfinished_installation(keep_prefix, restage)

         # Ensure package is not already installed
-        layout = spack.store.store().layout
-        with spack.store.store().db.prefix_read_lock(self.spec):
+        layout = spack.store.layout
+        with spack.store.db.prefix_read_lock(self.spec):
             if partial:
                 tty.msg(
                     "Continuing from partial install of %s" % self.name)
             elif layout.check_installed(self.spec):
                 msg = '{0.name} is already installed in {0.prefix}'
                 tty.msg(msg.format(self))
-                rec = spack.store.store().db.get_record(self.spec)
+                rec = spack.store.db.get_record(self.spec)
                 # In case the stage directory has already been created,
                 # this ensures it's removed after we checked that the spec
                 # is installed

@@ -1568,7 +1567,7 @@ def build_process():
         try:
             # Create the install prefix and fork the build process.
             if not os.path.exists(self.prefix):
-                spack.store.store().layout.create_install_directory(self.spec)
+                spack.store.layout.create_install_directory(self.spec)

             # Fork a child to do the actual installation
             # we preserve verbosity settings across installs.

@@ -1579,8 +1578,8 @@ def build_process():
             keep_prefix = self.last_phase is None or keep_prefix
             # note: PARENT of the build process adds the new package to
             # the database, so that we don't need to re-read from file.
-            spack.store.store().db.add(
-                self.spec, spack.store.store().layout, explicit=explicit
+            spack.store.db.add(
+                self.spec, spack.store.layout, explicit=explicit
             )
         except directory_layout.InstallDirectoryAlreadyExistsError:
             # Abort install if install directory exists.

@@ -1634,9 +1633,9 @@ def check_for_unfinished_installation(
             raise ExternalPackageError("Attempted to repair external spec %s" %
                                        self.spec.name)

-        with spack.store.store().db.prefix_write_lock(self.spec):
+        with spack.store.db.prefix_write_lock(self.spec):
             try:
-                record = spack.store.store().db.get_record(self.spec)
+                record = spack.store.db.get_record(self.spec)
                 installed_in_db = record.installed if record else False
             except KeyError:
                 installed_in_db = False

@@ -1671,10 +1670,9 @@ def _do_install_pop_kwargs(self, kwargs):

     def log(self):
         # Copy provenance into the install directory on success
-        store = spack.store.store()
-        log_install_path = store.layout.build_log_path(self.spec)
-        env_install_path = store.layout.build_env_path(self.spec)
-        packages_dir = store.layout.build_packages_path(self.spec)
+        log_install_path = spack.store.layout.build_log_path(self.spec)
+        env_install_path = spack.store.layout.build_env_path(self.spec)
+        packages_dir = spack.store.layout.build_packages_path(self.spec)

         # Remove first if we're overwriting another build
         # (can happen with spack setup)

@@ -1693,7 +1691,7 @@ def log(self):
         with working_dir(self.stage.source_path):
             errors = StringIO()
             target_dir = os.path.join(
-                spack.store.store().layout.metadata_path(self.spec),
+                spack.store.layout.metadata_path(self.spec),
                 'archived-files')

             for glob_expr in self.archive_files:

@@ -1754,7 +1752,7 @@ def check_paths(path_list, filetype, predicate):

         installed = set(os.listdir(self.prefix))
         installed.difference_update(
-            spack.store.store().layout.hidden_file_paths)
+            spack.store.layout.hidden_file_paths)
         if not installed:
             raise InstallError(
                 "Install failed for %s.  Nothing was installed!" % self.name)

@@ -1762,7 +1760,7 @@ def check_paths(path_list, filetype, predicate):
     @property
     def build_log_path(self):
         if self.installed:
-            return spack.store.store().layout.build_log_path(self.spec)
+            return spack.store.layout.build_log_path(self.spec)
         else:
             return join_path(self.stage.source_path, 'spack-build.out')

@@ -1920,16 +1918,16 @@ def uninstall_by_spec(spec, force=False):
         if not os.path.isdir(spec.prefix):
             # prefix may not exist, but DB may be inconsistent. Try to fix by
             # removing, but omit hooks.
-            specs = spack.store.store().db.query(spec, installed=True)
+            specs = spack.store.db.query(spec, installed=True)
             if specs:
-                spack.store.store().db.remove(specs[0])
+                spack.store.db.remove(specs[0])
                 tty.msg("Removed stale DB entry for %s" % spec.short_spec)
                 return
             else:
                 raise InstallError(str(spec) + " is not installed.")

         if not force:
-            dependents = spack.store.store().db.installed_relatives(
+            dependents = spack.store.db.installed_relatives(
                 spec, 'parents', True)
             if dependents:
                 raise PackageStillNeededError(spec, dependents)

@@ -1941,7 +1939,7 @@ def uninstall_by_spec(spec, force=False):
             pkg = None

         # Pre-uninstall hook runs first.
-        with spack.store.store().db.prefix_write_lock(spec):
+        with spack.store.db.prefix_write_lock(spec):

             if pkg is not None:
                 spack.hooks.pre_uninstall(spec)

@@ -1950,11 +1948,11 @@ def uninstall_by_spec(spec, force=False):
             if not spec.external:
                 msg = 'Deleting package prefix [{0}]'
                 tty.debug(msg.format(spec.short_spec))
-                spack.store.store().layout.remove_install_directory(spec)
+                spack.store.layout.remove_install_directory(spec)
             # Delete DB entry
             msg = 'Deleting DB entry [{0}]'
             tty.debug(msg.format(spec.short_spec))
-            spack.store.store().db.remove(spec)
+            spack.store.db.remove(spec)

         if pkg is not None:
             spack.hooks.post_uninstall(spec)

@@ -2000,7 +1998,7 @@ def do_activate(self, with_dependencies=True, ignore_conflicts=False,
         self._sanity_check_extension()

         if extensions_layout is None:
-            extensions_layout = spack.store.store().extensions
+            extensions_layout = spack.store.extensions

         extensions_layout.check_extension_conflict(
             self.extendee_spec, self.spec)

@@ -2044,11 +2042,11 @@ def activate(self, extension, ignore_conflicts=False, **kwargs):

         """
         extensions_layout = kwargs.get("extensions_layout",
-                                       spack.store.store().extensions)
+                                       spack.store.extensions)
         target = extensions_layout.extendee_target_directory(self)

         def ignore(filename):
-            return (filename in spack.store.store().layout.hidden_file_paths or
+            return (filename in spack.store.layout.hidden_file_paths or
                     kwargs.get('ignore', lambda f: False)(filename))

         tree = LinkTree(extension.prefix)

@@ -2076,7 +2074,7 @@ def do_deactivate(self, **kwargs):
         verbose = kwargs.get("verbose", True)
         remove_dependents = kwargs.get("remove_dependents", False)
         extensions_layout = kwargs.get("extensions_layout",
-                                       spack.store.store().extensions)
+                                       spack.store.extensions)

         # Allow a force deactivate to happen.  This can unlink
         # spurious files if something was corrupted.

@@ -2128,11 +2126,11 @@ def deactivate(self, extension, **kwargs):

         """
         extensions_layout = kwargs.get("extensions_layout",
-                                       spack.store.store().extensions)
+                                       spack.store.extensions)
         target = extensions_layout.extendee_target_directory(self)

         def ignore(filename):
-            return (filename in spack.store.store().layout.hidden_file_paths or
+            return (filename in spack.store.layout.hidden_file_paths or
                     kwargs.get('ignore', lambda f: False)(filename))

         tree = LinkTree(extension.prefix)

@@ -2277,7 +2275,7 @@ def flatten_dependencies(spec, flat_dir):
     for dep in spec.traverse(root=False):
         name = dep.name

-        dep_path = spack.store.store().layout.path_for_spec(dep)
+        dep_path = spack.store.layout.path_for_spec(dep)
         dep_files = LinkTree(dep_path)

         os.mkdir(flat_dir + '/' + name)

@@ -2306,7 +2304,7 @@ def dump_packages(spec, path):
         if node is not spec:
             # Locate the dependency package in the install tree and find
             # its provenance information.
-            source = spack.store.store().layout.build_packages_path(node)
+            source = spack.store.layout.build_packages_path(node)
             source_repo_root = join_path(source, node.namespace)

             # There's no provenance installed for the source package.  Skip it.

@@ -2334,7 +2332,7 @@ def dump_packages(spec, path):
         if node is not spec:
             install_tree(source_pkg_dir, dest_pkg_dir)
         else:
-            spack.repo.path().dump_provenance(node, dest_pkg_dir)
+            spack.repo.path.dump_provenance(node, dest_pkg_dir)


 def print_pkg(message):

@@ -51,7 +51,7 @@ def get_packages_config():
     # by sometihng, not just packages/names that don't exist.
     # So, this won't include, e.g., 'all'.
     virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
-                if spack.repo.path().is_virtual(pkg_name)]
+                if spack.repo.path.is_virtual(pkg_name)]

     # die if there are virtuals in `packages.py`
     if virtuals:
@@ -110,7 +110,7 @@ def get_placeholder_rpaths(path_name, orig_rpaths):
     Replaces original layout root dir with a placeholder string in all rpaths.
     """
     rel_rpaths = []
-    orig_dir = spack.store.store().layout.root
+    orig_dir = spack.store.layout.root
     for rpath in orig_rpaths:
         if re.match(orig_dir, rpath):
             placeholder = set_placeholder(orig_dir)

@@ -186,7 +186,7 @@ def macho_make_paths_placeholder(rpaths, deps, idpath):
     replacement are returned.
     """
     new_idpath = None
-    old_dir = spack.store.store().layout.root
+    old_dir = spack.store.layout.root
     placeholder = set_placeholder(old_dir)
     if idpath:
         new_idpath = re.sub(old_dir, placeholder, idpath)

@@ -425,9 +425,9 @@ def make_binary_placeholder(cur_path_names, allow_root):
                 new_rpaths, new_deps, new_idpath)
             if (not allow_root and
                 strings_contains_installroot(cur_path,
-                                             spack.store.store().layout.root)):
+                                             spack.store.layout.root)):
                 raise InstallRootStringException(
-                    cur_path, spack.store.store().layout.root)
+                    cur_path, spack.store.layout.root)
     elif platform.system() == 'Linux':
         for cur_path in cur_path_names:
             orig_rpaths = get_existing_elf_rpaths(cur_path)

@@ -436,9 +436,9 @@ def make_binary_placeholder(cur_path_names, allow_root):
                 modify_elf_object(cur_path, new_rpaths)
             if (not allow_root and
                 strings_contains_installroot(
-                    cur_path, spack.store.store().layout.root)):
+                    cur_path, spack.store.layout.root)):
                 raise InstallRootStringException(
-                    cur_path, spack.store.store().layout.root)
+                    cur_path, spack.store.layout.root)
     else:
         tty.die("Placeholder not implemented for %s" % platform.system())
@@ -224,7 +224,7 @@ def update_package(self, pkg_name):
             pkg_name (str): name of the package to be removed from the index

         """
-        package = path().get(pkg_name)
+        package = path.get(pkg_name)

         # Remove the package from the list of packages, if present
         for pkg_list in self._tag_dict.values():

@@ -255,7 +255,7 @@ def make_provider_index_cache(packages_path, namespace):
     cache_filename = 'providers/{0}-index.yaml'.format(namespace)

     # Compute which packages needs to be updated in the cache
-    misc_cache = spack.caches.misc_cache()
+    misc_cache = spack.caches.misc_cache
     index_mtime = misc_cache.mtime(cache_filename)

     needs_update = [

@@ -309,7 +309,7 @@ def make_tag_index_cache(packages_path, namespace):
     cache_filename = 'tags/{0}-index.json'.format(namespace)

     # Compute which packages needs to be updated in the cache
-    misc_cache = spack.caches.misc_cache()
+    misc_cache = spack.caches.misc_cache
     index_mtime = misc_cache.mtime(cache_filename)

     needs_update = [

@@ -1079,60 +1079,60 @@ def create_repo(root, namespace=None):
     return full_path, namespace


-#: Singleton repo path instance
-_path = None
-
-
-def set_path(repo):
-    """Set the path() singleton to a specific value.
-
-    Overwrite _path and register it as an importer in sys.meta_path if
-    it is a ``Repo`` or ``RepoPath``.
-    """
-    global _path
-    _path = repo
-
-    # make the new repo_path an importer if needed
-    append = isinstance(repo, (Repo, RepoPath))
-    if append:
-        sys.meta_path.append(_path)
-    return append
-
-
-def path():
+def _path():
     """Get the singleton RepoPath instance for Spack.

     Create a RepoPath, add it to sys.meta_path, and return it.

     TODO: consider not making this a singleton.
     """
-    if _path is None:
-        repo_dirs = spack.config.get('repos')
-        if not repo_dirs:
-            raise NoRepoConfiguredError(
-                "Spack configuration contains no package repositories.")
-        set_path(RepoPath(*repo_dirs))
+    repo_dirs = spack.config.get('repos')
+    if not repo_dirs:
+        raise NoRepoConfiguredError(
+            "Spack configuration contains no package repositories.")

-    return _path
+    path = RepoPath(*repo_dirs)
+    sys.meta_path.append(path)
+    return path
+
+
+#: Singleton repo path instance
+path = llnl.util.lang.Singleton(_path)


 def get(spec):
     """Convenience wrapper around ``spack.repo.get()``."""
-    return path().get(spec)
+    return path.get(spec)


 def all_package_names():
     """Convenience wrapper around ``spack.repo.all_package_names()``."""
-    return path().all_package_names()
+    return path.all_package_names()


+def set_path(repo):
+    """Set the path singleton to a specific value.
+
+    Overwrite ``path`` and register it as an importer in
+    ``sys.meta_path`` if it is a ``Repo`` or ``RepoPath``.
+    """
+    global path
+    path = repo
+
+    # make the new repo_path an importer if needed
+    append = isinstance(repo, (Repo, RepoPath))
+    if append:
+        sys.meta_path.append(repo)
+    return append
+
+
 @contextmanager
 def swap(repo_path):
     """Temporarily use another RepoPath."""
-    global _path
+    global path

     # swap out _path for repo_path
-    saved = _path
+    saved = path
     remove_from_meta = set_path(repo_path)

     yield

@@ -1140,7 +1140,7 @@ def swap(repo_path):
     # restore _path and sys.meta_path
     if remove_from_meta:
         sys.meta_path.remove(repo_path)
-    _path = saved
+    path = saved


 class RepoError(spack.error.SpackError):
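A usage sketch for the rebindable singleton (the extra repository directory below is hypothetical): first access builds the RepoPath from configuration; swap() then temporarily rebinds the module-level `path` global.

    import spack.repo

    # first attribute access constructs the RepoPath and registers it
    # on sys.meta_path (see _path() above)
    has_zlib = spack.repo.path.exists('zlib')

    # temporarily point the module at another RepoPath; '/tmp/extra-repos'
    # is a hypothetical directory used only for illustration
    other = spack.repo.RepoPath('/tmp/extra-repos')
    with spack.repo.swap(other):
        has_pkg = spack.repo.path.exists('some-package')

Because set_path() rebinds the module-level name, code that does `from spack.repo import path` at import time would keep whatever object was bound then; the hunks in this commit consistently go through the module as `spack.repo.path`, which always resolves the current binding.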
@@ -22,7 +22,7 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
import os
|
||||
import os.path
|
||||
|
||||
import spack.build_environment
|
||||
|
@@ -1228,7 +1228,7 @@ def package_class(self):
|
||||
"""Internal package call gets only the class object for a package.
|
||||
Use this to just get package metadata.
|
||||
"""
|
||||
return spack.repo.path().get_pkg_class(self.fullname)
|
||||
return spack.repo.path.get_pkg_class(self.fullname)
|
||||
|
||||
@property
|
||||
def virtual(self):
|
||||
@@ -1244,7 +1244,7 @@ def virtual(self):
|
||||
@staticmethod
|
||||
def is_virtual(name):
|
||||
"""Test if a name is virtual without requiring a Spec."""
|
||||
return (name is not None) and (not spack.repo.path().exists(name))
|
||||
return (name is not None) and (not spack.repo.path.exists(name))
|
||||
|
||||
@property
|
||||
def concrete(self):
|
||||
@@ -1402,7 +1402,7 @@ def cshort_spec(self):
|
||||
@property
|
||||
def prefix(self):
|
||||
if self._prefix is None:
|
||||
self.prefix = spack.store.store().layout.path_for_spec(self)
|
||||
self.prefix = spack.store.layout.path_for_spec(self)
|
||||
return self._prefix
|
||||
|
||||
@prefix.setter
|
||||
@@ -1675,7 +1675,7 @@ def _concretize_helper(self, presets=None, visited=None):
|
||||
# still need to select a concrete package later.
|
||||
if not self.virtual:
|
||||
import spack.concretize
|
||||
concretizer = spack.concretize.concretizer()
|
||||
concretizer = spack.concretize.concretizer
|
||||
changed |= any(
|
||||
(concretizer.concretize_architecture(self),
|
||||
concretizer.concretize_compiler(self),
|
||||
@@ -1744,7 +1744,7 @@ def _expand_virtual_packages(self):
|
||||
# Get a list of possible replacements in order of
|
||||
# preference.
|
||||
import spack.concretize
|
||||
concretizer = spack.concretize.concretizer()
|
||||
concretizer = spack.concretize.concretizer
|
||||
candidates = concretizer.choose_virtual_or_external(spec)
|
||||
|
||||
# Try the replacements in order, skipping any that cause
|
||||
@@ -1849,7 +1849,7 @@ def concretize(self, tests=False):
|
||||
# we can do it as late as possible to allow as much
|
||||
# compatibility across repositories as possible.
|
||||
if s.namespace is None:
|
||||
s.namespace = spack.repo.path().repo_for_pkg(s.name).namespace
|
||||
s.namespace = spack.repo.path.repo_for_pkg(s.name).namespace
|
||||
|
||||
if s.concrete:
|
||||
continue
|
||||
@@ -3107,7 +3107,7 @@ def write(s, c):
                elif named_str == 'SPACK_ROOT':
                    out.write(fmt % token_transform(spack.paths.prefix))
                elif named_str == 'SPACK_INSTALL':
                    out.write(fmt % token_transform(spack.store.store().root))
                    out.write(fmt % token_transform(spack.store.root))
                elif named_str == 'PREFIX':
                    out.write(fmt % token_transform(self.prefix))
                elif named_str.startswith('HASH'):
@@ -3149,7 +3149,7 @@ def _install_status(self):
        if not self.concrete:
            return None
        try:
            record = spack.store.store().db.get_record(self)
            record = spack.store.db.get_record(self)
            return record.installed
        except KeyError:
            return None
@@ -3159,7 +3159,7 @@ def _installed_explicitly(self):
        if not self.concrete:
            return None
        try:
            record = spack.store.store().db.get_record(self)
            record = spack.store.db.get_record(self)
            return record.explicit
        except KeyError:
            return None
@@ -3382,7 +3382,7 @@ def parse_compiler(self, text):
    def spec_by_hash(self):
        self.expect(ID)

        specs = spack.store.store().db.query()
        specs = spack.store.db.query()
        matches = [spec for spec in specs if
                   spec.dag_hash()[:len(self.token.value)] == self.token.value]
@@ -408,7 +408,7 @@ def fetch(self, mirror_only=False):
                    url, digest, expand=expand, extension=extension))
            if self.default_fetcher.cachable:
                fetchers.insert(
                    0, spack.caches.fetch_cache().fetcher(
                    0, spack.caches.fetch_cache.fetcher(
                        self.mirror_path, digest, expand=expand,
                        extension=extension))
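(Illustrative note, not part of the commit: call sites drop the parentheses because the module attribute is now a Singleton wrapper that forwards attribute access to the underlying cache.)

    # before: fetch_cache was a function returning the cache object
    spack.caches.fetch_cache().store(self.fetcher, self.mirror_path)
    # after: fetch_cache is the lazily constructed object itself
    spack.caches.fetch_cache.store(self.fetcher, self.mirror_path)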
@@ -455,7 +455,7 @@ def check(self):
        self.fetcher.check()

    def cache_local(self):
        spack.caches.fetch_cache().store(self.fetcher, self.mirror_path)
        spack.caches.fetch_cache.store(self.fetcher, self.mirror_path)

    def expand_archive(self):
        """Changes to the stage directory and attempts to expand the downloaded
@@ -43,6 +43,9 @@

"""
import os

import llnl.util.lang

import spack.paths
import spack.config
import spack.util.path
@@ -72,7 +75,7 @@ class Store(object):
        hash_length (int): length of the hashes used in the directory
            layout; spec hash suffixes will be truncated to this length
    """
    def __init__(self, root, path_scheme, hash_length):
    def __init__(self, root, path_scheme=None, hash_length=None):
        self.root = root
        self.db = spack.database.Database(root)
        self.layout = spack.directory_layout.YamlDirectoryLayout(
@@ -85,19 +88,21 @@ def reindex(self):
        return self.db.reindex(self.layout)


#: Singleton store instance
_store = None


def store():
def _store():
    """Get the singleton store instance."""
    global _store
    root = spack.config.get('config:install_tree', default_root)
    root = spack.util.path.canonicalize_path(root)

    if _store is None:
        root = spack.config.get('config:install_tree', default_root)
        root = spack.util.path.canonicalize_path(root)
    return Store(root,
                 spack.config.get('config:install_path_scheme'),
                 spack.config.get('config:install_hash_length'))

        _store = Store(root,
                       spack.config.get('config:install_path_scheme'),
                       spack.config.get('config:install_hash_length'))
    return _store


#: Singleton store instance
store = llnl.util.lang.Singleton(_store)

# convenience accessors for parts of the singleton store
root = llnl.util.lang.LazyReference(lambda: store.root)
db = llnl.util.lang.LazyReference(lambda: store.db)
layout = llnl.util.lang.LazyReference(lambda: store.layout)
extensions = llnl.util.lang.LazyReference(lambda: store.extensions)
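(Illustrative sketch, not part of the commit: the laziness this module relies on. `Singleton` calls its no-argument factory on first attribute access and caches the instance; `LazyReference` re-evaluates its function on every access, so `spack.store.db` always tracks the current store. The `Expensive` class and path below are hypothetical.)

    import llnl.util.lang
    import spack.store

    class Expensive(object):
        def __init__(self):
            print('constructed')        # runs once, on first real use

        def ping(self):
            return 'pong'

    lazy = llnl.util.lang.Singleton(Expensive)  # nothing constructed yet
    lazy.ping()   # factory fires here: prints 'constructed', returns 'pong'
    lazy.ping()   # cached instance is reused; no second construction

    # and with the new keyword defaults on Store.__init__, tests can build
    # a throwaway store from just a root path:
    tmp_store = spack.store.Store('/tmp/spack-test-root')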
@@ -44,9 +44,9 @@ def __call__(self, *args, **kwargs):
    monkeypatch.setattr(spack.package.PackageBase, 'do_clean', Counter())
    monkeypatch.setattr(spack.stage, 'purge', Counter())
    monkeypatch.setattr(
        spack.caches._fetch_cache, 'destroy', Counter(), raising=False)
        spack.caches.fetch_cache, 'destroy', Counter(), raising=False)
    monkeypatch.setattr(
        spack.caches._misc_cache, 'destroy', Counter())
        spack.caches.misc_cache, 'destroy', Counter())


@pytest.mark.usefixtures(
@@ -69,5 +69,5 @@ def test_function_calls(command_line, counters):
    # number of times
    assert spack.package.PackageBase.do_clean.call_count == counters[0]
    assert spack.stage.purge.call_count == counters[1]
    assert spack.caches.fetch_cache().destroy.call_count == counters[2]
    assert spack.caches.misc_cache().destroy.call_count == counters[3]
    assert spack.caches.fetch_cache.destroy.call_count == counters[2]
    assert spack.caches.misc_cache.destroy.call_count == counters[3]
@@ -60,7 +60,7 @@ def test_immediate_installed_dependencies(mock_packages, database):
    lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
    hashes = set([re.split(r'\s+', l)[0] for l in lines])

    expected = set([spack.store.store().db.query_one(s).dag_hash(7)
    expected = set([spack.store.db.query_one(s).dag_hash(7)
                    for s in ['mpich', 'callpath^mpich']])

    assert expected == hashes
@@ -74,7 +74,7 @@ def test_transitive_installed_dependencies(mock_packages, database):
    lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
    hashes = set([re.split(r'\s+', l)[0] for l in lines])

    expected = set([spack.store.store().db.query_one(s).dag_hash(7)
    expected = set([spack.store.db.query_one(s).dag_hash(7)
                    for s in ['zmpi', 'callpath^zmpi', 'fake',
                              'dyninst', 'libdwarf', 'libelf']])
@@ -58,10 +58,10 @@ def test_immediate_installed_dependents(mock_packages, database):
    lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
    hashes = set([re.split(r'\s+', l)[0] for l in lines])

    expected = set([spack.store.store().db.query_one(s).dag_hash(7)
    expected = set([spack.store.db.query_one(s).dag_hash(7)
                    for s in ['dyninst', 'libdwarf']])

    libelf = spack.store.store().db.query_one('libelf')
    libelf = spack.store.db.query_one('libelf')
    expected = set([d.dag_hash(7) for d in libelf.dependents()])

    assert expected == hashes
@@ -75,7 +75,7 @@ def test_transitive_installed_dependents(mock_packages, database):
    lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
    hashes = set([re.split(r'\s+', l)[0] for l in lines])

    expected = set([spack.store.store().db.query_one(s).dag_hash(7)
    expected = set([spack.store.db.query_one(s).dag_hash(7)
                    for s in ['zmpi', 'callpath^zmpi', 'mpileaks^zmpi']])

    assert expected == hashes
@@ -25,6 +25,7 @@
import argparse
import os
import filecmp
from six.moves import builtins

import pytest
@@ -316,28 +317,23 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):


@pytest.mark.disable_clean_stage_check
@pytest.mark.usefixtures(
    'mock_packages', 'mock_archive', 'mock_fetch', 'config', 'install_mockery'
)
@pytest.mark.parametrize('exc_typename,msg', [
    ('RuntimeError', 'something weird happened'),
    # ('RuntimeError', 'something weird happened'),
    ('KeyboardInterrupt', 'Ctrl-C strikes again')
])
def test_junit_output_with_errors(tmpdir, monkeypatch, exc_typename, msg):
def test_junit_output_with_errors(
        exc_typename, msg,
        mock_packages, mock_archive, mock_fetch, install_mockery,
        config, tmpdir, monkeypatch):

    def just_throw(*args, **kwargs):
        from six.moves import builtins
        exc_type = getattr(builtins, exc_typename)
        raise exc_type(msg)

    monkeypatch.setattr(spack.package.PackageBase, 'do_install', just_throw)

    with tmpdir.as_cwd():
        install(
            '--log-format=junit', '--log-file=test.xml',
            'libdwarf',
            fail_on_error=False
        )
        install('--log-format=junit', '--log-file=test.xml', 'libdwarf')

    files = tmpdir.listdir()
    filename = tmpdir.join('test.xml')

@@ -387,7 +383,7 @@ def test_extra_files_are_archived(mock_packages, mock_archive, mock_fetch,
    install('archive-files')

    archive_dir = os.path.join(
        spack.store.store().layout.metadata_path(s), 'archived-files'
        spack.store.layout.metadata_path(s), 'archived-files'
    )
    config_log = os.path.join(archive_dir, 'config.log')
    assert os.path.exists(config_log)
@@ -61,7 +61,7 @@ def test_recursive_uninstall():
    """Test recursive uninstall."""
    uninstall('-y', '-a', '--dependents', 'callpath')

    all_specs = spack.store.store().layout.all_specs()
    all_specs = spack.store.layout.all_specs()
    assert len(all_specs) == 8
    # query specs with multiple configurations
    mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
@@ -162,7 +162,7 @@ def test_concretize_with_provides_when(self):
        """Make sure insufficient versions of MPI are not in providers list when
        we ask for some advanced version.
        """
        repo = spack.repo.path()
        repo = spack.repo.path
        assert not any(
            s.satisfies('mpich2@:1.0') for s in repo.providers_for('mpi@2.1')
        )
@@ -182,7 +182,7 @@ def test_concretize_with_provides_when(self):
    def test_provides_handles_multiple_providers_of_same_vesrion(self):
        """
        """
        providers = spack.repo.path().providers_for('mpi@3.0')
        providers = spack.repo.path.providers_for('mpi@3.0')

        # Note that providers are repo-specific, so we don't misinterpret
        # providers, but vdeps are not namespace-specific, so we can
@@ -42,7 +42,7 @@ def concretize_scope(config, tmpdir):

    config.pop_scope()
    spack.package_prefs.PackagePrefs.clear_caches()
    spack.repo.path()._provider_index = None
    spack.repo.path._provider_index = None


def concretize(abstract_spec):
@@ -61,15 +61,15 @@
@pytest.fixture()
def config(tmpdir):
    """Mocks the configuration scope."""
    real_configuration = spack.config._configuration
    scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
              for name in ['low', 'high']]
    config = spack.config.Configuration(*scopes)
    spack.config._configuration = config
    real_configuration = spack.config.config

    yield config
    spack.config.config = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(tmpdir.join(name)))
          for name in ['low', 'high']])

    spack.config._configuration = real_configuration
    yield spack.config.config

    spack.config.config = real_configuration


@pytest.fixture()
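(Sketch, not part of the commit: the save/override/restore shape these fixtures now share. The module and helper names are generic placeholders.)

    @pytest.fixture()
    def patched_singleton(tmpdir):
        saved = somemodule.singleton                        # save the module-level object
        somemodule.singleton = make_test_instance(tmpdir)   # hypothetical test double
        yield somemodule.singleton
        somemodule.singleton = saved                        # restore on teardown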
@@ -242,7 +242,7 @@ def test_write_key_to_disk(config, compiler_specs):
    spack.config.set('compilers', b_comps['compilers'], scope='high')

    # Clear caches so we're forced to read from disk.
    spack.config.config().clear_caches()
    spack.config.config.clear_caches()

    # Same check again, to ensure consistency.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@@ -255,7 +255,7 @@ def test_write_to_same_priority_file(config, compiler_specs):
    spack.config.set('compilers', b_comps['compilers'], scope='low')

    # Clear caches so we're forced to read from disk.
    spack.config.config().clear_caches()
    spack.config.config.clear_caches()

    # Same check again, to ensure consistency.
    check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@@ -155,7 +155,7 @@ def fetch(self):
        def __str__(self):
            return "[mock fetch cache]"

    monkeypatch.setattr(spack.caches, '_fetch_cache', MockCache())
    monkeypatch.setattr(spack.caches, 'fetch_cache', MockCache())
# FIXME: The lines below should better be added to a fixture with

@@ -242,148 +242,104 @@ def config(configuration_dir):
    # Set up a mock config scope
    spack.package_prefs.PackagePrefs.clear_caches()

    real_configuration = spack.config._configuration
    real_configuration = spack.config.config

    scopes = [
        spack.config.ConfigScope(name, str(configuration_dir.join(name)))
        for name in ['site', 'system', 'user']]
    config = spack.config.Configuration(*scopes)
    spack.config._configuration = config
    spack.config.config = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(configuration_dir.join(name)))
          for name in ['site', 'system', 'user']])

    yield config
    yield spack.config.config

    spack.config._configuration = real_configuration
    spack.config.config = real_configuration
    spack.package_prefs.PackagePrefs.clear_caches()


def _populate(mock_db):
    """Populate a mock database with packages.

    Here is what the mock DB looks like:

    o  mpileaks     o  mpileaks'    o  mpileaks''
    |\              |\              |\
    | o  callpath   | o  callpath'  | o  callpath''
    |/|             |/|             |/|
    o |  mpich      o |  mpich2     o |  zmpi
    | |                             o |  fake
    | |                               |
    | |______________________________/
    | .____________________________/
    |/
    o  dyninst
    |\
    | o  libdwarf
    |/
    o  libelf
    """
    def _install(spec):
        s = spack.spec.Spec(spec).concretized()
        pkg = spack.repo.get(s)
        pkg.do_install(fake=True)

    # Transaction used to avoid repeated writes.
    with mock_db.write_transaction():
        _install('mpileaks ^mpich')
        _install('mpileaks ^mpich2')
        _install('mpileaks ^zmpi')
        _install('externaltest')


@pytest.fixture(scope='module')
def database(tmpdir_factory, mock_packages, config):
    """Creates a mock database with some packages installed; note that
    the ref count for dyninst here will be 3, as it's recycled
    across each install.
    """
    # Here is what the mock DB looks like:
    #
    # o  mpileaks     o  mpileaks'    o  mpileaks''
    # |\              |\              |\
    # | o  callpath   | o  callpath'  | o  callpath''
    # |/|             |/|             |/|
    # o |  mpich      o |  mpich2     o |  zmpi
    # | |                             o |  fake
    # | |                               |
    # | |______________________________/
    # | .____________________________/
    # |/
    # o  dyninst
    # |\
    # | o  libdwarf
    # |/
    # o  libelf
    # save the real store
    real_store = spack.store.store

    # Make a fake install directory
    install_path = tmpdir_factory.mktemp('install_for_database')
    spack_install_path = spack.store.store().root

    spack.store.store().root = str(install_path)
    install_layout = spack.directory_layout.YamlDirectoryLayout(
        str(install_path))
    spack_install_layout = spack.store.store().layout
    spack.store.store().layout = install_layout
    # Make fake store (database and install layout)
    tmp_store = spack.store.Store(str(install_path))
    spack.store.store = tmp_store

    # Make fake database and fake install directory.
    install_db = spack.database.Database(str(install_path))
    spack_install_db = spack.store.store().db
    spack.store.store().db = install_db
    _populate(tmp_store.db)

    Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
    Database = collections.namedtuple(
        'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'])
    yield tmp_store.db

    real = Entry(
        path=spack_install_path,
        layout=spack_install_layout,
        db=spack_install_db)
    mock = Entry(path=install_path, layout=install_layout, db=install_db)

    def _install(spec):
        s = spack.spec.Spec(spec)
        s.concretize()
        pkg = spack.repo.get(s)
        pkg.do_install(fake=True)

    def _uninstall(spec):
        spec.package.do_uninstall(spec)

    def _refresh():
        with spack.store.store().db.write_transaction():
            for spec in spack.store.store().db.query():
                _uninstall(spec)
            _install('mpileaks ^mpich')
            _install('mpileaks ^mpich2')
            _install('mpileaks ^zmpi')
            _install('externaltest')

    t = Database(
        real=real,
        mock=mock,
        install=_install,
        uninstall=_uninstall,
        refresh=_refresh)

    # Transaction used to avoid repeated writes.
    with spack.store.store().db.write_transaction():
        t.install('mpileaks ^mpich')
        t.install('mpileaks ^mpich2')
        t.install('mpileaks ^zmpi')
        t.install('externaltest')

    yield t

    with spack.store.store().db.write_transaction():
        for spec in spack.store.store().db.query():
    with tmp_store.db.write_transaction():
        for spec in tmp_store.db.query():
            if spec.package.installed:
                t.uninstall(spec)
                PackageBase.uninstall_by_spec(spec, force=True)
            else:
                spack.store.store().db.remove(spec)
                tmp_store.db.remove(spec)

    install_path.remove(rec=1)
    spack.store.store().root = spack_install_path
    spack.store.store().layout = spack_install_layout
    spack.store.store().db = spack_install_db
    spack.store.store = real_store


@pytest.fixture()
def refresh_db_on_exit(database):
    """Restores the state of the database after a test."""
    yield
    database.refresh()
@pytest.fixture(scope='function')
def mutable_database(database):
    """For tests that need to modify the database instance."""
    yield database
    with database.write_transaction():
        for spec in spack.store.db.query():
            PackageBase.uninstall_by_spec(spec, force=True)
    _populate(database)


@pytest.fixture()
@pytest.fixture(scope='function')
def install_mockery(tmpdir, config, mock_packages):
    """Hooks a fake install directory, DB, and stage directory into Spack."""
    layout = spack.store.store().layout
    extensions = spack.store.store().extensions
    db = spack.store.store().db
    new_opt = str(tmpdir.join('opt'))

    # Use a fake install directory to avoid conflicts between
    # installed pkgs and mock packages.
    store = spack.store.store()
    store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
    store.extensions = spack.directory_layout.YamlExtensionsLayout(
        new_opt, spack.store.store().layout)
    store.db = spack.database.Database(new_opt)
    real_store = spack.store.store
    spack.store.store = spack.store.Store(str(tmpdir.join('opt')))

    # We use a fake package, so temporarily disable checksumming
    with spack.config.override('config:checksum', False):
        yield

    # Restore Spack's layout.
    store.layout = layout
    store.extensions = extensions
    store.db = db
    spack.store.store = real_store


@pytest.fixture()
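(Sketch, not part of the commit: a test that mutates the DB now just requests the new fixture, and the teardown above repopulates it. The spec name is one from the mock DB.)

    def test_removes_something(mutable_database):
        # mutable_database is repopulated after the test by the fixture teardown
        with mutable_database.write_transaction():
            mutable_database.remove('libelf')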
@@ -47,16 +47,16 @@ def _print_ref_counts():
    recs = []

    def add_rec(spec):
        cspecs = spack.store.store().db.query(spec, installed=any)
        cspecs = spack.store.db.query(spec, installed=any)

        if not cspecs:
            recs.append("[ %-7s ] %-20s-" % ('', spec))
        else:
            key = cspecs[0].dag_hash()
            rec = spack.store.store().db.get_record(cspecs[0])
            rec = spack.store.db.get_record(cspecs[0])
            recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))

    with spack.store.store().db.read_transaction():
    with spack.store.db.read_transaction():
        add_rec('mpileaks ^mpich')
        add_rec('callpath ^mpich')
        add_rec('mpich')
@@ -79,7 +79,7 @@ def add_rec(spec):

def _check_merkleiness():
    """Ensure the spack database is a valid merkle graph."""
    all_specs = spack.store.store().db.query(installed=any)
    all_specs = spack.store.db.query(installed=any)

    seen = {}
    for spec in all_specs:
@@ -91,10 +91,10 @@ def _check_merkleiness():
        assert seen[hash_key] == id(dep)


def _check_db_sanity(install_db):
def _check_db_sanity(database):
    """Utility function to check db against install layout."""
    pkg_in_layout = sorted(spack.store.store().layout.all_specs())
    actual = sorted(install_db.query())
    pkg_in_layout = sorted(spack.store.layout.all_specs())
    actual = sorted(database.query())

    externals = sorted([x for x in actual if x.external])
    nexpected = len(pkg_in_layout) + len(externals)
@@ -109,19 +109,19 @@ def _check_db_sanity(install_db):
    _check_merkleiness()


def _check_remove_and_add_package(install_db, spec):
def _check_remove_and_add_package(database, spec):
    """Remove a spec from the DB, then add it and make sure everything's
    still ok once it is added. This checks that it was
    removed, that it's back when added again, and that ref
    counts are consistent.
    """
    original = install_db.query()
    install_db._check_ref_counts()
    original = database.query()
    database._check_ref_counts()

    # Remove spec
    concrete_spec = install_db.remove(spec)
    install_db._check_ref_counts()
    remaining = install_db.query()
    concrete_spec = database.remove(spec)
    database._check_ref_counts()
    remaining = database.query()

    # ensure spec we removed is gone
    assert len(original) - 1 == len(remaining)

@@ -129,14 +129,14 @@ def _check_remove_and_add_package(install_db, spec):
    assert concrete_spec not in remaining

    # add it back and make sure everything is ok.
    install_db.add(concrete_spec, spack.store.store().layout)
    installed = install_db.query()
    database.add(concrete_spec, spack.store.layout)
    installed = database.query()
    assert concrete_spec in installed
    assert installed == original

    # sanity check against directory layout and check ref counts.
    _check_db_sanity(install_db)
    install_db._check_ref_counts()
    _check_db_sanity(database)
    database._check_ref_counts()


def _mock_install(spec):
@@ -147,7 +147,7 @@ def _mock_install(spec):


def _mock_remove(spec):
    specs = spack.store.store().db.query(spec)
    specs = spack.store.db.query(spec)
    assert len(specs) == 1
    spec = specs[0]
    spec.package.do_uninstall(spec)
@@ -156,8 +156,7 @@ def _mock_remove(spec):
def test_default_queries(database):
    # Testing a package whose name *doesn't* start with 'lib'
    # to ensure the library has 'lib' prepended to the name
    install_db = database.mock.db
    rec = install_db.get_record('zmpi')
    rec = database.get_record('zmpi')

    spec = rec.spec

@@ -176,8 +175,7 @@ def test_default_queries(database):

    # Testing a package whose name *does* start with 'lib'
    # to ensure the library doesn't have a double 'lib' prefix
    install_db = database.mock.db
    rec = install_db.get_record('libelf')
    rec = database.get_record('libelf')

    spec = rec.spec
@@ -197,16 +195,15 @@ def test_default_queries(database):

def test_005_db_exists(database):
    """Make sure db cache file exists after creating."""
    install_path = database.mock.path
    index_file = install_path.join('.spack-db', 'index.json')
    lock_file = install_path.join('.spack-db', 'lock')
    index_file = os.path.join(database.root, '.spack-db', 'index.json')
    lock_file = os.path.join(database.root, '.spack-db', 'lock')
    assert os.path.exists(str(index_file))
    assert os.path.exists(str(lock_file))


def test_010_all_install_sanity(database):
    """Ensure that the install layout reflects what we think it does."""
    all_specs = spack.store.store().layout.all_specs()
    all_specs = spack.store.layout.all_specs()
    assert len(all_specs) == 14

    # Query specs with multiple configurations
@@ -241,12 +238,12 @@ def test_010_all_install_sanity(database):

def test_015_write_and_read(database):
    # write and read DB
    with spack.store.store().db.write_transaction():
        specs = spack.store.store().db.query()
        recs = [spack.store.store().db.get_record(s) for s in specs]
    with spack.store.db.write_transaction():
        specs = spack.store.db.query()
        recs = [spack.store.db.get_record(s) for s in specs]

    for spec, rec in zip(specs, recs):
        new_rec = spack.store.store().db.get_record(spec)
        new_rec = spack.store.db.get_record(spec)
        assert new_rec.ref_count == rec.ref_count
        assert new_rec.spec == rec.spec
        assert new_rec.path == rec.path
@@ -255,23 +252,20 @@ def test_015_write_and_read(database):

def test_020_db_sanity(database):
    """Make sure query() returns what's actually in the db."""
    install_db = database.mock.db
    _check_db_sanity(install_db)
    _check_db_sanity(database)


def test_025_reindex(database):
    """Make sure reindex works and ref counts are valid."""
    install_db = database.mock.db
    spack.store.store().reindex()
    _check_db_sanity(install_db)
    spack.store.store.reindex()
    _check_db_sanity(database)


def test_030_db_sanity_from_another_process(database, refresh_db_on_exit):
    install_db = database.mock.db

def test_030_db_sanity_from_another_process(mutable_database):
    def read_and_modify():
        _check_db_sanity(install_db)  # check that other process can read DB
        with install_db.write_transaction():
        # check that other process can read DB
        _check_db_sanity(mutable_database)
        with mutable_database.write_transaction():
            _mock_remove('mpileaks ^zmpi')

    p = multiprocessing.Process(target=read_and_modify, args=())
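(Note, not part of the commit: `spack.store.store` above is the Singleton wrapper itself, so method calls pass straight through to the lazily built Store; the old spelling needed a call first, `spack.store.store().reindex()`.)

    import spack.store

    # attribute lookup triggers/reuses the real Store, then calls reindex
    spack.store.store.reindex()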
@@ -279,181 +273,166 @@ def read_and_modify():
    p.join()

    # ensure child process change is visible in parent process
    with install_db.read_transaction():
        assert len(install_db.query('mpileaks ^zmpi')) == 0
    with mutable_database.read_transaction():
        assert len(mutable_database.query('mpileaks ^zmpi')) == 0


def test_040_ref_counts(database):
    """Ensure that we got ref counts right when we read the DB."""
    install_db = database.mock.db
    install_db._check_ref_counts()
    database._check_ref_counts()


def test_050_basic_query(database):
    """Ensure querying database is consistent with what is installed."""
    install_db = database.mock.db
    # query everything
    assert len(spack.store.store().db.query()) == 16
    assert len(spack.store.db.query()) == 16

    # query specs with multiple configurations
    mpileaks_specs = install_db.query('mpileaks')
    callpath_specs = install_db.query('callpath')
    mpi_specs = install_db.query('mpi')
    mpileaks_specs = database.query('mpileaks')
    callpath_specs = database.query('callpath')
    mpi_specs = database.query('mpi')

    assert len(mpileaks_specs) == 3
    assert len(callpath_specs) == 3
    assert len(mpi_specs) == 3

    # query specs with single configurations
    dyninst_specs = install_db.query('dyninst')
    libdwarf_specs = install_db.query('libdwarf')
    libelf_specs = install_db.query('libelf')
    dyninst_specs = database.query('dyninst')
    libdwarf_specs = database.query('libdwarf')
    libelf_specs = database.query('libelf')

    assert len(dyninst_specs) == 1
    assert len(libdwarf_specs) == 1
    assert len(libelf_specs) == 1

    # Query by dependency
    assert len(install_db.query('mpileaks ^mpich')) == 1
    assert len(install_db.query('mpileaks ^mpich2')) == 1
    assert len(install_db.query('mpileaks ^zmpi')) == 1
    assert len(database.query('mpileaks ^mpich')) == 1
    assert len(database.query('mpileaks ^mpich2')) == 1
    assert len(database.query('mpileaks ^zmpi')) == 1

    # Query by date
    assert len(install_db.query(start_date=datetime.datetime.min)) == 16
    assert len(install_db.query(start_date=datetime.datetime.max)) == 0
    assert len(install_db.query(end_date=datetime.datetime.min)) == 0
    assert len(install_db.query(end_date=datetime.datetime.max)) == 16
    assert len(database.query(start_date=datetime.datetime.min)) == 16
    assert len(database.query(start_date=datetime.datetime.max)) == 0
    assert len(database.query(end_date=datetime.datetime.min)) == 0
    assert len(database.query(end_date=datetime.datetime.max)) == 16


def test_060_remove_and_add_root_package(database):
    install_db = database.mock.db
    _check_remove_and_add_package(install_db, 'mpileaks ^mpich')
    _check_remove_and_add_package(database, 'mpileaks ^mpich')


def test_070_remove_and_add_dependency_package(database):
    install_db = database.mock.db
    _check_remove_and_add_package(install_db, 'dyninst')
    _check_remove_and_add_package(database, 'dyninst')


def test_080_root_ref_counts(database):
    install_db = database.mock.db
    rec = install_db.get_record('mpileaks ^mpich')
    rec = database.get_record('mpileaks ^mpich')

    # Remove a top-level spec from the DB
    install_db.remove('mpileaks ^mpich')
    database.remove('mpileaks ^mpich')

    # record no longer in DB
    assert install_db.query('mpileaks ^mpich', installed=any) == []
    assert database.query('mpileaks ^mpich', installed=any) == []

    # record's deps have updated ref_counts
    assert install_db.get_record('callpath ^mpich').ref_count == 0
    assert install_db.get_record('mpich').ref_count == 1
    assert database.get_record('callpath ^mpich').ref_count == 0
    assert database.get_record('mpich').ref_count == 1

    # Put the spec back
    install_db.add(rec.spec, spack.store.store().layout)
    database.add(rec.spec, spack.store.layout)

    # record is present again
    assert len(install_db.query('mpileaks ^mpich', installed=any)) == 1
    assert len(database.query('mpileaks ^mpich', installed=any)) == 1

    # dependencies have ref counts updated
    assert install_db.get_record('callpath ^mpich').ref_count == 1
    assert install_db.get_record('mpich').ref_count == 2
    assert database.get_record('callpath ^mpich').ref_count == 1
    assert database.get_record('mpich').ref_count == 2


def test_090_non_root_ref_counts(database):
    install_db = database.mock.db

    install_db.get_record('mpileaks ^mpich')
    install_db.get_record('callpath ^mpich')
    database.get_record('mpileaks ^mpich')
    database.get_record('callpath ^mpich')

    # "force remove" a non-root spec from the DB
    install_db.remove('callpath ^mpich')
    database.remove('callpath ^mpich')

    # record still in DB but marked uninstalled
    assert install_db.query('callpath ^mpich', installed=True) == []
    assert len(install_db.query('callpath ^mpich', installed=any)) == 1
    assert database.query('callpath ^mpich', installed=True) == []
    assert len(database.query('callpath ^mpich', installed=any)) == 1

    # record and its deps have same ref_counts
    assert install_db.get_record(
    assert database.get_record(
        'callpath ^mpich', installed=any
    ).ref_count == 1
    assert install_db.get_record('mpich').ref_count == 2
    assert database.get_record('mpich').ref_count == 2

    # remove only dependent of uninstalled callpath record
    install_db.remove('mpileaks ^mpich')
    database.remove('mpileaks ^mpich')

    # record and parent are completely gone.
    assert install_db.query('mpileaks ^mpich', installed=any) == []
    assert install_db.query('callpath ^mpich', installed=any) == []
    assert database.query('mpileaks ^mpich', installed=any) == []
    assert database.query('callpath ^mpich', installed=any) == []

    # mpich ref count updated properly.
    mpich_rec = install_db.get_record('mpich')
    mpich_rec = database.get_record('mpich')
    assert mpich_rec.ref_count == 0


def test_100_no_write_with_exception_on_remove(database):
    install_db = database.mock.db

    def fail_while_writing():
        with install_db.write_transaction():
        with database.write_transaction():
            _mock_remove('mpileaks ^zmpi')
            raise Exception()

    with install_db.read_transaction():
        assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
    with database.read_transaction():
        assert len(database.query('mpileaks ^zmpi', installed=any)) == 1

    with pytest.raises(Exception):
        fail_while_writing()

    # reload DB and make sure zmpi is still there.
    with install_db.read_transaction():
        assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
    with database.read_transaction():
        assert len(database.query('mpileaks ^zmpi', installed=any)) == 1


def test_110_no_write_with_exception_on_install(database):
    install_db = database.mock.db

    def fail_while_writing():
        with install_db.write_transaction():
        with database.write_transaction():
            _mock_install('cmake')
            raise Exception()

    with install_db.read_transaction():
        assert install_db.query('cmake', installed=any) == []
    with database.read_transaction():
        assert database.query('cmake', installed=any) == []

    with pytest.raises(Exception):
        fail_while_writing()

    # reload DB and make sure cmake was not written.
    with install_db.read_transaction():
        assert install_db.query('cmake', installed=any) == []
    with database.read_transaction():
        assert database.query('cmake', installed=any) == []


def test_115_reindex_with_packages_not_in_repo(database, refresh_db_on_exit):
    install_db = database.mock.db

def test_115_reindex_with_packages_not_in_repo(mutable_database):
    # Don't add any package definitions to this repository; the idea is that
    # packages should not have to be defined in the repository once they
    # are installed
    with spack.repo.swap(MockPackageMultiRepo([])):
        spack.store.store().reindex()
        _check_db_sanity(install_db)
        spack.store.store.reindex()
        _check_db_sanity(mutable_database)


def test_external_entries_in_db(database):
    install_db = database.mock.db

    rec = install_db.get_record('mpileaks ^zmpi')
    rec = database.get_record('mpileaks ^zmpi')
    assert rec.spec.external_path is None
    assert rec.spec.external_module is None

    rec = install_db.get_record('externaltool')
    rec = database.get_record('externaltool')
    assert rec.spec.external_path == '/path/to/external_tool'
    assert rec.spec.external_module is None
    assert rec.explicit is False

    rec.spec.package.do_install(fake=True, explicit=True)
    rec = install_db.get_record('externaltool')
    rec = database.get_record('externaltool')
    assert rec.spec.external_path == '/path/to/external_tool'
    assert rec.spec.external_module is None
    assert rec.explicit is True
@@ -104,7 +104,7 @@ def test_read_and_write_spec(
    layout.
    """
    layout, tmpdir = layout_and_dir
    packages = list(spack.repo.path().all_packages())[:max_packages]
    packages = list(spack.repo.path.all_packages())[:max_packages]

    for pkg in packages:
        if pkg.name.startswith('external'):

@@ -226,7 +226,7 @@ def test_handle_unknown_package(
def test_find(layout_and_dir, config, mock_packages):
    """Test that finding specs within an install layout works."""
    layout, _ = layout_and_dir
    packages = list(spack.repo.path().all_packages())[:max_packages]
    packages = list(spack.repo.path.all_packages())[:max_packages]

    # Create install prefixes for all packages in the list
    installed_specs = {}
@@ -65,7 +65,7 @@ def test_all_virtual_packages_have_default_providers():
    """All virtual packages must have a default provider explicitly set."""
    defaults = spack.config.get('packages', scope='defaults')
    default_providers = defaults['all']['providers']
    providers = spack.repo.path().provider_index.providers
    providers = spack.repo.path.provider_index.providers

    for provider in providers:
        assert provider in default_providers
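(Sketch, not part of the commit: provider queries also go through the repo singleton; 'mpi' is one of the virtual packages exercised in these tests.)

    import spack.repo

    # returns the specs providing the virtual package
    providers = spack.repo.path.providers_for('mpi')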
@@ -261,7 +261,7 @@ def _check_hash_parse(self, spec):

    @pytest.mark.db
    def test_spec_by_hash(self, database):
        specs = database.mock.db.query()
        specs = database.query()
        assert len(specs)  # make sure something's in the DB

        for spec in specs:

@@ -269,9 +269,9 @@ def test_spec_by_hash(self, database):

    @pytest.mark.db
    def test_dep_spec_by_hash(self, database):
        mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
        zmpi = database.mock.db.query_one('zmpi')
        fake = database.mock.db.query_one('fake')
        mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
        zmpi = database.query_one('zmpi')
        fake = database.query_one('fake')

        assert 'fake' in mpileaks_zmpi
        assert 'zmpi' in mpileaks_zmpi

@@ -297,8 +297,8 @@ def test_dep_spec_by_hash(self, database):

    @pytest.mark.db
    def test_multiple_specs_with_hash(self, database):
        mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
        callpath_mpich2 = database.mock.db.query_one('callpath ^mpich2')
        mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
        callpath_mpich2 = database.query_one('callpath ^mpich2')

        # name + hash + separate hash
        specs = sp.parse('mpileaks /' + mpileaks_zmpi.dag_hash() +

@@ -336,8 +336,8 @@ def test_ambiguous_hash(self, database):
        x2 = Spec('a')
        x2._hash = 'xx'
        x2._concrete = True
        database.mock.db.add(x1, spack.store.store().layout)
        database.mock.db.add(x2, spack.store.store().layout)
        database.add(x1, spack.store.layout)
        database.add(x2, spack.store.layout)

        # ambiguity in first hash character
        self._check_raises(AmbiguousHashError, ['/x'])

@@ -347,11 +347,11 @@ def test_ambiguous_hash(self, database):

    @pytest.mark.db
    def test_invalid_hash(self, database):
        mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
        zmpi = database.mock.db.query_one('zmpi')
        mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
        zmpi = database.query_one('zmpi')

        mpileaks_mpich = database.mock.db.query_one('mpileaks ^mpich')
        mpich = database.mock.db.query_one('mpich')
        mpileaks_mpich = database.query_one('mpileaks ^mpich')
        mpich = database.query_one('mpich')

        # name + incompatible hash
        self._check_raises(InvalidHashError, [

@@ -366,7 +366,7 @@ def test_invalid_hash(self, database):
    @pytest.mark.db
    def test_nonexistent_hash(self, database):
        """Ensure we get errors for nonexistent hashes."""
        specs = database.mock.db.query()
        specs = database.query()

        # This hash shouldn't be in the test DB. What are the odds :)
        no_such_hash = 'aaaaaaaaaaaaaaa'

@@ -385,11 +385,11 @@ def test_redundant_spec(self, database):
        specs only raise errors if constraints cause a contradiction?
        """
        mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
        callpath_zmpi = database.mock.db.query_one('callpath ^zmpi')
        dyninst = database.mock.db.query_one('dyninst')
        mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
        callpath_zmpi = database.query_one('callpath ^zmpi')
        dyninst = database.query_one('dyninst')

        mpileaks_mpich2 = database.mock.db.query_one('mpileaks ^mpich2')
        mpileaks_mpich2 = database.query_one('mpileaks ^mpich2')

        redundant_specs = [
            # redundant compiler
@@ -131,7 +131,7 @@ def package_hash(spec, content=None):
def package_ast(spec):
    spec = Spec(spec)

    filename = spack.repo.path().filename_for_package_name(spec.name)
    filename = spack.repo.path.filename_for_package_name(spec.name)
    with open(filename) as f:
        text = f.read()
        root = ast.parse(text)
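(Sketch, not part of the commit: the module-level `spack.repo.path` singleton is used exactly like the object the old `path()` call returned. The package name is an arbitrary example.)

    import spack.repo

    # RepoPath methods are reached directly through the singleton
    filename = spack.repo.path.filename_for_package_name('zlib')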
@@ -50,8 +50,8 @@ class Aspell(AutotoolsPackage):
    # - dest_dir instead of self.prefix in tree.(find_conflict|merge)()
    def activate(self, extension, **kwargs):
        extensions_layout = kwargs.get("extensions_layout",
                                       spack.store.store().extensions)
        if extensions_layout is not spack.store.store().extensions:
                                       spack.store.extensions)
        if extensions_layout is not spack.store.extensions:
            raise ExtensionError(
                'aspell does not support non-global extensions')

@@ -60,7 +60,7 @@ def activate(self, extension, **kwargs):
        tree = LinkTree(extension.prefix.lib)

        def ignore(filename):
            return (filename in spack.store.store().layout.hidden_file_paths or
            return (filename in spack.store.layout.hidden_file_paths or
                    kwargs.get('ignore', lambda f: False)(filename))

        conflict = tree.find_conflict(dest_dir, ignore=ignore)

@@ -71,8 +71,8 @@ def ignore(filename):

    def deactivate(self, extension, **kwargs):
        extensions_layout = kwargs.get("extensions_layout",
                                       spack.store.store().extensions)
        if extensions_layout is not spack.store.store().extensions:
                                       spack.store.extensions)
        if extensions_layout is not spack.store.extensions:
            raise ExtensionError(
                'aspell does not support non-global extensions')

@@ -80,7 +80,7 @@ def deactivate(self, extension, **kwargs):
        dest_dir = aspell('dump', 'config', 'dict-dir', output=str).strip()

        def ignore(filename):
            return (filename in spack.store.store().layout.hidden_file_paths or
            return (filename in spack.store.layout.hidden_file_paths or
                    kwargs.get('ignore', lambda f: False)(filename))

        tree = LinkTree(extension.prefix.lib)
@@ -193,7 +193,7 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
        # the login node components with this spack invocation. We
        # need these paths to be the ones created in the CNL
        # spack invocation.
        store = spack.store.store()
        store = spack.store
        be_cbtf = store.db.query_one('cbtf arch=cray-CNL-haswell')
        be_cbtfk = store.db.query_one('cbtf-krell arch=cray-CNL-haswell')
        be_papi = store.db.query_one('papi arch=cray-CNL-haswell')

@@ -197,7 +197,7 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
        # spec['cbtf'].prefix is the login node value for this build, as
        # we only get here when building the login node components and
        # that is all that is known to spack.
        store = spack.store.store()
        store = spack.store
        be_ck = store.db.query_one('cbtf-krell arch=cray-CNL-haswell')

        # Equivalent to install-tool cmake arg:
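(Note, not part of the commit: after this change the `spack.store` module itself stands in for the old store object, so it can be bound to a local name and queried the same way. A minimal sketch using a query string from the diff above:)

    import spack.store

    store = spack.store   # the module now acts as the store facade
    rec = store.db.query_one('cbtf arch=cray-CNL-haswell')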
@@ -264,7 +264,7 @@ def activate(self, ext_pkg, **args):
        super(Perl, self).activate(ext_pkg, **args)

        extensions_layout = args.get("extensions_layout",
                                     spack.store.store().extensions)
                                     spack.store.extensions)

        exts = extensions_layout.extension_map(self.spec)
        exts[ext_pkg.name] = ext_pkg.spec

@@ -276,7 +276,7 @@ def deactivate(self, ext_pkg, **args):
        super(Perl, self).deactivate(ext_pkg, **args)

        extensions_layout = args.get("extensions_layout",
                                     spack.store.store().extensions)
                                     spack.store.extensions)

        exts = extensions_layout.extension_map(self.spec)
        # Make deactivate idempotent
@@ -320,7 +320,7 @@ def _save_distutil_vars(self, prefix):
        output_filename = None
        try:
            output_filename = join_path(
                spack.store.store().layout.metadata_path(self.spec),
                spack.store.layout.metadata_path(self.spec),
                Python._DISTUTIL_CACHE_FILENAME)
            with open(output_filename, 'w') as output_file:
                sjson.dump(self._distutil_vars, output_file)

@@ -342,7 +342,7 @@ def _load_distutil_vars(self):
        if not self._distutil_vars and self.installed:
            try:
                input_filename = join_path(
                    spack.store.store().layout.metadata_path(self.spec),
                    spack.store.layout.metadata_path(self.spec),
                    Python._DISTUTIL_CACHE_FILENAME)
                if os.path.isfile(input_filename):
                    with open(input_filename) as input_file:

@@ -680,7 +680,7 @@ def activate(self, ext_pkg, **args):
        args.update(ignore=ignore)

        extensions_layout = args.get("extensions_layout",
                                     spack.store.store().extensions)
                                     spack.store.extensions)

        super(Python, self).activate(ext_pkg, **args)

@@ -696,7 +696,7 @@ def deactivate(self, ext_pkg, **args):
        super(Python, self).deactivate(ext_pkg, **args)

        extensions_layout = args.get("extensions_layout",
                                     spack.store.store().extensions)
                                     spack.store.extensions)

        exts = extensions_layout.extension_map(self.spec)
        # Make deactivate idempotent
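(Sketch, not part of the commit: the pattern the Python package uses above, reduced to a helper. The file name is hypothetical.)

    import os.path
    import spack.store

    def metadata_file(spec, name):
        # files like the distutils cache live under the spec's metadata directory
        return os.path.join(spack.store.layout.metadata_path(spec), name)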