init: initialize spack.store lazily

- spack.store was previously initialized at module level, which meant
  the store had to be set up on every Spack invocation, whether or not
  the command actually used it.

- this moves the state in spack.store into a singleton so that the
  store is only initialized when it is actually needed.

Todd Gamblin 2018-05-10 11:45:44 -07:00, committed by scheibelp
parent c615d2be06
commit 6f2cac9565
36 changed files with 239 additions and 198 deletions
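
The change is mechanical at every call site: module-level attributes
become attributes of a lazily constructed singleton. A minimal
before/after sketch, using calls that appear in the hunks below:

    import spack.store

    # before: module attributes, built eagerly on every `import spack.store`
    specs = spack.store.db.query()
    root = spack.store.layout.root

    # after: the Store is built on the first spack.store.store() call and
    # reused afterwards, so commands that never touch it skip the setup cost
    specs = spack.store.store().db.query()
    root = spack.store.store().layout.root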

View File

@ -148,8 +148,8 @@ def write_buildinfo_file(prefix, workdir, rel=False):
# Check if the file contains a string with the installroot.
# This cuts down on the number of files added to the list
# of files potentially needing relocation
if relocate.strings_contains_installroot(path_name,
spack.store.layout.root):
if relocate.strings_contains_installroot(
path_name, spack.store.store().layout.root):
filetype = relocate.get_filetype(path_name)
if relocate.needs_binary_relocation(filetype, os_id):
rel_path_name = os.path.relpath(path_name, prefix)
@ -161,9 +161,9 @@ def write_buildinfo_file(prefix, workdir, rel=False):
# Create buildinfo data and write it to disk
buildinfo = {}
buildinfo['relative_rpaths'] = rel
buildinfo['buildpath'] = spack.store.layout.root
buildinfo['relative_prefix'] = os.path.relpath(prefix,
spack.store.layout.root)
buildinfo['buildpath'] = spack.store.store().layout.root
buildinfo['relative_prefix'] = os.path.relpath(
prefix, spack.store.store().layout.root)
buildinfo['relocate_textfiles'] = text_to_relocate
buildinfo['relocate_binaries'] = binary_to_relocate
filename = buildinfo_file_name(workdir)
@ -333,8 +333,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
# Add original install prefix relative to layout root to spec.yaml.
# This will be used to determine if the directory layout has changed.
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(spec.prefix,
spack.store.layout.root)
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.store().layout.root)
spec_dict['buildinfo'] = buildinfo
with open(specfile_path, 'w') as outfile:
outfile.write(yaml.dump(spec_dict))
@ -414,7 +414,7 @@ def relocate_package(workdir, allow_root):
Relocate the given package
"""
buildinfo = read_buildinfo_file(workdir)
new_path = spack.store.layout.root
new_path = spack.store.store().layout.root
old_path = buildinfo['buildpath']
rel = buildinfo.get('relative_rpaths', False)
if rel:
@ -493,7 +493,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
"It cannot be installed.")
new_relative_prefix = str(os.path.relpath(spec.prefix,
spack.store.layout.root))
spack.store.store().layout.root))
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get('buildinfo', {})
old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)

View File

@ -256,7 +256,7 @@ def set_build_environment_variables(pkg, env, dirty):
env.set(SPACK_PREFIX, pkg.prefix)
# Install root prefix
env.set(SPACK_INSTALL, spack.store.root)
env.set(SPACK_INSTALL, spack.store.store().root)
# Stuff in here sanitizes the build environment to eliminate
# anything the user has set that may interfere.

View File

@ -182,7 +182,7 @@ def elide_list(line_list, max_num=10):
def disambiguate_spec(spec):
matching_specs = spack.store.db.query(spec)
matching_specs = spack.store.store().db.query(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)

View File

@ -54,9 +54,10 @@ def activate(parser, args):
if not spec.package.is_extension:
tty.die("%s is not an extension." % spec.name)
layout = spack.store.extensions
layout = spack.store.store().extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(args.view, spack.store.layout)
layout = YamlViewExtensionsLayout(
args.view, spack.store.store().layout)
if spec.package.is_activated(extensions_layout=layout):
tty.msg("Package %s is already activated." % specs[0].short_spec)

View File

@ -68,7 +68,7 @@ def bootstrap(parser, args, **kwargs):
requirement_dict = {'environment-modules': 'environment-modules~X'}
for requirement in requirement_dict:
installed_specs = spack.store.db.query(requirement)
installed_specs = spack.store.store().db.query(requirement)
if(len(installed_specs) > 0):
tty.msg("Requirement %s is satisfied with installed "
"package %s" % (requirement, installed_specs[0]))

View File

@ -116,7 +116,7 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, force=False):
has_errors = False
specs = spack.cmd.parse_specs(pkgs)
for spec in specs:
matching = spack.store.db.query(spec)
matching = spack.store.store().db.query(spec)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@ -254,7 +254,7 @@ def install_tarball(spec, args):
tty.msg('Installing buildcache for spec %s' % spec.format())
bindist.extract_tarball(spec, tarball, args.allow_root,
args.unsigned, args.force)
spack.store.db.reindex(spack.store.layout)
spack.store.store().reindex()
else:
tty.die('Download of binary cache file for spec %s failed.' %
spec.format())

View File

@ -73,12 +73,12 @@ def _specs(self, **kwargs):
# return everything for an empty query.
if not qspecs:
return spack.store.db.query(**kwargs)
return spack.store.store().db.query(**kwargs)
# Return only matching stuff otherwise.
specs = set()
for spec in qspecs:
for s in spack.store.db.query(spec, **kwargs):
for s in spack.store.store().db.query(spec, **kwargs):
specs.add(s)
return sorted(specs)

View File

@ -60,14 +60,15 @@ def deactivate(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
pkg = spec.package
layout = spack.store.extensions
layout = spack.store.store().extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(args.view, spack.store.layout)
layout = YamlViewExtensionsLayout(
args.view, spack.store.store().layout)
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
ext_pkgs = spack.store.db.activated_extensions_for(
ext_pkgs = spack.store.store().db.activated_extensions_for(
spec, extensions_layout=layout)
for ext_pkg in ext_pkgs:

View File

@ -76,16 +76,16 @@ def create_db_tarball(args):
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(spack.store.root)
base = os.path.basename(spack.store.store().root)
transform_args = []
if 'GNU' in tar('--version', output=str):
transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
else:
transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]
wd = os.path.dirname(spack.store.root)
wd = os.path.dirname(spack.store.store().root)
with working_dir(wd):
files = [spack.store.db._index_path]
files = [spack.store.store().db._index_path]
files += glob('%s/*/*/*/.spack/spec.yaml' % base)
files = [os.path.relpath(f) for f in files]

View File

@ -60,7 +60,7 @@ def dependencies(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
tty.msg("Dependencies of %s" % spec.format('$_$@$%@$/', color=True))
deps = spack.store.db.installed_relatives(
deps = spack.store.store().db.installed_relatives(
spec, 'children', args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)

View File

@ -103,7 +103,7 @@ def dependents(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
tty.msg("Dependents of %s" % spec.cformat('$_$@$%@$/'))
deps = spack.store.db.installed_relatives(
deps = spack.store.store().db.installed_relatives(
spec, 'parents', args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)

View File

@ -113,16 +113,17 @@ def extensions(parser, args):
tty.msg("%d extensions:" % len(extensions))
colify(ext.name for ext in extensions)
layout = spack.store.extensions
layout = spack.store.store().extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(args.view, spack.store.layout)
layout = YamlViewExtensionsLayout(
args.view, spack.store.store().layout)
if show_installed:
#
# List specs of installed extensions.
#
installed = [s.spec
for s in spack.store.db.installed_extensions_for(spec)]
store = spack.store.store()
installed = [s.spec for s in store.db.installed_extensions_for(spec)]
if show_all:
print

View File

@ -77,7 +77,7 @@ def graph(parser, args):
if args.specs:
tty.die("Can't specify specs with --installed")
args.dot = True
specs = spack.store.db.query()
specs = spack.store.store().db.query()
else:
specs = spack.cmd.parse_specs(

View File

@ -244,7 +244,7 @@ def install(parser, args, **kwargs):
"only one spec is allowed when overwriting an installation"
spec = specs[0]
t = spack.store.db.query(spec)
t = spack.store.store().db.query(spec)
assert len(t) == 1, "to overwrite a spec you must install it first"
# Give the user a last chance to think about overwriting an already

View File

@ -31,4 +31,4 @@
def reindex(parser, args):
spack.store.db.reindex(spack.store.layout)
spack.store.store().reindex()

View File

@ -135,7 +135,7 @@ def setup(self, args):
tty.die("spack setup only takes one spec.")
# Take a write lock before checking for existence.
with spack.store.db.write_transaction():
with spack.store.store().db.write_transaction():
spec = specs[0]
if not spack.repo.path().exists(spec.name):
tty.die("No package for '{0}' was found.".format(spec.name),

View File

@ -93,7 +93,7 @@ def find_matching_specs(specs, allow_multiple_matches=False, force=False):
specs_from_cli = []
has_errors = False
for spec in specs:
matching = spack.store.db.query(spec)
matching = spack.store.store().db.query(spec)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@ -128,7 +128,8 @@ def installed_dependents(specs):
"""
dependents = {}
for item in specs:
installed = spack.store.db.installed_relatives(item, 'parents', True)
installed = spack.store.store().db.installed_relatives(
item, 'parents', True)
lst = [x for x in installed if x not in specs]
if lst:
lst = list(set(lst))
@ -157,7 +158,7 @@ def do_uninstall(specs, force):
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):
dependents = spack.store.db.installed_relatives(
dependents = spack.store.store().db.installed_relatives(
pkg.spec, 'parents', True)
return len(dependents)

View File

@ -93,7 +93,7 @@ def squash(matching_specs):
return matching_specs[0]
# make function always return a list to keep consistency between py2/3
return list(map(squash, map(spack.store.db.query, specs)))
return list(map(squash, map(spack.store.store().db.query, specs)))
def setup_parser(sp):
@ -176,7 +176,7 @@ def view(parser, args):
path = args.path[0]
view = YamlFilesystemView(
path, spack.store.layout,
path, spack.store.store().layout,
ignore_conflicts=getattr(args, "ignore_conflicts", False),
link=os.link if args.action in ["hardlink", "hard"]
else os.symlink,

View File

@ -223,7 +223,7 @@ def prefix_lock(self, spec):
Prefix lock is a byte range lock on the nth byte of a file.
The lock file is ``spack.store.db.prefix_lock`` -- the DB
The lock file is ``spack.store.store().db.prefix_lock`` -- the DB
tells us what to call it and it lives alongside the install DB.
n is the sys.maxsize-bit prefix of the DAG hash. This makes
@ -366,7 +366,7 @@ def check(cond, msg):
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
self.reindex(spack.store.layout)
self.reindex(spack.store.store().layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
@ -499,7 +499,7 @@ def _read_suppress_error():
tty.debug(
'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
try:
layout = spack.store.layout
layout = spack.store.store().layout
if entry.spec.external:
layout = None
install_check = True
@ -609,7 +609,7 @@ def _read(self):
# reindex() takes its own write lock, so no lock here.
with WriteTransaction(self.lock, timeout=_db_lock_timeout):
self._write(None, None, None)
self.reindex(spack.store.layout)
self.reindex(spack.store.store().layout)
def _add(
self,
@ -823,7 +823,7 @@ def activated_extensions_for(self, extendee_spec, extensions_layout=None):
the given spec
"""
if extensions_layout is None:
extensions_layout = spack.store.extensions
extensions_layout = spack.store.store().extensions
for spec in self.query():
try:
extensions_layout.check_activated(extendee_spec, spec)
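
The prefix-lock docstring above describes the scheme: all prefix locks
share one file, and each spec locks a single byte whose offset is derived
from its DAG hash. A minimal sketch of that idea with POSIX byte-range
locks (invented helper names; this is an illustration, not Spack's
``llnl.util.lock`` implementation):

    import fcntl
    import os
    import sys

    def _lock_offset(dag_hash):
        # Map a prefix of the (base32) DAG hash to a stable byte offset;
        # base-36 parsing accepts base32 digits. Bound it to a lockable range.
        return int(dag_hash[:8], 36) % sys.maxsize

    def lock_prefix(lock_file, dag_hash):
        # Exclusive lock on one byte at the derived offset: different specs
        # lock different bytes of the same file and so don't contend.
        fd = os.open(lock_file, os.O_RDWR | os.O_CREAT, 0o644)
        fcntl.lockf(fd, fcntl.LOCK_EX, 1, _lock_offset(dag_hash))
        return fd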

View File

@ -267,7 +267,7 @@ def add_standalone(self, spec):
# Check for globally activated extensions in the extendee that
# we're looking at.
activated = [p.spec for p in
spack.store.db.activated_extensions_for(spec)]
spack.store.store().db.activated_extensions_for(spec)]
if activated:
tty.error("Globally activated extensions cannot be used in "
"conjunction with filesystem views. "
@ -390,7 +390,8 @@ def remove_standalone(self, spec):
tty.info(self._croot + 'Removed package: %s' % colorize_spec(spec))
def get_all_specs(self):
dotspack = join_path(self.root, spack.store.layout.metadata_dir)
dotspack = join_path(self.root,
spack.store.store().layout.metadata_dir)
if os.path.exists(dotspack):
return list(filter(None, map(self.get_spec, os.listdir(dotspack))))
else:
@ -407,12 +408,14 @@ def get_conflicts(self, *specs):
def get_path_meta_folder(self, spec):
"Get path to meta folder for either spec or spec name."
return join_path(self.root, spack.store.layout.metadata_dir,
return join_path(self.root,
spack.store.store().layout.metadata_dir,
getattr(spec, "name", spec))
def get_spec(self, spec):
dotspack = self.get_path_meta_folder(spec)
filename = join_path(dotspack, spack.store.layout.spec_file_name)
filename = join_path(dotspack,
spack.store.store().layout.spec_file_name)
try:
with open(filename, "r") as f:
@ -421,7 +424,7 @@ def get_spec(self, spec):
return None
def link_meta_folder(self, spec):
src = spack.store.layout.metadata_path(spec)
src = spack.store.store().layout.metadata_path(spec)
tgt = self.get_path_meta_folder(spec)
tree = LinkTree(src)
@ -547,4 +550,4 @@ def get_dependencies(specs):
def ignore_metadata_dir(f):
return f in spack.store.layout.hidden_file_paths
return f in spack.store.store().layout.hidden_file_paths

View File

@ -949,7 +949,7 @@ def is_activated(self, extensions_layout=None):
raise ValueError(
"is_extension called on package that is not an extension.")
if extensions_layout is None:
extensions_layout = spack.store.extensions
extensions_layout = spack.store.store().extensions
exts = extensions_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
@ -1003,7 +1003,7 @@ def remove_prefix(self):
Removes the prefix for a package along with any empty parent
directories
"""
spack.store.layout.remove_install_directory(self.spec)
spack.store.store().layout.remove_install_directory(self.spec)
def do_fetch(self, mirror_only=False):
"""
@ -1241,7 +1241,8 @@ def do_fake_install(self):
# Install fake man page
mkdirp(self.prefix.man.man1)
packages_dir = spack.store.layout.build_packages_path(self.spec)
store = spack.store.store()
packages_dir = store.layout.build_packages_path(self.spec)
dump_packages(self.spec, packages_dir)
def _if_make_target_execute(self, target):
@ -1304,7 +1305,7 @@ def _resource_stage(self, resource):
def _stage_and_write_lock(self):
"""Prefix lock nested in a stage."""
with self.stage:
with spack.store.db.prefix_write_lock(self.spec):
with spack.store.store().db.prefix_write_lock(self.spec):
yield
def _process_external_package(self, explicit):
@ -1328,7 +1329,7 @@ def _process_external_package(self, explicit):
try:
# Check if the package was already registered in the DB
# If this is the case, then just exit
rec = spack.store.db.get_record(self.spec)
rec = spack.store.store().db.get_record(self.spec)
message = '{s.name}@{s.version} : already registered in DB'
tty.msg(message.format(s=self))
# Update the value of rec.explicit if it is necessary
@ -1344,12 +1345,12 @@ def _process_external_package(self, explicit):
# Add to the DB
message = '{s.name}@{s.version} : registering into DB'
tty.msg(message.format(s=self))
spack.store.db.add(self.spec, None, explicit=explicit)
spack.store.store().db.add(self.spec, None, explicit=explicit)
def _update_explicit_entry_in_db(self, rec, explicit):
if explicit and not rec.explicit:
with spack.store.db.write_transaction():
rec = spack.store.db.get_record(self.spec)
with spack.store.store().db.write_transaction():
rec = spack.store.store().db.get_record(self.spec)
rec.explicit = True
message = '{s.name}@{s.version} : marking the package explicit'
tty.msg(message.format(s=self))
@ -1366,7 +1367,8 @@ def try_install_from_binary_cache(self, explicit):
binary_distribution.extract_tarball(
binary_spec, tarball, allow_root=False,
unsigned=False, force=False)
spack.store.db.add(self.spec, spack.store.layout, explicit=explicit)
spack.store.store().db.add(
self.spec, spack.store.store().layout, explicit=explicit)
return True
def do_install(self,
@ -1423,15 +1425,15 @@ def do_install(self,
partial = self.check_for_unfinished_installation(keep_prefix, restage)
# Ensure package is not already installed
layout = spack.store.layout
with spack.store.db.prefix_read_lock(self.spec):
layout = spack.store.store().layout
with spack.store.store().db.prefix_read_lock(self.spec):
if partial:
tty.msg(
"Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = '{0.name} is already installed in {0.prefix}'
tty.msg(msg.format(self))
rec = spack.store.db.get_record(self.spec)
rec = spack.store.store().db.get_record(self.spec)
# In case the stage directory has already been created,
# this ensures it's removed after we checked that the spec
# is installed
@ -1566,7 +1568,7 @@ def build_process():
try:
# Create the install prefix and fork the build process.
if not os.path.exists(self.prefix):
spack.store.layout.create_install_directory(self.spec)
spack.store.store().layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
# we preserve verbosity settings across installs.
@ -1577,8 +1579,8 @@ def build_process():
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(
self.spec, spack.store.layout, explicit=explicit
spack.store.store().db.add(
self.spec, spack.store.store().layout, explicit=explicit
)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
@ -1632,9 +1634,9 @@ def check_for_unfinished_installation(
raise ExternalPackageError("Attempted to repair external spec %s" %
self.spec.name)
with spack.store.db.prefix_write_lock(self.spec):
with spack.store.store().db.prefix_write_lock(self.spec):
try:
record = spack.store.db.get_record(self.spec)
record = spack.store.store().db.get_record(self.spec)
installed_in_db = record.installed if record else False
except KeyError:
installed_in_db = False
@ -1669,9 +1671,10 @@ def _do_install_pop_kwargs(self, kwargs):
def log(self):
# Copy provenance into the install directory on success
log_install_path = spack.store.layout.build_log_path(self.spec)
env_install_path = spack.store.layout.build_env_path(self.spec)
packages_dir = spack.store.layout.build_packages_path(self.spec)
store = spack.store.store()
log_install_path = store.layout.build_log_path(self.spec)
env_install_path = store.layout.build_env_path(self.spec)
packages_dir = store.layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
@ -1690,8 +1693,9 @@ def log(self):
with working_dir(self.stage.source_path):
errors = StringIO()
target_dir = os.path.join(
spack.store.layout.metadata_path(self.spec), 'archived-files'
)
spack.store.store().layout.metadata_path(self.spec),
'archived-files')
for glob_expr in self.archive_files:
# Check that we are trying to copy things that are
# in the source_path tree (not arbitrary files)
@ -1750,7 +1754,7 @@ def check_paths(path_list, filetype, predicate):
installed = set(os.listdir(self.prefix))
installed.difference_update(
spack.store.layout.hidden_file_paths)
spack.store.store().layout.hidden_file_paths)
if not installed:
raise InstallError(
"Install failed for %s. Nothing was installed!" % self.name)
@ -1758,7 +1762,7 @@ def check_paths(path_list, filetype, predicate):
@property
def build_log_path(self):
if self.installed:
return spack.store.layout.build_log_path(self.spec)
return spack.store.store().layout.build_log_path(self.spec)
else:
return join_path(self.stage.source_path, 'spack-build.out')
@ -1916,16 +1920,16 @@ def uninstall_by_spec(spec, force=False):
if not os.path.isdir(spec.prefix):
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
specs = spack.store.db.query(spec, installed=True)
specs = spack.store.store().db.query(spec, installed=True)
if specs:
spack.store.db.remove(specs[0])
spack.store.store().db.remove(specs[0])
tty.msg("Removed stale DB entry for %s" % spec.short_spec)
return
else:
raise InstallError(str(spec) + " is not installed.")
if not force:
dependents = spack.store.db.installed_relatives(
dependents = spack.store.store().db.installed_relatives(
spec, 'parents', True)
if dependents:
raise PackageStillNeededError(spec, dependents)
@ -1937,7 +1941,7 @@ def uninstall_by_spec(spec, force=False):
pkg = None
# Pre-uninstall hook runs first.
with spack.store.db.prefix_write_lock(spec):
with spack.store.store().db.prefix_write_lock(spec):
if pkg is not None:
spack.hooks.pre_uninstall(spec)
@ -1946,11 +1950,11 @@ def uninstall_by_spec(spec, force=False):
if not spec.external:
msg = 'Deleting package prefix [{0}]'
tty.debug(msg.format(spec.short_spec))
spack.store.layout.remove_install_directory(spec)
spack.store.store().layout.remove_install_directory(spec)
# Delete DB entry
msg = 'Deleting DB entry [{0}]'
tty.debug(msg.format(spec.short_spec))
spack.store.db.remove(spec)
spack.store.store().db.remove(spec)
if pkg is not None:
spack.hooks.post_uninstall(spec)
@ -1996,7 +2000,7 @@ def do_activate(self, with_dependencies=True, ignore_conflicts=False,
self._sanity_check_extension()
if extensions_layout is None:
extensions_layout = spack.store.extensions
extensions_layout = spack.store.store().extensions
extensions_layout.check_extension_conflict(
self.extendee_spec, self.spec)
@ -2040,11 +2044,11 @@ def activate(self, extension, ignore_conflicts=False, **kwargs):
"""
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
target = extensions_layout.extendee_target_directory(self)
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
return (filename in spack.store.store().layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@ -2072,7 +2076,7 @@ def do_deactivate(self, **kwargs):
verbose = kwargs.get("verbose", True)
remove_dependents = kwargs.get("remove_dependents", False)
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
# Allow a force deactivate to happen. This can unlink
# spurious files if something was corrupted.
@ -2124,11 +2128,11 @@ def deactivate(self, extension, **kwargs):
"""
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
target = extensions_layout.extendee_target_directory(self)
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
return (filename in spack.store.store().layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@ -2273,7 +2277,7 @@ def flatten_dependencies(spec, flat_dir):
for dep in spec.traverse(root=False):
name = dep.name
dep_path = spack.store.layout.path_for_spec(dep)
dep_path = spack.store.store().layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + '/' + name)
@ -2302,7 +2306,7 @@ def dump_packages(spec, path):
if node is not spec:
# Locate the dependency package in the install tree and find
# its provenance information.
source = spack.store.layout.build_packages_path(node)
source = spack.store.store().layout.build_packages_path(node)
source_repo_root = join_path(source, node.namespace)
# There's no provenance installed for the source package. Skip it.

View File

@ -110,7 +110,7 @@ def get_placeholder_rpaths(path_name, orig_rpaths):
Replaces original layout root dir with a placeholder string in all rpaths.
"""
rel_rpaths = []
orig_dir = spack.store.layout.root
orig_dir = spack.store.store().layout.root
for rpath in orig_rpaths:
if re.match(orig_dir, rpath):
placeholder = set_placeholder(orig_dir)
@ -186,7 +186,7 @@ def macho_make_paths_placeholder(rpaths, deps, idpath):
replacement are returned.
"""
new_idpath = None
old_dir = spack.store.layout.root
old_dir = spack.store.store().layout.root
placeholder = set_placeholder(old_dir)
if idpath:
new_idpath = re.sub(old_dir, placeholder, idpath)
@ -425,9 +425,9 @@ def make_binary_placeholder(cur_path_names, allow_root):
new_rpaths, new_deps, new_idpath)
if (not allow_root and
strings_contains_installroot(cur_path,
spack.store.layout.root)):
raise InstallRootStringException(cur_path,
spack.store.layout.root)
spack.store.store().layout.root)):
raise InstallRootStringException(
cur_path, spack.store.store().layout.root)
elif platform.system() == 'Linux':
for cur_path in cur_path_names:
orig_rpaths = get_existing_elf_rpaths(cur_path)
@ -435,10 +435,10 @@ def make_binary_placeholder(cur_path_names, allow_root):
new_rpaths = get_placeholder_rpaths(cur_path, orig_rpaths)
modify_elf_object(cur_path, new_rpaths)
if (not allow_root and
strings_contains_installroot(cur_path,
spack.store.layout.root)):
raise InstallRootStringException(cur_path,
spack.store.layout.root)
strings_contains_installroot(
cur_path, spack.store.store().layout.root)):
raise InstallRootStringException(
cur_path, spack.store.store().layout.root)
else:
tty.die("Placeholder not implemented for %s" % platform.system())

View File

@ -1402,7 +1402,7 @@ def cshort_spec(self):
@property
def prefix(self):
if self._prefix is None:
self.prefix = spack.store.layout.path_for_spec(self)
self.prefix = spack.store.store().layout.path_for_spec(self)
return self._prefix
@prefix.setter
@ -3107,7 +3107,7 @@ def write(s, c):
elif named_str == 'SPACK_ROOT':
out.write(fmt % token_transform(spack.paths.prefix))
elif named_str == 'SPACK_INSTALL':
out.write(fmt % token_transform(spack.store.root))
out.write(fmt % token_transform(spack.store.store().root))
elif named_str == 'PREFIX':
out.write(fmt % token_transform(self.prefix))
elif named_str.startswith('HASH'):
@ -3149,7 +3149,7 @@ def _install_status(self):
if not self.concrete:
return None
try:
record = spack.store.db.get_record(self)
record = spack.store.store().db.get_record(self)
return record.installed
except KeyError:
return None
@ -3159,7 +3159,7 @@ def _installed_explicitly(self):
if not self.concrete:
return None
try:
record = spack.store.db.get_record(self)
record = spack.store.store().db.get_record(self)
return record.explicit
except KeyError:
return None
@ -3382,7 +3382,7 @@ def parse_compiler(self, text):
def spec_by_hash(self):
self.expect(ID)
specs = spack.store.db.query()
specs = spack.store.store().db.query()
matches = [spec for spec in specs if
spec.dag_hash()[:len(self.token.value)] == self.token.value]

View File

@ -45,32 +45,59 @@
import os
import spack.paths
import spack.config
from spack.util.path import canonicalize_path
from spack.database import Database
from spack.directory_layout import YamlDirectoryLayout
from spack.directory_layout import YamlExtensionsLayout
import spack.util.path
import spack.database
import spack.directory_layout
__author__ = "Benedikt Hegner (CERN)"
__all__ = ['db', 'extensions', 'layout', 'root']

#
# Set up the install path
#
root = canonicalize_path(spack.config.get(
    'config:install_tree', os.path.join(spack.paths.opt_path, 'spack')))

#
# Set up the installed packages database
#
db = Database(root)

#
# This controls how spack lays out install prefixes and
# stage directories.
#
layout = YamlDirectoryLayout(
    root,
    hash_len=spack.config.get('config:install_hash_length'),
    path_scheme=spack.config.get('config:install_path_scheme'))

extensions = YamlExtensionsLayout(root, layout)

#: default installation root, relative to the Spack install path
default_root = os.path.join(spack.paths.opt_path, 'spack')

class Store(object):
    """A store is a path full of installed Spack packages.

    Stores consist of packages installed according to a
    ``DirectoryLayout``, along with an index, or _database_, of their
    contents. The directory layout controls what paths look like and how
    Spack ensures that each unique spec gets its own unique directory (or
    not, though we don't recommend that). The database is a single file
    that caches metadata for the entire Spack installation. It prevents
    us from having to spider the install tree to figure out what's there.

    Args:
        root (str): path to the root of the install tree
        path_scheme (str): expression according to guidelines in
            ``spack.util.path`` that describes how to construct a path to
            a package prefix in this store
        hash_length (int): length of the hashes used in the directory
            layout; spec hash suffixes will be truncated to this length
    """
    def __init__(self, root, path_scheme, hash_length):
        self.root = root
        self.db = spack.database.Database(root)
        self.layout = spack.directory_layout.YamlDirectoryLayout(
            root, hash_len=hash_length, path_scheme=path_scheme)
        self.extensions = spack.directory_layout.YamlExtensionsLayout(
            root, self.layout)

    def reindex(self):
        """Convenience function to reindex the store DB with its own layout."""
        return self.db.reindex(self.layout)

#: Singleton store instance
_store = None

def store():
    """Get the singleton store instance."""
    global _store
    if _store is None:
        root = spack.config.get('config:install_tree', default_root)
        root = spack.util.path.canonicalize_path(root)

        _store = Store(root,
                       spack.config.get('config:install_path_scheme'),
                       spack.config.get('config:install_hash_length'))
    return _store
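
One consequence of moving the state onto a ``Store`` instance: tests can
build an isolated store directly instead of patching four module globals
(the ``conftest.py`` changes later in this diff still patch attributes,
but now on a single object). A hypothetical usage sketch; the scratch
path is illustrative, and passing ``None`` for the path scheme and hash
length matches what ``store()`` does when those config options are unset:

    import spack.store

    # point a throwaway Store at a scratch directory
    test_store = spack.store.Store('/tmp/spack-test-root', None, None)

    test_store.db         # Database rooted under /tmp/spack-test-root
    test_store.layout     # YamlDirectoryLayout for the same root
    test_store.reindex()  # rebuild the index from the directory layout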

View File

@ -60,7 +60,7 @@ def test_immediate_installed_dependencies(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7)
expected = set([spack.store.store().db.query_one(s).dag_hash(7)
for s in ['mpich', 'callpath^mpich']])
assert expected == hashes
@ -74,7 +74,7 @@ def test_transitive_installed_dependencies(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7)
expected = set([spack.store.store().db.query_one(s).dag_hash(7)
for s in ['zmpi', 'callpath^zmpi', 'fake',
'dyninst', 'libdwarf', 'libelf']])

View File

@ -58,10 +58,10 @@ def test_immediate_installed_dependents(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7)
expected = set([spack.store.store().db.query_one(s).dag_hash(7)
for s in ['dyninst', 'libdwarf']])
libelf = spack.store.db.query_one('libelf')
libelf = spack.store.store().db.query_one('libelf')
expected = set([d.dag_hash(7) for d in libelf.dependents()])
assert expected == hashes
@ -75,7 +75,7 @@ def test_transitive_installed_dependents(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
expected = set([spack.store.db.query_one(s).dag_hash(7)
expected = set([spack.store.store().db.query_one(s).dag_hash(7)
for s in ['zmpi', 'callpath^zmpi', 'mpileaks^zmpi']])
assert expected == hashes

View File

@ -387,7 +387,7 @@ def test_extra_files_are_archived(mock_packages, mock_archive, mock_fetch,
install('archive-files')
archive_dir = os.path.join(
spack.store.layout.metadata_path(s), 'archived-files'
spack.store.store().layout.metadata_path(s), 'archived-files'
)
config_log = os.path.join(archive_dir, 'config.log')
assert os.path.exists(config_log)

View File

@ -61,7 +61,7 @@ def test_recursive_uninstall():
"""Test recursive uninstall."""
uninstall('-y', '-a', '--dependents', 'callpath')
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.store().layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]

View File

@ -283,18 +283,18 @@ def database(tmpdir_factory, mock_packages, config):
# Make a fake install directory
install_path = tmpdir_factory.mktemp('install_for_database')
spack_install_path = spack.store.root
spack_install_path = spack.store.store().root
spack.store.root = str(install_path)
spack.store.store().root = str(install_path)
install_layout = spack.directory_layout.YamlDirectoryLayout(
str(install_path))
spack_install_layout = spack.store.layout
spack.store.layout = install_layout
spack_install_layout = spack.store.store().layout
spack.store.store().layout = install_layout
# Make fake database and fake install directory.
install_db = spack.database.Database(str(install_path))
spack_install_db = spack.store.db
spack.store.db = install_db
spack_install_db = spack.store.store().db
spack.store.store().db = install_db
Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
Database = collections.namedtuple(
@ -316,8 +316,8 @@ def _uninstall(spec):
spec.package.do_uninstall(spec)
def _refresh():
with spack.store.db.write_transaction():
for spec in spack.store.db.query():
with spack.store.store().db.write_transaction():
for spec in spack.store.store().db.query():
_uninstall(spec)
_install('mpileaks ^mpich')
_install('mpileaks ^mpich2')
@ -332,7 +332,7 @@ def _refresh():
refresh=_refresh)
# Transaction used to avoid repeated writes.
with spack.store.db.write_transaction():
with spack.store.store().db.write_transaction():
t.install('mpileaks ^mpich')
t.install('mpileaks ^mpich2')
t.install('mpileaks ^zmpi')
@ -340,17 +340,17 @@ def _refresh():
yield t
with spack.store.db.write_transaction():
for spec in spack.store.db.query():
with spack.store.store().db.write_transaction():
for spec in spack.store.store().db.query():
if spec.package.installed:
t.uninstall(spec)
else:
spack.store.db.remove(spec)
spack.store.store().db.remove(spec)
install_path.remove(rec=1)
spack.store.root = spack_install_path
spack.store.layout = spack_install_layout
spack.store.db = spack_install_db
spack.store.store().root = spack_install_path
spack.store.store().layout = spack_install_layout
spack.store.store().db = spack_install_db
@pytest.fixture()
@ -363,26 +363,27 @@ def refresh_db_on_exit(database):
@pytest.fixture()
def install_mockery(tmpdir, config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack."""
layout = spack.store.layout
extensions = spack.store.extensions
db = spack.store.db
layout = spack.store.store().layout
extensions = spack.store.store().extensions
db = spack.store.store().db
new_opt = str(tmpdir.join('opt'))
# Use a fake install directory to avoid conflicts between
# installed pkgs and mock packages.
spack.store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
spack.store.extensions = spack.directory_layout.YamlExtensionsLayout(
new_opt, spack.store.layout)
spack.store.db = spack.database.Database(new_opt)
store = spack.store.store()
store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
store.extensions = spack.directory_layout.YamlExtensionsLayout(
new_opt, spack.store.store().layout)
store.db = spack.database.Database(new_opt)
# We use a fake package, so temporarily disable checksumming
with spack.config.override('config:checksum', False):
yield
# Restore Spack's layout.
spack.store.layout = layout
spack.store.extensions = extensions
spack.store.db = db
store.layout = layout
store.extensions = extensions
store.db = db
@pytest.fixture()

View File

@ -47,16 +47,16 @@ def _print_ref_counts():
recs = []
def add_rec(spec):
cspecs = spack.store.db.query(spec, installed=any)
cspecs = spack.store.store().db.query(spec, installed=any)
if not cspecs:
recs.append("[ %-7s ] %-20s-" % ('', spec))
else:
key = cspecs[0].dag_hash()
rec = spack.store.db.get_record(cspecs[0])
rec = spack.store.store().db.get_record(cspecs[0])
recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
with spack.store.db.read_transaction():
with spack.store.store().db.read_transaction():
add_rec('mpileaks ^mpich')
add_rec('callpath ^mpich')
add_rec('mpich')
@ -79,7 +79,7 @@ def add_rec(spec):
def _check_merkleiness():
"""Ensure the spack database is a valid merkle graph."""
all_specs = spack.store.db.query(installed=any)
all_specs = spack.store.store().db.query(installed=any)
seen = {}
for spec in all_specs:
@ -93,7 +93,7 @@ def _check_merkleiness():
def _check_db_sanity(install_db):
"""Utiilty function to check db against install layout."""
pkg_in_layout = sorted(spack.store.layout.all_specs())
pkg_in_layout = sorted(spack.store.store().layout.all_specs())
actual = sorted(install_db.query())
externals = sorted([x for x in actual if x.external])
@ -129,7 +129,7 @@ def _check_remove_and_add_package(install_db, spec):
assert concrete_spec not in remaining
# add it back and make sure everything is ok.
install_db.add(concrete_spec, spack.store.layout)
install_db.add(concrete_spec, spack.store.store().layout)
installed = install_db.query()
assert concrete_spec in installed
assert installed == original
@ -147,7 +147,7 @@ def _mock_install(spec):
def _mock_remove(spec):
specs = spack.store.db.query(spec)
specs = spack.store.store().db.query(spec)
assert len(specs) == 1
spec = specs[0]
spec.package.do_uninstall(spec)
@ -206,7 +206,7 @@ def test_005_db_exists(database):
def test_010_all_install_sanity(database):
"""Ensure that the install layout reflects what we think it does."""
all_specs = spack.store.layout.all_specs()
all_specs = spack.store.store().layout.all_specs()
assert len(all_specs) == 14
# Query specs with multiple configurations
@ -241,12 +241,12 @@ def test_010_all_install_sanity(database):
def test_015_write_and_read(database):
# write and read DB
with spack.store.db.write_transaction():
specs = spack.store.db.query()
recs = [spack.store.db.get_record(s) for s in specs]
with spack.store.store().db.write_transaction():
specs = spack.store.store().db.query()
recs = [spack.store.store().db.get_record(s) for s in specs]
for spec, rec in zip(specs, recs):
new_rec = spack.store.db.get_record(spec)
new_rec = spack.store.store().db.get_record(spec)
assert new_rec.ref_count == rec.ref_count
assert new_rec.spec == rec.spec
assert new_rec.path == rec.path
@ -262,7 +262,7 @@ def test_020_db_sanity(database):
def test_025_reindex(database):
"""Make sure reindex works and ref counts are valid."""
install_db = database.mock.db
spack.store.db.reindex(spack.store.layout)
spack.store.store().reindex()
_check_db_sanity(install_db)
@ -293,7 +293,7 @@ def test_050_basic_query(database):
"""Ensure querying database is consistent with what is installed."""
install_db = database.mock.db
# query everything
assert len(spack.store.db.query()) == 16
assert len(spack.store.store().db.query()) == 16
# query specs with multiple configurations
mpileaks_specs = install_db.query('mpileaks')
@ -350,7 +350,7 @@ def test_080_root_ref_counts(database):
assert install_db.get_record('mpich').ref_count == 1
# Put the spec back
install_db.add(rec.spec, spack.store.layout)
install_db.add(rec.spec, spack.store.store().layout)
# record is present again
assert len(install_db.query('mpileaks ^mpich', installed=any)) == 1
@ -436,7 +436,7 @@ def test_115_reindex_with_packages_not_in_repo(database, refresh_db_on_exit):
# packages should not have to be defined in the repository once they
# are installed
with spack.repo.swap(MockPackageMultiRepo([])):
spack.store.db.reindex(spack.store.layout)
spack.store.store().reindex()
_check_db_sanity(install_db)

View File

@ -336,8 +336,8 @@ def test_ambiguous_hash(self, database):
x2 = Spec('a')
x2._hash = 'xx'
x2._concrete = True
database.mock.db.add(x1, spack.store.layout)
database.mock.db.add(x2, spack.store.layout)
database.mock.db.add(x1, spack.store.store().layout)
database.mock.db.add(x2, spack.store.store().layout)
# ambiguity in first hash character
self._check_raises(AmbiguousHashError, ['/x'])

View File

@ -50,8 +50,8 @@ class Aspell(AutotoolsPackage):
# - dest_dir instead of self.prefix in tree.(find_conflict|merge)()
def activate(self, extension, **kwargs):
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
if extensions_layout is not spack.store.extensions:
spack.store.store().extensions)
if extensions_layout is not spack.store.store().extensions:
raise ExtensionError(
'aspell does not support non-global extensions')
@ -60,7 +60,7 @@ def activate(self, extension, **kwargs):
tree = LinkTree(extension.prefix.lib)
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
return (filename in spack.store.store().layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
conflict = tree.find_conflict(dest_dir, ignore=ignore)
@ -71,8 +71,8 @@ def ignore(filename):
def deactivate(self, extension, **kwargs):
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
if extensions_layout is not spack.store.extensions:
spack.store.store().extensions)
if extensions_layout is not spack.store.store().extensions:
raise ExtensionError(
'aspell does not support non-global extensions')
@ -80,7 +80,7 @@ def deactivate(self, extension, **kwargs):
dest_dir = aspell('dump', 'config', 'dict-dir', output=str).strip()
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
return (filename in spack.store.store().layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix.lib)

View File

@ -193,15 +193,16 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# the login node components with this spack invocation. We
# need these paths to be the ones created in the CNL
# spack invocation.
be_cbtf = spack.store.db.query_one('cbtf arch=cray-CNL-haswell')
be_cbtfk = spack.store.db.query_one('cbtf-krell arch=cray-CNL-haswell')
be_papi = spack.store.db.query_one('papi arch=cray-CNL-haswell')
be_boost = spack.store.db.query_one('boost arch=cray-CNL-haswell')
be_mont = spack.store.db.query_one('libmonitor arch=cray-CNL-haswell')
be_unw = spack.store.db.query_one('libunwind arch=cray-CNL-haswell')
be_xer = spack.store.db.query_one('xerces-c arch=cray-CNL-haswell')
be_dyn = spack.store.db.query_one('dyninst arch=cray-CNL-haswell')
be_mrnet = spack.store.db.query_one('mrnet arch=cray-CNL-haswell')
store = spack.store.store()
be_cbtf = store.db.query_one('cbtf arch=cray-CNL-haswell')
be_cbtfk = store.db.query_one('cbtf-krell arch=cray-CNL-haswell')
be_papi = store.db.query_one('papi arch=cray-CNL-haswell')
be_boost = store.db.query_one('boost arch=cray-CNL-haswell')
be_mont = store.db.query_one('libmonitor arch=cray-CNL-haswell')
be_unw = store.db.query_one('libunwind arch=cray-CNL-haswell')
be_xer = store.db.query_one('xerces-c arch=cray-CNL-haswell')
be_dyn = store.db.query_one('dyninst arch=cray-CNL-haswell')
be_mrnet = store.db.query_one('mrnet arch=cray-CNL-haswell')
CrayLoginNodeOptions.append('-DCN_RUNTIME_PLATFORM=%s'
% rt_platform)

View File

@ -197,7 +197,8 @@ def set_CrayLoginNode_cmakeOptions(self, spec, cmakeOptions):
# spec['cbtf'].prefix is the login node value for this build, as
# we only get here when building the login node components and
# that is all that is known to spack.
be_ck = spack.store.db.query_one('cbtf-krell arch=cray-CNL-haswell')
store = spack.store.store()
be_ck = store.db.query_one('cbtf-krell arch=cray-CNL-haswell')
# Equivalent to install-tool cmake arg:
# '-DCBTF_KRELL_CN_RUNTIME_DIR=%s'

View File

@ -264,7 +264,7 @@ def activate(self, ext_pkg, **args):
super(Perl, self).activate(ext_pkg, **args)
extensions_layout = args.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
exts = extensions_layout.extension_map(self.spec)
exts[ext_pkg.name] = ext_pkg.spec
@ -276,7 +276,7 @@ def deactivate(self, ext_pkg, **args):
super(Perl, self).deactivate(ext_pkg, **args)
extensions_layout = args.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
exts = extensions_layout.extension_map(self.spec)
# Make deactivate idempotent

View File

@ -320,7 +320,7 @@ def _save_distutil_vars(self, prefix):
output_filename = None
try:
output_filename = join_path(
spack.store.layout.metadata_path(self.spec),
spack.store.store().layout.metadata_path(self.spec),
Python._DISTUTIL_CACHE_FILENAME)
with open(output_filename, 'w') as output_file:
sjson.dump(self._distutil_vars, output_file)
@ -342,7 +342,7 @@ def _load_distutil_vars(self):
if not self._distutil_vars and self.installed:
try:
input_filename = join_path(
spack.store.layout.metadata_path(self.spec),
spack.store.store().layout.metadata_path(self.spec),
Python._DISTUTIL_CACHE_FILENAME)
if os.path.isfile(input_filename):
with open(input_filename) as input_file:
@ -680,7 +680,7 @@ def activate(self, ext_pkg, **args):
args.update(ignore=ignore)
extensions_layout = args.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
super(Python, self).activate(ext_pkg, **args)
@ -696,7 +696,7 @@ def deactivate(self, ext_pkg, **args):
super(Python, self).deactivate(ext_pkg, **args)
extensions_layout = args.get("extensions_layout",
spack.store.extensions)
spack.store.store().extensions)
exts = extensions_layout.extension_map(self.spec)
# Make deactivate idempotent