Refactor "spack buildcache" command (#27776)

This PR moves code with "business logic" out of `spack.cmd.buildcache` and into appropriate core modules[^1]; a usage sketch of the new API follows the checklist below.

Modifications:
- [x] Add `spack.binary_distribution.push` to create a binary package from a spec and push it to a mirror
- [x] Add `spack.binary_distribution.install_root_node` to install only the root node of a concrete spec from a buildcache (optionally verifying the sha256 sum, if one is passed as input)
- [x] Add `spack.binary_distribution.install_single_spec` to install a single concrete spec from a buildcache
- [x] Add `spack.binary_distribution.download_single_spec` to download a single concrete spec from a buildcache to a local destination
- [x] Add `Spec.from_specfile`, which constructs a spec given the path of a JSON or YAML spec file
- [x] Remove logic from `spack.cmd.buildcache`
- [x] Remove calls to `spack.cmd.buildcache` from `spack.bootstrap`
- [x] Deprecate `spack buildcache copy` with a message that says it will be removed in v0.19.0

[^1]: The rationale is that commands should be lightweight wrappers of the core API, since that helps with both testing and scripting (easier mocking and no need to invoke `SpackCommand`s in a script).
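
For scripting, the new functions can be called directly rather than through `SpackCommand`. Below is a minimal usage sketch of the new API; the spec name, mirror URL, and paths are placeholders, and the spec is assumed to be already installed locally:

```python
import spack.binary_distribution as bindist
import spack.mirror
import spack.store
from spack.spec import Spec

# Select installed specs matching a constraint; raises MatchError if a
# constraint is ambiguous (with multiple=False) or matches nothing.
matches = spack.store.find(['zlib'], multiple=False)

# Create a binary package for each selected root node and push it to a mirror.
push_url = spack.mirror.push_url_from_mirror_url('file:///tmp/mirror')
bindist.push(
    matches, push_url,
    specs_kwargs={'include_root': True, 'include_dependencies': False},
    force=True, unsigned=True, allow_root=True,
)

# Reinstall a single concrete spec from the buildcache ...
bindist.install_single_spec(matches[0], unsigned=True, force=True)

# ... or just download its buildcache files to a local directory.
bindist.download_single_spec(matches[0], '/tmp/downloads')

# Construct a Spec object directly from a JSON or YAML spec file.
s = Spec.from_specfile('/tmp/downloads/zlib.spec.json')
```
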
Massimiliano Culpo 2021-12-10 10:23:14 +01:00 committed by GitHub
parent e199d7ef6b
commit d17511a806
12 changed files with 520 additions and 447 deletions

@@ -13,6 +13,7 @@
import tarfile
import tempfile
import traceback
import warnings
from contextlib import closing
import ruamel.yaml as yaml
@@ -27,10 +28,13 @@
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.hash_types as ht
import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.platforms
import spack.relocate as relocate
import spack.repo
import spack.store
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.spack_json as sjson
@@ -975,8 +979,11 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
allow_root=False, key=None, regenerate_index=False):
def _build_tarball(
spec, outdir,
force=False, relative=False, unsigned=False,
allow_root=False, key=None, regenerate_index=False
):
"""
Build a tarball from given spec and put it into the directory structure
used at the mirror (following <tarball_directory_name>).
@@ -1044,11 +1051,11 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
os.remove(temp_tarfile_path)
# create info for later relocation and create tar
write_buildinfo_file(spec, workdir, rel)
write_buildinfo_file(spec, workdir, relative)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
if rel:
if relative:
try:
make_package_relative(workdir, spec, allow_root)
except Exception as e:
@@ -1096,7 +1103,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.layout.root)
buildinfo['relative_rpaths'] = rel
buildinfo['relative_rpaths'] = relative
spec_dict['buildinfo'] = buildinfo
with open(specfile_path, 'w') as outfile:
@@ -1148,6 +1155,64 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
return None
def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
"""Return the list of nodes to be packaged, given a list of specs.
Args:
specs (List[spack.spec.Spec]): list of root specs to be processed
include_root (bool): include the root of each spec in the nodes
include_dependencies (bool): include the dependencies of each
spec in the nodes
"""
if not include_root and not include_dependencies:
return set()
def skip_node(current_node):
if current_node.external or current_node.virtual:
return True
return spack.store.db.query_one(current_node) is None
expanded_set = set()
for current_spec in specs:
if not include_dependencies:
nodes = [current_spec]
else:
nodes = [n for n in current_spec.traverse(
order='post', root=include_root, deptype=('link', 'run')
)]
for node in nodes:
if not skip_node(node):
expanded_set.add(node)
return expanded_set
def push(specs, push_url, specs_kwargs=None, **kwargs):
"""Create a binary package for each of the specs passed as input and push them
to a given push URL.
Args:
specs (List[spack.spec.Spec]): installed specs to be packaged
push_url (str): url where to push the binary package
specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
and "include_dependencies", which determine which part of each spec is
packaged and pushed to the mirror
**kwargs: additional arguments forwarded to ``_build_tarball``
"""
specs_kwargs = specs_kwargs or {'include_root': True, 'include_dependencies': True}
nodes = nodes_to_be_packaged(specs, **specs_kwargs)
# TODO: This seems to be an easy target for task
# TODO: distribution using a parallel pool
for node in nodes:
try:
_build_tarball(node, push_url, **kwargs)
except NoOverwriteException as e:
warnings.warn(str(e))
def download_tarball(spec, preferred_mirrors=None):
"""
Download binary tarball for given package into stage area, returning
@@ -1486,6 +1551,66 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
os.remove(filename)
def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
"""Install the root node of a concrete spec from a buildcache.
Checking the sha256 sum of a node before installation is usually needed only
for software installed during Spack's bootstrapping (since we might not have
a proper signature verification mechanism available).
Args:
spec: spec to be installed (note that only the root node will be installed)
allow_root (bool): allows the root directory to be present in binaries
(may affect relocation)
unsigned (bool): if True allows installing unsigned binaries
force (bool): force installation if the spec is already present in the
local store
sha256 (str): optional sha256 of the binary package, to be checked
before installation
"""
package = spack.repo.get(spec)
# Early termination
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.concrete and package.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return
tarball = download_tarball(spec)
if not tarball:
msg = 'download of binary cache file for spec "{0}" failed'
raise RuntimeError(msg.format(spec.format()))
if sha256:
checker = spack.util.crypto.Checker(sha256)
msg = 'cannot verify checksum for "{0}" [expected={1}]'
msg = msg.format(tarball, sha256)
if not checker.check(tarball):
raise spack.binary_distribution.NoChecksumException(msg)
tty.debug('Verified SHA256 checksum of the build cache')
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, tarball, allow_root, unsigned, force)
spack.hooks.post_install(spec)
spack.store.db.add(spec, spack.store.layout)
def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
"""Install a single concrete spec from a buildcache.
Args:
spec (spack.spec.Spec): spec to be installed
allow_root (bool): allows the root directory to be present in binaries
(may affect relocation)
unsigned (bool): if True allows installing unsigned binaries
force (bool): force installation if the spec is already present in the
local store
"""
for node in spec.traverse(root=True, order='post', deptype=('link', 'run')):
install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
"""
Try to find the spec directly on the configured mirrors
@@ -1937,3 +2062,73 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
continue
return False
def download_single_spec(
concrete_spec, destination, require_cdashid=False, mirror_url=None
):
"""Download the buildcache files for a single concrete spec.
Args:
concrete_spec: concrete spec to be downloaded
destination (str): path where to put the downloaded buildcache
require_cdashid (bool): if False the `.cdashid` file is optional
mirror_url (str): url of the mirror from which to download
"""
tarfile_name = tarball_name(concrete_spec, '.spack')
tarball_dir_name = tarball_directory_name(concrete_spec)
tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
local_tarball_path = os.path.join(destination, tarball_dir_name)
files_to_fetch = [
{
'url': [tarball_path_name],
'path': local_tarball_path,
'required': True,
}, {
'url': [tarball_name(concrete_spec, '.spec.json'),
tarball_name(concrete_spec, '.spec.yaml')],
'path': destination,
'required': True,
}, {
'url': [tarball_name(concrete_spec, '.cdashid')],
'path': destination,
'required': require_cdashid,
},
]
return download_buildcache_entry(files_to_fetch, mirror_url)
class BinaryCacheQuery(object):
"""Callable object to query if a spec is in a binary cache"""
def __init__(self, all_architectures):
"""
Args:
all_architectures (bool): if True consider all the spec for querying,
otherwise restrict to the current default architecture
"""
self.all_architectures = all_architectures
specs = update_cache_and_get_specs()
if not self.all_architectures:
arch = spack.spec.Spec.default_arch()
specs = [s for s in specs if s.satisfies(arch)]
self.possible_specs = specs
def __call__(self, spec, **kwargs):
matches = []
if spec.startswith('/'):
# Matching a DAG hash
query_hash = spec.replace('/', '')
for candidate_spec in self.possible_specs:
if candidate_spec.dag_hash().startswith(query_hash):
matches.append(candidate_spec)
else:
# Matching a spec constraint
matches = [
s for s in self.possible_specs if s.satisfies(spec)
]
return matches
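
The new `BinaryCacheQuery` above is a drop-in `query_fn` for `spack.store.find`, so querying a binary cache reads the same as querying the local store. A brief sketch (the hash prefix is a placeholder):

```python
import spack.binary_distribution as bindist
import spack.store

# Constraints are passed through to the query function as strings, so the
# "/<hash-prefix>" form selects buildcache entries by DAG hash.
query = bindist.BinaryCacheQuery(all_architectures=True)
matches = spack.store.find(['/abc123'], multiple=True, query_fn=query)
```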

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import argparse
import contextlib
import fnmatch
import functools
@@ -255,11 +254,6 @@ def _read_metadata(self, package_name):
return data
def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
# TODO: The local import is due to a circular import error. The
# TODO: correct fix for this is a refactor of the API used for
# TODO: binary relocation
import spack.cmd.buildcache
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
# Reconstruct the compiler that we need to use for bootstrapping
compiler_entry = {
@@ -279,16 +273,18 @@ def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
'compilers', [{'compiler': compiler_entry}]
):
spec_str = '/' + pkg_hash
parser = argparse.ArgumentParser()
spack.cmd.buildcache.setup_parser(parser)
install_args = [
'install',
'--sha256', pkg_sha256,
'--only-root',
'-a', '-u', '-o', '-f', spec_str
]
args = parser.parse_args(install_args)
spack.cmd.buildcache.installtarball(args)
query = spack.binary_distribution.BinaryCacheQuery(
all_architectures=True
)
matches = spack.store.find([spec_str], multiple=False, query_fn=query)
for match in matches:
spack.binary_distribution.install_root_node(
match,
allow_root=True,
unsigned=True,
force=True,
sha256=pkg_sha256
)
def _install_and_test(
self, abstract_spec, bincache_platform, bincache_data, test_fn
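
With this change, bootstrapping a binary package reduces to a store query plus `install_root_node`, with the tarball's sha256 verified before extraction. A sketch of the same pattern outside of bootstrap (the hash prefix and digest are placeholders):

```python
import spack.binary_distribution as bindist
import spack.store

query = bindist.BinaryCacheQuery(all_architectures=True)
for match in spack.store.find(['/abc123'], multiple=False, query_fn=query):
    # Install only the root node, checking the tarball checksum first.
    bindist.install_root_node(
        match, allow_root=True, unsigned=True, force=True,
        sha256='e3b0c44298fc1c149afbf4c8996fb924'
               '27ae41e4649b934ca495991b7852b855',
    )
```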

@@ -1412,15 +1412,26 @@ def read_cdashid_from_mirror(spec, mirror_url):
return int(contents)
def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries
tty.debug('Creating buildcache ({0})'.format(
'unsigned' if unsigned else 'signed'))
hashes = env.all_hashes() if env else None
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
push_url = spack.mirror.push_url_from_mirror_url(mirror_url)
spec_kwargs = {'include_root': True, 'include_dependencies': False}
kwargs = {
'force': True,
'allow_root': True,
'unsigned': unsigned
}
bindist.push(matches, push_url, spec_kwargs, **kwargs)
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
try:
unsigned = not sign_binaries
tty.debug('Creating buildcache ({0})'.format(
'unsigned' if unsigned else 'signed'))
spack.cmd.buildcache._createtarball(
env, spec_file=specfile_path, add_deps=False,
output_location=mirror_url, force=True, allow_root=True,
unsigned=unsigned)
_push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
except Exception as inst:
# If the mirror we're pushing to is on S3 and there's some
# permissions problem, for example, we can't just target
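
`push_mirror_contents` now reduces to three core calls: match installed specs against a spec file, resolve the push URL, and push. A sketch of the equivalent standalone sequence (the spec file path and mirror URL are placeholders):

```python
import spack.binary_distribution as bindist
import spack.mirror
import spack.store

# Match installed specs against the contents of a spec file, then push only
# the root nodes (in CI, dependencies are pushed by their own jobs).
matches = spack.store.specfile_matches('/tmp/job.spec.json')
push_url = spack.mirror.push_url_from_mirror_url('s3://my-ci-mirror')
bindist.push(
    matches, push_url,
    {'include_root': True, 'include_dependencies': False},
    force=True, allow_root=True, unsigned=False,
)
```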

@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os
import shutil
import sys
import tempfile
import warnings
import llnl.util.tty as tty
@@ -40,7 +40,7 @@ def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help='buildcache sub-commands')
create = subparsers.add_parser('create', help=createtarball.__doc__)
create = subparsers.add_parser('create', help=create_fn.__doc__)
create.add_argument('-r', '--rel', action='store_true',
help="make all rpaths relative" +
" before creating tarballs.")
@@ -86,9 +86,9 @@ def setup_parser(subparser):
' decide to build a cache for only the package'
' or only the dependencies'))
arguments.add_common_arguments(create, ['specs'])
create.set_defaults(func=createtarball)
create.set_defaults(func=create_fn)
install = subparsers.add_parser('install', help=installtarball.__doc__)
install = subparsers.add_parser('install', help=install_fn.__doc__)
install.add_argument('-f', '--force', action='store_true',
help="overwrite install directory if it exists.")
install.add_argument('-m', '--multiple', action='store_true',
@@ -102,16 +102,11 @@ def setup_parser(subparser):
install.add_argument('-o', '--otherarch', action='store_true',
help="install specs from other architectures" +
" instead of default platform and OS")
# This argument is needed by the bootstrapping logic to verify checksums
install.add_argument('--sha256', help=argparse.SUPPRESS)
install.add_argument(
'--only-root', action='store_true', help=argparse.SUPPRESS
)
arguments.add_common_arguments(install, ['specs'])
install.set_defaults(func=installtarball)
install.set_defaults(func=install_fn)
listcache = subparsers.add_parser('list', help=listspecs.__doc__)
listcache = subparsers.add_parser('list', help=list_fn.__doc__)
arguments.add_common_arguments(listcache, ['long', 'very_long'])
listcache.add_argument('-v', '--variants',
action='store_true',
@@ -121,29 +116,25 @@ def setup_parser(subparser):
help="list specs for all available architectures" +
" instead of default platform and OS")
arguments.add_common_arguments(listcache, ['specs'])
listcache.set_defaults(func=listspecs)
listcache.set_defaults(func=list_fn)
dlkeys = subparsers.add_parser('keys', help=getkeys.__doc__)
dlkeys.add_argument(
keys = subparsers.add_parser('keys', help=keys_fn.__doc__)
keys.add_argument(
'-i', '--install', action='store_true',
help="install Keys pulled from mirror")
dlkeys.add_argument(
keys.add_argument(
'-t', '--trust', action='store_true',
help="trust all downloaded keys")
dlkeys.add_argument('-f', '--force', action='store_true',
help="force new download of keys")
dlkeys.set_defaults(func=getkeys)
keys.add_argument('-f', '--force', action='store_true',
help="force new download of keys")
keys.set_defaults(func=keys_fn)
preview_parser = subparsers.add_parser(
'preview',
help='analyzes an installed spec and reports whether '
'executables and libraries are relocatable'
)
arguments.add_common_arguments(preview_parser, ['installed_specs'])
preview_parser.set_defaults(func=preview)
preview = subparsers.add_parser('preview', help=preview_fn.__doc__)
arguments.add_common_arguments(preview, ['installed_specs'])
preview.set_defaults(func=preview_fn)
# Check if binaries need to be rebuilt on remote mirror
check = subparsers.add_parser('check', help=check_binaries.__doc__)
check = subparsers.add_parser('check', help=check_fn.__doc__)
check.add_argument(
'-m', '--mirror-url', default=None,
help='Override any configured mirrors with this mirror url')
@@ -175,28 +166,28 @@ def setup_parser(subparser):
help="Default to rebuilding packages if errors are encountered " +
"during the process of checking whether rebuilding is needed")
check.set_defaults(func=check_binaries)
check.set_defaults(func=check_fn)
# Download tarball and specfile
dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
dltarball.add_argument(
download = subparsers.add_parser('download', help=download_fn.__doc__)
download.add_argument(
'-s', '--spec', default=None,
help="Download built tarball for spec from mirror")
dltarball.add_argument(
download.add_argument(
'--spec-file', default=None,
help=("Download built tarball for spec (from json or yaml file) " +
"from mirror"))
dltarball.add_argument(
download.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
dltarball.add_argument(
download.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
dltarball.set_defaults(func=get_tarball)
download.set_defaults(func=download_fn)
# Get buildcache name
getbuildcachename = subparsers.add_parser('get-buildcache-name',
help=get_buildcache_name.__doc__)
help=get_buildcache_name_fn.__doc__)
getbuildcachename.add_argument(
'-s', '--spec', default=None,
help='Spec string for which buildcache name is desired')
@@ -204,11 +195,11 @@ def setup_parser(subparser):
'--spec-file', default=None,
help=('Path to spec json or yaml file for which buildcache name is ' +
'desired'))
getbuildcachename.set_defaults(func=get_buildcache_name)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser('save-specfile',
help=save_specfiles.__doc__)
help=save_specfile_fn.__doc__)
savespecfile.add_argument(
'--root-spec', default=None,
help='Root spec of dependent spec')
@@ -221,10 +212,10 @@ def setup_parser(subparser):
savespecfile.add_argument(
'--specfile-dir', default=None,
help='Path to directory where spec yamls should be saved')
savespecfile.set_defaults(func=save_specfiles)
savespecfile.set_defaults(func=save_specfile_fn)
# Copy buildcache from some directory to another mirror url
copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
copy = subparsers.add_parser('copy', help=copy_fn.__doc__)
copy.add_argument(
'--base-dir', default=None,
help='Path to mirror directory (root of existing buildcache)')
@@ -235,10 +226,10 @@ def setup_parser(subparser):
copy.add_argument(
'--destination-url', default=None,
help='Destination mirror url')
copy.set_defaults(func=buildcache_copy)
copy.set_defaults(func=copy_fn)
# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
sync = subparsers.add_parser('sync', help=sync_fn.__doc__)
source = sync.add_mutually_exclusive_group(required=True)
source.add_argument('--src-directory',
metavar='DIRECTORY',
@@ -265,311 +256,115 @@
metavar='MIRROR_URL',
type=str,
help="URL of the destination mirror")
sync.set_defaults(func=buildcache_sync)
sync.set_defaults(func=sync_fn)
# Update buildcache index without copying any additional packages
update_index = subparsers.add_parser(
'update-index', help=buildcache_update_index.__doc__)
'update-index', help=update_index_fn.__doc__)
update_index.add_argument(
'-d', '--mirror-url', default=None, help='Destination mirror url')
update_index.add_argument(
'-k', '--keys', default=False, action='store_true',
help='If provided, key index will be updated as well as package index')
update_index.set_defaults(func=buildcache_update_index)
update_index.set_defaults(func=update_index_fn)
def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
Args:
pkgs (str): spec to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
Return:
list: list of specs
def _matching_specs(args):
"""Return a list of matching specs read from either a spec file (JSON or YAML),
a query over the store or a query over the active environment.
"""
hashes = env.all_hashes() if env else None
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
tty.debug('find_matching_specs: about to parse specs for {0}'.format(pkgs))
specs = spack.cmd.parse_specs(pkgs)
for spec in specs:
matching = spack.store.db.query(spec, hashes=hashes)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
tty.error('%s matches multiple installed packages:' % spec)
for match in matching:
tty.msg('"%s"' % match.format())
has_errors = True
# No installed package matches the query
if len(matching) == 0 and spec is not any:
tty.error('{0} does not match any installed packages.'.format(
spec))
has_errors = True
specs_from_cli.extend(matching)
if has_errors:
tty.die('use one of the matching specs above')
return specs_from_cli
def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
other_arch=False):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
Args:
specs: list of specs to be matched against buildcaches on mirror
allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
specs = bindist.update_cache_and_get_specs()
if not other_arch:
arch = spack.spec.Spec.default_arch()
specs = [s for s in specs if s.satisfies(arch)]
for pkg in pkgs:
matches = []
tty.msg("buildcache spec(s) matching %s \n" % pkg)
for spec in sorted(specs):
if pkg.startswith('/'):
pkghash = pkg.replace('/', '')
if spec.dag_hash().startswith(pkghash):
matches.append(spec)
else:
if spec.satisfies(pkg):
matches.append(spec)
# For each pkg provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matches) > 1:
tty.error('%s matches multiple downloaded packages:' % pkg)
for match in matches:
tty.msg('"%s"' % match.format())
has_errors = True
# No downloaded package matches the query
if len(matches) == 0:
tty.error('%s does not match any downloaded packages.' % pkg)
has_errors = True
specs_from_cli.extend(matches)
if has_errors:
tty.die('use one of the matching specs above')
return specs_from_cli
def _createtarball(env, spec_file=None, packages=None, add_spec=True,
add_deps=True, output_location=os.getcwd(),
signing_key=None, force=False, make_relative=False,
unsigned=False, allow_root=False, rebuild_index=False):
if spec_file:
with open(spec_file, 'r') as fd:
specfile_contents = fd.read()
tty.debug('createtarball read specfile contents:')
tty.debug(specfile_contents)
if spec_file.endswith('.json'):
s = Spec.from_json(specfile_contents)
else:
s = Spec.from_yaml(specfile_contents)
package = '/{0}'.format(s.dag_hash())
matches = find_matching_specs(package, env=env)
elif packages:
matches = find_matching_specs(packages, env=env)
elif env:
matches = [env.specs_by_hash[h] for h in env.concretized_order]
else:
tty.die("build cache file creation requires at least one" +
" installed package spec, an active environment," +
" or else a path to a json or yaml file containing a spec" +
" to install")
specs = set()
mirror = spack.mirror.MirrorCollection().lookup(output_location)
outdir = url_util.format(mirror.push_url)
msg = 'Buildcache files will be output to %s/build_cache' % outdir
tty.msg(msg)
if matches:
tty.debug('Found at least one matching spec')
for match in matches:
tty.debug('examining match {0}'.format(match.format()))
if match.external or match.virtual:
tty.debug('skipping external or virtual spec %s' %
match.format())
else:
lookup = spack.store.db.query_one(match)
if not add_spec:
tty.debug('skipping matching root spec %s' % match.format())
elif lookup is None:
tty.debug('skipping uninstalled matching spec %s' %
match.format())
else:
tty.debug('adding matching spec %s' % match.format())
specs.add(match)
if not add_deps:
continue
tty.debug('recursing dependencies')
for d, node in match.traverse(order='post',
depth=True,
deptype=('link', 'run')):
# skip root, since it's handled above
if d == 0:
continue
lookup = spack.store.db.query_one(node)
if node.external or node.virtual:
tty.debug('skipping external or virtual dependency %s' %
node.format())
elif lookup is None:
tty.debug('skipping uninstalled depenendency %s' %
node.format())
else:
tty.debug('adding dependency %s' % node.format())
specs.add(node)
tty.debug('writing tarballs to %s/build_cache' % outdir)
for spec in specs:
tty.debug('creating binary cache file for package %s ' % spec.format())
try:
bindist.build_tarball(spec, outdir, force, make_relative,
unsigned, allow_root, signing_key,
rebuild_index)
except bindist.NoOverwriteException as e:
tty.warn(e)
def createtarball(args):
"""create a binary package from an existing install"""
# restrict matching to current environment if one is active
env = ev.active_environment()
hashes = env.all_hashes() if env else None
if args.spec_file:
return spack.store.specfile_matches(args.spec_file, hashes=hashes)
output_location = None
if args.specs:
constraints = spack.cmd.parse_specs(args.specs)
return spack.store.find(constraints, hashes=hashes)
if env:
return [env.specs_by_hash[h] for h in env.concretized_order]
tty.die("build cache file creation requires at least one" +
" installed package spec, an active environment," +
" or else a path to a json or yaml file containing a spec" +
" to install")
def _concrete_spec_from_args(args):
spec_str, specfile_path = args.spec, args.spec_file
if not spec_str and not specfile_path:
tty.error('must provide either spec string or path to YAML or JSON specfile')
sys.exit(1)
if spec_str:
try:
constraints = spack.cmd.parse_specs(spec_str)
spec = spack.store.find(constraints)[0]
spec.concretize()
except SpecError as spec_error:
tty.error('Unable to concretize spec {0}'.format(spec_str))
tty.debug(spec_error)
sys.exit(1)
return spec
return Spec.from_specfile(specfile_path)
def create_fn(args):
"""create a binary package and push it to a mirror"""
if args.directory:
output_location = args.directory
push_url = spack.mirror.push_url_from_directory(args.directory)
# User meant to provide a path to a local directory.
# Ensure that they did not accidentally pass a URL.
scheme = url_util.parse(output_location, scheme='<missing>').scheme
if scheme != '<missing>':
raise ValueError(
'"--directory" expected a local path; got a URL, instead')
if args.mirror_name:
push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name)
# User meant to provide a path to a local directory.
# Ensure that the mirror lookup does not mistake it for a named mirror.
output_location = 'file://' + output_location
if args.mirror_url:
push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url)
elif args.mirror_name:
output_location = args.mirror_name
matches = _matching_specs(args)
# User meant to provide the name of a preconfigured mirror.
# Ensure that the mirror lookup actually returns a named mirror.
result = spack.mirror.MirrorCollection().lookup(output_location)
if result.name == "<unnamed>":
raise ValueError(
'no configured mirror named "{name}"'.format(
name=output_location))
elif args.mirror_url:
output_location = args.mirror_url
# User meant to provide a URL for an anonymous mirror.
# Ensure that they actually provided a URL.
scheme = url_util.parse(output_location, scheme='<missing>').scheme
if scheme == '<missing>':
raise ValueError(
'"{url}" is not a valid URL'.format(url=output_location))
add_spec = ('package' in args.things_to_install)
add_deps = ('dependencies' in args.things_to_install)
_createtarball(env, spec_file=args.spec_file, packages=args.specs,
add_spec=add_spec, add_deps=add_deps,
output_location=output_location, signing_key=args.key,
force=args.force, make_relative=args.rel,
unsigned=args.unsigned, allow_root=args.allow_root,
rebuild_index=args.rebuild_index)
msg = 'Pushing binary packages to {0}/build_cache'.format(push_url)
tty.msg(msg)
specs_kwargs = {
'include_root': 'package' in args.things_to_install,
'include_dependencies': 'dependencies' in args.things_to_install
}
kwargs = {
'key': args.key,
'force': args.force,
'relative': args.rel,
'unsigned': args.unsigned,
'allow_root': args.allow_root,
'regenerate_index': args.rebuild_index
}
bindist.push(matches, push_url, specs_kwargs, **kwargs)
def installtarball(args):
def install_fn(args):
"""install from a binary package"""
if not args.specs:
tty.die("build cache file installation requires" +
" at least one package spec argument")
pkgs = set(args.specs)
matches = match_downloaded_specs(pkgs, args.multiple, args.force,
args.otherarch)
tty.die("a spec argument is required to install from a buildcache")
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
for match in matches:
install_tarball(match, args)
bindist.install_single_spec(
match,
allow_root=args.allow_root,
unsigned=args.unsigned,
force=args.force
)
def install_tarball(spec, args):
s = Spec(spec)
if s.external or s.virtual:
tty.warn("Skipping external or virtual package %s" % spec.format())
return
# This argument is used only for bootstrapping specs without signatures,
# since we need to check the sha256 of each tarball
if not args.only_root:
for d in s.dependencies(deptype=('link', 'run')):
tty.msg("Installing buildcache for dependency spec %s" % d)
install_tarball(d, args)
package = spack.repo.get(spec)
if s.concrete and package.installed and not args.force:
tty.warn("Package for spec %s already installed." % spec.format())
else:
tarball = bindist.download_tarball(spec)
if tarball:
if args.sha256:
checker = spack.util.crypto.Checker(args.sha256)
msg = ('cannot verify checksum for "{0}"'
' [expected={1}]')
msg = msg.format(tarball, args.sha256)
if not checker.check(tarball):
raise spack.binary_distribution.NoChecksumException(msg)
tty.debug('Verified SHA256 checksum of the build cache')
tty.msg('Installing buildcache for spec %s' % spec.format())
bindist.extract_tarball(spec, tarball, args.allow_root,
args.unsigned, args.force)
spack.hooks.post_install(spec)
spack.store.db.add(spec, spack.store.layout)
else:
tty.die('Download of binary cache file for spec %s failed.' %
spec.format())
def listspecs(args):
def list_fn(args):
"""list binary packages available from mirrors"""
specs = bindist.update_cache_and_get_specs()
try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
tty.error(e)
if not args.allarch:
arch = spack.spec.Spec.default_arch()
@@ -587,19 +382,17 @@ def listspecs(args):
display_specs(specs, args, all_headers=True)
def getkeys(args):
def keys_fn(args):
"""get public keys available on mirrors"""
bindist.get_keys(args.install, args.trust, args.force)
def preview(args):
"""Print a status tree of the selected specs that shows which nodes are
relocatable and which might not be.
Args:
args: command line arguments
def preview_fn(args):
"""analyze an installed spec and reports whether executables
and libraries are relocatable
"""
specs = find_matching_specs(args.specs, allow_multiple_matches=True)
constraints = spack.cmd.parse_specs(args.specs)
specs = spack.store.find(constraints, multiple=True)
# Cycle over the specs that match
for spec in specs:
@@ -608,7 +401,7 @@ def preview(args):
print(spec.tree(status_fn=spack.relocate.is_relocatable))
def check_binaries(args):
def check_fn(args):
"""Check specs (either a single spec from --spec, or else the full set
of release specs) against remote binary mirror(s) to see if any need
to be rebuilt. This command uses the process exit code to indicate
@@ -616,7 +409,7 @@ def check_binaries(args):
one of the indicated specs needs to be rebuilt.
"""
if args.spec or args.spec_file:
specs = [get_concrete_spec(args)]
specs = [_concrete_spec_from_args(args)]
else:
env = spack.cmd.require_active_env(cmd_name='buildcache')
env.concretize()
@@ -643,34 +436,7 @@
configured_mirrors, specs, args.output_file, args.rebuild_on_error))
def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
mirror_url=None):
tarfile_name = bindist.tarball_name(concrete_spec, '.spack')
tarball_dir_name = bindist.tarball_directory_name(concrete_spec)
tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
local_tarball_path = os.path.join(local_dest, tarball_dir_name)
files_to_fetch = [
{
'url': [tarball_path_name],
'path': local_tarball_path,
'required': True,
}, {
'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
bindist.tarball_name(concrete_spec, '.spec.yaml')],
'path': local_dest,
'required': True,
}, {
'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
'path': local_dest,
'required': require_cdashid,
},
]
return bindist.download_buildcache_entry(files_to_fetch, mirror_url)
def get_tarball(args):
def download_fn(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
@@ -685,54 +451,30 @@ def get_tarball(args):
tty.msg('No download path provided, exiting')
sys.exit(0)
spec = get_concrete_spec(args)
result = download_buildcache_files(spec, args.path, args.require_cdashid)
spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(
spec, args.path, require_cdashid=args.require_cdashid
)
if not result:
sys.exit(1)
def get_concrete_spec(args):
spec_str = args.spec
spec_yaml_path = args.spec_file
if not spec_str and not spec_yaml_path:
tty.msg('Must provide either spec string or path to ' +
'yaml to concretize spec')
sys.exit(1)
if spec_str:
try:
spec = find_matching_specs(spec_str)[0]
spec.concretize()
except SpecError as spec_error:
tty.error('Unable to concrectize spec {0}'.format(args.spec))
tty.debug(spec_error)
sys.exit(1)
return spec
with open(spec_yaml_path, 'r') as fd:
return Spec.from_yaml(fd.read())
def get_buildcache_name(args):
def get_buildcache_name_fn(args):
"""Get name (prefix) of buildcache entries for this spec"""
spec = get_concrete_spec(args)
spec = _concrete_spec_from_args(args)
buildcache_name = bindist.tarball_name(spec, '')
print('{0}'.format(buildcache_name))
sys.exit(0)
def save_specfiles(args):
def save_specfile_fn(args):
"""Get full spec for dependencies, relative to root spec, and write them
to files in the specified output directory. Uses exit code to signal
success or failure. An exit code of zero means the command was likely
successful. If any errors or exceptions are encountered, or if expected
command-line arguments are not provided, then the exit code will be
non-zero."""
non-zero.
"""
if not args.root_spec and not args.root_specfile:
tty.msg('No root spec provided, exiting.')
sys.exit(1)
@@ -759,12 +501,15 @@ def save_specfiles(args):
sys.exit(0)
def buildcache_copy(args):
def copy_fn(args):
"""Copy a buildcache entry and all its files from one mirror, given as
'--base-dir', to some other mirror, specified as '--destination-url'.
The specific buildcache entry to be copied from one location to the
other is identified using the '--spec-file' argument."""
# TODO: This sub-command should go away once #11117 is merged
# TODO: Remove after v0.18.0 release
msg = ('"spack buildcache copy" is deprecated and will be removed from '
'Spack starting in v0.19.0')
warnings.warn(msg)
if not args.spec_file:
tty.msg('No spec yaml provided, exiting.')
@@ -839,7 +584,7 @@ def buildcache_copy(args):
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
def buildcache_sync(args):
def sync_fn(args):
""" Syncs binaries (and associated metadata) from one mirror to another.
Requires an active environment in order to know which specs to sync.
@@ -973,7 +718,7 @@ def update_index(mirror_url, update_keys=False):
bindist.generate_key_index(keys_url)
def buildcache_update_index(args):
def update_index_fn(args):
"""Update a buildcache index."""
outdir = '.'
if args.mirror_url:

@@ -397,8 +397,12 @@ def ci_rebuild(args):
tty.debug('Getting {0} buildcache from {1}'.format(
job_spec_pkg_name, matching_mirror))
tty.debug('Downloading to {0}'.format(build_cache_dir))
buildcache.download_buildcache_files(
job_spec, build_cache_dir, False, matching_mirror)
bindist.download_single_spec(
job_spec,
build_cache_dir,
require_cdashid=False,
mirror_url=matching_mirror
)
# Now we are done and successful
sys.exit(0)
@@ -553,8 +557,8 @@
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
sign_binaries)
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
@@ -568,8 +572,8 @@
# prefix is set)
if pipeline_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
sign_binaries)
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(

@@ -644,6 +644,35 @@ def _add_single_spec(spec, mirror, mirror_stats):
mirror_stats.error()
def push_url_from_directory(output_directory):
"""Given a directory in the local filesystem, return the URL on
which to push binary packages.
"""
scheme = url_util.parse(output_directory, scheme='<missing>').scheme
if scheme != '<missing>':
raise ValueError('expected a local path, but got a URL instead')
mirror_url = 'file://' + output_directory
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
def push_url_from_mirror_name(mirror_name):
"""Given a mirror name, return the URL on which to push binary packages."""
mirror = spack.mirror.MirrorCollection().lookup(mirror_name)
if mirror.name == "<unnamed>":
raise ValueError('no mirror named "{0}"'.format(mirror_name))
return url_util.format(mirror.push_url)
def push_url_from_mirror_url(mirror_url):
"""Given a mirror URL, return the URL on which to push binary packages."""
scheme = url_util.parse(mirror_url, scheme='<missing>').scheme
if scheme == '<missing>':
raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""

@@ -1863,6 +1863,15 @@ def to_yaml(self, stream=None, hash=ht.dag_hash):
def to_json(self, stream=None, hash=ht.dag_hash):
return sjson.dump(self.to_dict(hash), stream)
@staticmethod
def from_specfile(path):
"""Construct a spec from aJSON or YAML spec file path"""
with open(path, 'r') as fd:
file_content = fd.read()
if path.endswith('.json'):
return Spec.from_json(file_content)
return Spec.from_yaml(file_content)
@staticmethod
def from_node_dict(node):
spec = Spec()
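
`from_specfile` dispatches on the file extension, so callers no longer branch on `.json` vs `.yaml` themselves. For instance (paths are placeholders):

```python
from spack.spec import Spec

s = Spec.from_specfile('/tmp/zlib.spec.json')  # parsed as JSON
t = Spec.from_specfile('/tmp/zlib.spec.yaml')  # any other extension: YAML
```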

@@ -29,6 +29,7 @@
import spack.config
import spack.database
import spack.directory_layout
import spack.error
import spack.paths
import spack.util.path
@@ -284,6 +285,75 @@ def _construct_upstream_dbs_from_install_roots(
return accumulated_upstream_dbs
def find(constraints, multiple=False, query_fn=None, **kwargs):
"""Return a list of specs matching the constraints passed as inputs.
At least one spec per constraint must match, otherwise the function
will error with an appropriate message.
By default, this function queries the current store, but a custom query
function can be passed to hit any other source of concretized specs
(e.g. a binary cache).
The query function must accept a spec as its first argument.
Args:
constraints (List[spack.spec.Spec]): specs to be matched against
installed packages
multiple (bool): if True multiple matches per constraint are admitted
query_fn (Callable): query function to get matching specs. By default,
``spack.store.db.query``
**kwargs: keyword arguments forwarded to the query function
Return:
List of matching specs
"""
# Normalize input to list of specs
if isinstance(constraints, six.string_types):
constraints = [spack.spec.Spec(constraints)]
matching_specs, errors = [], []
query_fn = query_fn or spack.store.db.query
for spec in constraints:
current_matches = query_fn(spec, **kwargs)
# For each spec provided, make sure it refers to only one package.
if not multiple and len(current_matches) > 1:
msg_fmt = '"{0}" matches multiple packages: [{1}]'
errors.append(
msg_fmt.format(spec, ', '.join([m.format() for m in current_matches]))
)
# No installed package matches the query
if len(current_matches) == 0 and spec is not any:
msg_fmt = '"{0}" does not match any installed packages'
errors.append(msg_fmt.format(spec))
matching_specs.extend(current_matches)
if errors:
raise MatchError(
message="errors occurred when looking for specs in the store",
long_message='\n'.join(errors)
)
return matching_specs
def specfile_matches(filename, **kwargs):
"""Same as find but reads the query from a spec file.
Args:
filename (str): YAML or JSON file from which to read the query.
**kwargs: keyword arguments forwarded to "find"
Return:
List of matches
"""
query = [spack.spec.Spec.from_specfile(filename)]
return spack.store.find(query, **kwargs)
@contextlib.contextmanager
def use_store(store_or_path):
"""Use the store passed as argument within the context manager.
@@ -314,3 +384,7 @@ def use_store(store_or_path):
store = original_store
db, layout = original_store.db, original_store.layout
root, unpadded_root = original_store.root, original_store.unpadded_root
class MatchError(spack.error.SpackError):
"""Error occurring when trying to match specs in store against a constraint"""

@@ -22,13 +22,12 @@ def test_build_tarball_overwrite(
install(str(spec))
# Runs fine the first time, throws the second time
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
# Should work fine with force=True
spack.binary_distribution.build_tarball(
spec, '.', force=True, unsigned=True)
spack.binary_distribution._build_tarball(spec, '.', force=True, unsigned=True)
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.json file
@@ -38,4 +37,4 @@
spack.binary_distribution.tarball_name(spec, '.spack')))
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
spack.binary_distribution._build_tarball(spec, '.', unsigned=True)

@@ -12,8 +12,8 @@
from jsonschema import ValidationError, validate
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.cmd.buildcache as buildcache
import spack.compilers as compilers
import spack.config
import spack.environment as ev
@@ -897,11 +897,11 @@ def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
set_env_var('SPACK_COMPILER_ACTION', 'NONE')
set_env_var('SPACK_REMOTE_MIRROR_URL', mirror_url)
def fake_dl_method(spec, dest, require_cdashid, m_url=None):
def fake_dl_method(spec, *args, **kwargs):
print('fake download buildcache {0}'.format(spec.name))
monkeypatch.setattr(
buildcache, 'download_buildcache_files', fake_dl_method)
spack.binary_distribution, 'download_single_spec', fake_dl_method)
ci_out = ci_cmd('rebuild', output=str)
@@ -970,8 +970,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
install_cmd('--keep-stage', json_path)
# env, spec, json_path, mirror_url, build_id, sign_binaries
ci.push_mirror_contents(
env, concrete_spec, json_path, mirror_url, True)
ci.push_mirror_contents(env, json_path, mirror_url, True)
ci.write_cdashid_to_mirror('42', concrete_spec, mirror_url)
@@ -1063,23 +1062,20 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
def faked(env, spec_file=None, packages=None, add_spec=True,
add_deps=True, output_location=os.getcwd(),
signing_key=None, force=False, make_relative=False,
unsigned=False, allow_root=False, rebuild_index=False):
def failing_access(*args, **kwargs):
raise Exception('Error: Access Denied')
import spack.cmd.buildcache as buildcache
monkeypatch.setattr(buildcache, '_createtarball', faked)
monkeypatch.setattr(spack.ci, '_push_mirror_contents', failing_access)
# Input doesn't matter, as we are faking exceptional output
url = 'fakejunk'
ci.push_mirror_contents(None, None, None, url, None)
ci.push_mirror_contents(None, None, url, None)
captured = capsys.readouterr()
std_out = captured[0]
expect_msg = 'Permission problem writing to {0}'.format(url)
assert(expect_msg in std_out)
assert expect_msg in std_out
def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,

@@ -909,3 +909,18 @@ def test_database_works_with_empty_dir(tmpdir):
db.query()
# Check that reading an empty directory didn't create a new index.json
assert not os.path.exists(db._index_path)
@pytest.mark.parametrize('query_arg,exc_type,msg_str', [
(['callpath'], spack.store.MatchError, 'matches multiple packages'),
(['tensorflow'], spack.store.MatchError, 'does not match any')
])
def test_store_find_failures(database, query_arg, exc_type, msg_str):
with pytest.raises(exc_type) as exc_info:
spack.store.find(query_arg, multiple=False)
assert msg_str in str(exc_info.value)
def test_store_find_accept_string(database):
result = spack.store.find('callpath', multiple=True)
assert len(result) == 3

@@ -511,7 +511,7 @@ _spack_buildcache_create() {
_spack_buildcache_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch --sha256 --only-root"
SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch"
else
_all_packages
fi