buildcache: Add sub-commands needed by release workflow

Adds four new sub-commands to the buildcache command:

1. save-yaml: Takes a root spec and a list of dependent spec names,
along with a directory in which to save yaml files, and writes out
the full spec.yaml for each of the dependent specs.  The root spec
only needs to be concretized once; it is then indexed by the names
of the dependent specs.
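
A minimal sketch of the equivalent Python, using the
save_dependency_spec_yamls() helper added in the spec.py hunk below
(the root spec 'openmpi', its dependencies, and the output directory
are hypothetical):

    from spack.spec import Spec, save_dependency_spec_yamls

    # Concretize the root spec once, then write one yaml file per
    # requested dependency into ./yamls (hypothetical directory).
    root = Spec('openmpi')  # hypothetical root spec
    root.concretize()
    save_dependency_spec_yamls(
        root.to_yaml(all_deps=True), './yamls', ['hwloc', 'libxml2'])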

2. check: Checks a spec (given either as an abstract spec or as a
full spec.yaml) against a remote mirror to see if it needs to be
rebuilt.  Compares the full_hash stored on the remote mirror with
the full_hash computed locally to determine whether the spec needs
to be rebuilt.  Can also generate the list of specs to check against
the remote mirror by expanding the set of release specs expressed in
etc/spack/defaults/release.yaml.
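
A minimal sketch of the underlying comparison, using needs_rebuild()
from the binary_distribution.py hunk below (the spec and mirror URL
are hypothetical):

    import spack.binary_distribution as bindist
    from spack.spec import Spec

    # True means the full_hash on the mirror is missing or does not
    # match the locally computed full_hash, so a rebuild is needed.
    spec = Spec('zlib')  # hypothetical spec
    spec.concretize()
    rebuild = bindist.needs_rebuild(
        spec, 'https://example.com/mirror', rebuild_on_errors=False)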

3. get-buildcache-name: Makes it possible to read the spec.yaml file
directly on a remote or local mirror by providing the path where the
file should live, based on concretizing the spec.
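
For reference, the printed name is the tarball name with an empty
extension, as in get_buildcache_name() below (the spec is
hypothetical):

    import spack.binary_distribution as bindist
    from spack.spec import Spec

    spec = Spec('zlib')  # hypothetical spec
    spec.concretize()
    # Prefix shared by the buildcache entries for this spec ...
    name = bindist.tarball_name(spec, '')
    # ... from which the mirror-relative spec.yaml name follows.
    yaml_name = bindist.tarball_name(spec, '.spec.yaml')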

4. download: Downloads all buildcache files associated with a spec
on a remote mirror, including any .spack, .spec.yaml, and .cdashid
files that might exist.  Puts the files into the local path provided
on the command line, and organizes them in the same hierarchy found
on the remote mirror.
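
A minimal sketch of the file descriptions the command assembles
before calling download_buildcache_entry(), mirroring get_tarball()
in the buildcache.py hunk below (the spec and destination path are
hypothetical):

    import os

    import spack.binary_distribution as bindist
    from spack.spec import Spec

    spec = Spec('zlib')  # hypothetical spec
    spec.concretize()
    dest = './downloads'  # hypothetical local path
    files_to_fetch = [
        # The .spack tarball keeps the mirror's directory hierarchy.
        {'url': os.path.join(bindist.tarball_directory_name(spec),
                             bindist.tarball_name(spec, '.spack')),
         'path': os.path.join(dest,
                              bindist.tarball_directory_name(spec)),
         'required': True},
        {'url': bindist.tarball_name(spec, '.spec.yaml'),
         'path': dest,
         'required': True},
        # Only required when --require-cdashid is given.
        {'url': bindist.tarball_name(spec, '.cdashid'),
         'path': dest,
         'required': False},
    ]
    ok = bindist.download_buildcache_entry(files_to_fetch)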

This commit also refactors lib/spack/spack/util/web.py to expose
functionality allowing other modules to read data from a URL.
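
A minimal usage sketch of the newly exposed helper (the mirror URL
is hypothetical; read_from_url() raises URLError on connection
problems, as needs_rebuild() below anticipates):

    from six.moves.urllib.error import URLError

    from spack.util.web import read_from_url

    try:
        contents = read_from_url(
            'https://example.com/mirror/build_cache/index.html')
    except URLError:
        contents = None  # mirror unreachable or file missing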
Author:    Scott Wittenburg
Date:      2018-12-17 16:23:16 -07:00
Committer: Peter Scheibel
Parent:    be4b95ee30
Commit:    fce1c4133f
6 changed files with 610 additions and 79 deletions

lib/spack/spack/binary_distribution.py

@@ -12,7 +12,9 @@
import hashlib
from contextlib import closing
import ruamel.yaml as yaml
import json
from six.moves.urllib.error import URLError
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, install_tree, get_filetype
@@ -21,12 +23,17 @@
import spack.fetch_strategy as fs
import spack.util.gpg as gpg_util
import spack.relocate as relocate
import spack.util.spack_yaml as syaml
from spack.spec import Spec
from spack.stage import Stage
from spack.util.gpg import Gpg
from spack.util.web import spider
from spack.util.web import spider, read_from_url
from spack.util.executable import ProcessError
_build_cache_relative_path = 'build_cache'
class NoOverwriteException(Exception):
"""
Raised when a file exists and must be overwritten.
@@ -90,11 +97,19 @@ def has_gnupg2():
return False
def build_cache_relative_path():
return _build_cache_relative_path
def build_cache_directory(prefix):
return os.path.join(prefix, build_cache_relative_path())
def buildinfo_file_name(prefix):
"""
Filename of the binary package meta-data file
"""
name = prefix + "/.spack/binary_distribution"
name = os.path.join(prefix, ".spack/binary_distribution")
return name
@@ -105,7 +120,7 @@ def read_buildinfo_file(prefix):
filename = buildinfo_file_name(prefix)
with open(filename, 'r') as inputfile:
content = inputfile.read()
buildinfo = yaml.load(content)
buildinfo = syaml.load(content)
return buildinfo
@@ -162,7 +177,7 @@ def write_buildinfo_file(prefix, workdir, rel=False):
buildinfo['relocate_links'] = link_to_relocate
filename = buildinfo_file_name(workdir)
with open(filename, 'w') as outfile:
outfile.write(yaml.dump(buildinfo, default_flow_style=True))
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
def tarball_directory_name(spec):
@@ -235,35 +250,50 @@ def sign_tarball(key, force, specfile_path):
Gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
def generate_index(outdir, indexfile_path):
f = open(indexfile_path, 'w')
def _generate_html_index(path_list, output_path):
f = open(output_path, 'w')
header = """<html>\n
<head>\n</head>\n
<list>\n"""
footer = "</list>\n</html>\n"
paths = os.listdir(outdir + '/build_cache')
f.write(header)
for path in paths:
for path in path_list:
rel = os.path.basename(path)
f.write('<li><a href="%s"> %s</a>\n' % (rel, rel))
f.write(footer)
f.close()
def generate_package_index(build_cache_dir):
yaml_list = os.listdir(build_cache_dir)
path_list = [os.path.join(build_cache_dir, l) for l in yaml_list]
index_html_path_tmp = os.path.join(build_cache_dir, 'index.html.tmp')
index_html_path = os.path.join(build_cache_dir, 'index.html')
_generate_html_index(path_list, index_html_path_tmp)
shutil.move(index_html_path_tmp, index_html_path)
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
allow_root=False, key=None):
allow_root=False, key=None, regenerate_index=False):
"""
Build a tarball from given spec and put it into the directory structure
used at the mirror (following <tarball_directory_name>).
"""
if not spec.concrete:
raise ValueError('spec must be concrete to build tarball')
# set up some paths
build_cache_dir = build_cache_directory(outdir)
tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_dir = os.path.join(outdir, "build_cache",
tarfile_dir = os.path.join(build_cache_dir,
tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
mkdirp(tarfile_dir)
spackfile_path = os.path.join(
outdir, "build_cache", tarball_path_name(spec, '.spack'))
build_cache_dir, tarball_path_name(spec, '.spack'))
if os.path.exists(spackfile_path):
if force:
os.remove(spackfile_path)
@@ -275,8 +305,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
specfile_name = tarball_name(spec, '.spec.yaml')
specfile_path = os.path.realpath(
os.path.join(outdir, "build_cache", specfile_name))
indexfile_path = os.path.join(outdir, "build_cache", "index.html")
os.path.join(build_cache_dir, specfile_name))
if os.path.exists(specfile_path):
if force:
os.remove(specfile_path)
@@ -319,7 +349,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
spec_dict = {}
with open(spec_file, 'r') as inputfile:
content = inputfile.read()
spec_dict = yaml.load(content)
spec_dict = syaml.load(content)
bchecksum = {}
bchecksum['hash_algorithm'] = 'sha256'
bchecksum['hash'] = checksum
@@ -330,8 +360,15 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.layout.root)
spec_dict['buildinfo'] = buildinfo
spec_dict['full_hash'] = spec.full_hash()
tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
spec_dict['full_hash'], spec.name, specfile_path))
tty.debug(spec.tree())
with open(specfile_path, 'w') as outfile:
outfile.write(yaml.dump(spec_dict))
outfile.write(syaml.dump(spec_dict))
# sign the tarball and spec file with gpg
if not unsigned:
sign_tarball(key, force, specfile_path)
@@ -349,9 +386,9 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
os.remove('%s.asc' % specfile_path)
# create an index.html for the build_cache directory so specs can be found
if os.path.exists(indexfile_path):
os.remove(indexfile_path)
generate_index(outdir, indexfile_path)
if regenerate_index:
generate_package_index(build_cache_dir)
return None
@@ -365,8 +402,8 @@ def download_tarball(spec):
tty.die("Please add a spack mirror to allow " +
"download of pre-compiled packages.")
tarball = tarball_path_name(spec, '.spack')
for key in mirrors:
url = mirrors[key] + "/build_cache/" + tarball
for mirror_name, mirror_url in mirrors.items():
url = mirror_url + '/' + _build_cache_relative_path + '/' + tarball
# stage the tarball into standard place
stage = Stage(url, name="build_cache", keep=True)
try:
@@ -493,7 +530,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
spec_dict = {}
with open(specfile_path, 'r') as inputfile:
content = inputfile.read()
spec_dict = yaml.load(content)
spec_dict = syaml.load(content)
bchecksum = spec_dict['binary_cache_checksum']
# if the checksums don't match don't install
@@ -563,10 +600,9 @@ def get_specs(force=False):
path = str(spack.architecture.sys_type())
urls = set()
for key in mirrors:
url = mirrors[key]
if url.startswith('file'):
mirror = url.replace('file://', '') + '/build_cache'
for mirror_name, mirror_url in mirrors.items():
if mirror_url.startswith('file'):
mirror = mirror_url.replace('file://', '') + "/" + _build_cache_relative_path
tty.msg("Finding buildcaches in %s" % mirror)
if os.path.exists(mirror):
files = os.listdir(mirror)
@@ -575,8 +611,8 @@ def get_specs(force=False):
link = 'file://' + mirror + '/' + file
urls.add(link)
else:
tty.msg("Finding buildcaches on %s" % url)
p, links = spider(url + "/build_cache")
tty.msg("Finding buildcaches on %s" % mirror_url)
p, links = spider(mirror_url + "/" + _build_cache_relative_path)
for link in links:
if re.search("spec.yaml", link) and re.search(path, link):
urls.add(link)
@@ -595,7 +631,7 @@ def get_specs(force=False):
# read the spec from the build cache file. All specs
# in build caches are concrete (as they are built) so
# we need to mark this spec concrete on read-in.
spec = spack.spec.Spec.from_yaml(f)
spec = Spec.from_yaml(f)
spec._mark_concrete()
_cached_specs.append(spec)
@@ -612,10 +648,10 @@ def get_keys(install=False, trust=False, force=False):
"download of build caches.")
keys = set()
for key in mirrors:
url = mirrors[key]
if url.startswith('file'):
mirror = url.replace('file://', '') + '/build_cache'
for mirror_name, mirror_url in mirrors.items():
if mirror_url.startswith('file'):
mirror = os.path.join(
mirror_url.replace('file://', ''), _build_cache_relative_path)
tty.msg("Finding public keys in %s" % mirror)
files = os.listdir(mirror)
for file in files:
@@ -623,8 +659,8 @@ def get_keys(install=False, trust=False, force=False):
link = 'file://' + mirror + '/' + file
keys.add(link)
else:
tty.msg("Finding public keys on %s" % url)
p, links = spider(url + "/build_cache", depth=1)
tty.msg("Finding public keys on %s" % mirror_url)
p, links = spider(mirror_url + "/build_cache", depth=1)
for link in links:
if re.search(r'\.key', link):
keys.add(link)
@@ -645,3 +681,148 @@ def get_keys(install=False, trust=False, force=False):
else:
tty.msg('Will not add this key to trusted keys. '
'Use -t to install all downloaded keys')
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
if not spec.concrete:
raise ValueError('spec must be concrete to check against mirror')
pkg_name = spec.name
pkg_version = spec.version
pkg_hash = spec.dag_hash()
pkg_full_hash = spec.full_hash()
tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
tty.debug(spec.tree())
# Try to retrieve the .spec.yaml directly, based on the known
# format of the name, in order to determine if the package
# needs to be rebuilt.
build_cache_dir = build_cache_directory(mirror_url)
spec_yaml_file_name = tarball_name(spec, '.spec.yaml')
file_path = os.path.join(build_cache_dir, spec_yaml_file_name)
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
spec.short_spec, '' if rebuild_on_errors else 'not ')
try:
yaml_contents = read_from_url(file_path)
except URLError as url_err:
err_msg = [
'Unable to determine whether {0} needs rebuilding,',
' caught URLError attempting to read from {1}.',
]
tty.error(''.join(err_msg).format(spec.short_spec, file_path))
tty.debug(url_err)
tty.warn(result_of_error)
return rebuild_on_errors
if not yaml_contents:
tty.error('Reading {0} returned nothing'.format(file_path))
tty.warn(result_of_error)
return rebuild_on_errors
spec_yaml = syaml.load(yaml_contents)
# If either the full_hash didn't exist in the .spec.yaml file, or it
# did, but didn't match the one we computed locally, then we should
# just rebuild. This can be simplified once the dag_hash and the
# full_hash become the same thing.
if ('full_hash' not in spec_yaml or
spec_yaml['full_hash'] != pkg_full_hash):
if 'full_hash' in spec_yaml:
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
spec_yaml['full_hash'], pkg_full_hash)
else:
reason = 'full_hash was missing from remote spec.yaml'
tty.msg('Rebuilding {0}, reason: {1}'.format(
spec.short_spec, reason))
tty.msg(spec.tree())
return True
return False
def check_specs_against_mirrors(mirrors, specs, output_file=None,
rebuild_on_errors=False):
"""Check all the given specs against buildcaches on the given mirrors and
determine if any of the specs need to be rebuilt. Reasons for needing to
rebuild include binary cache for spec isn't present on a mirror, or it is
present but the full_hash has changed since last time spec was built.
Arguments:
mirrors (dict): Mirrors to check against
specs (iterable): Specs to check against mirrors
output_file (string): Path to output file to be written. If provided,
mirrors with missing or out-of-date specs will be formatted as a
JSON object and written to this file.
rebuild_on_errors (boolean): Treat any errors encountered while
checking specs as a signal to rebuild package.
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
"""
rebuilds = {}
for mirror_name, mirror_url in mirrors.items():
tty.msg('Checking for built specs at %s' % mirror_url)
rebuild_list = []
for spec in specs:
if needs_rebuild(spec, mirror_url, rebuild_on_errors):
rebuild_list.append({
'short_spec': spec.short_spec,
'hash': spec.dag_hash()
})
if rebuild_list:
rebuilds[mirror_url] = {
'mirrorName': mirror_name,
'mirrorUrl': mirror_url,
'rebuildSpecs': rebuild_list
}
if output_file:
with open(output_file, 'w') as outf:
outf.write(json.dumps(rebuilds))
return 1 if rebuilds else 0
def _download_buildcache_entry(mirror_root, descriptions):
for description in descriptions:
url = os.path.join(mirror_root, description['url'])
path = description['path']
fail_if_missing = description['required']
mkdirp(path)
stage = Stage(url, name="build_cache", path=path, keep=True)
try:
stage.fetch()
except fs.FetchError:
if fail_if_missing:
tty.error('Failed to download required url {0}'.format(url))
return False
return True
def download_buildcache_entry(file_descriptions):
mirrors = spack.config.get('mirrors')
if len(mirrors) == 0:
tty.die("Please add a spack mirror to allow " +
"download of buildcache entries.")
for mirror_name, mirror_url in mirrors.items():
mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
if _download_buildcache_entry(mirror_root, file_descriptions):
return True
else:
continue
return False

lib/spack/spack/cmd/buildcache.py

@@ -4,14 +4,21 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os
import sys
import llnl.util.tty as tty
import spack.cmd
import spack.environment as ev
from spack.error import SpecError
import spack.config
import spack.repo
import spack.store
import spack.spec
from spack.paths import etc_path
from spack.spec import Spec, save_dependency_spec_yamls
from spack.spec_set import CombinatorialSpecSet
import spack.binary_distribution as bindist
import spack.cmd.common.arguments as arguments
from spack.cmd import display_specs
@@ -43,6 +50,11 @@ def setup_parser(subparser):
create.add_argument('-d', '--directory', metavar='directory',
type=str, default='.',
help="directory in which to save the tarballs.")
create.add_argument('--no-rebuild-index', action='store_true',
default=False, help="skip rebuilding index after " +
"building package(s)")
create.add_argument('-y', '--spec-yaml', default=None,
help='Create buildcache entry for spec from yaml file')
create.add_argument(
'packages', nargs=argparse.REMAINDER,
help="specs of packages to create buildcache for")
@@ -88,6 +100,81 @@ def setup_parser(subparser):
help="force new download of keys")
dlkeys.set_defaults(func=getkeys)
# Check if binaries need to be rebuilt on remote mirror
check = subparsers.add_parser('check', help=check_binaries.__doc__)
check.add_argument(
'-m', '--mirror-url', default=None,
help='Override any configured mirrors with this mirror url')
check.add_argument(
'-o', '--output-file', default=None,
help='File where rebuild info should be written')
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
check.add_argument(
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check")
check.add_argument(
'-s', '--spec', default=None,
help='Check single spec instead of release specs file')
check.add_argument(
'-y', '--spec-yaml', default=None,
help='Check single spec from yaml file instead of release specs file')
check.add_argument(
'--rebuild-on-error', default=False, action='store_true',
help="Default to rebuilding packages if errors are encountered " +
"during the process of checking whether rebuilding is needed")
check.set_defaults(func=check_binaries)
# Download tarball and spec.yaml
dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
dltarball.add_argument(
'-s', '--spec', default=None,
help="Download built tarball for spec from mirror")
dltarball.add_argument(
'-y', '--spec-yaml', default=None,
help="Download built tarball for spec (from yaml file) from mirror")
dltarball.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
dltarball.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
dltarball.set_defaults(func=get_tarball)
# Get buildcache name
getbuildcachename = subparsers.add_parser('get-buildcache-name',
help=get_buildcache_name.__doc__)
getbuildcachename.add_argument(
'-s', '--spec', default=None,
help='Spec string for which buildcache name is desired')
getbuildcachename.add_argument(
'-y', '--spec-yaml', default=None,
help='Path to spec yaml file for which buildcache name is desired')
getbuildcachename.set_defaults(func=get_buildcache_name)
# Given the root spec, save the yaml of the dependent spec to a file
saveyaml = subparsers.add_parser('save-yaml',
help=save_spec_yamls.__doc__)
saveyaml.add_argument(
'-r', '--root-spec', default=None,
help='Root spec of dependent spec')
saveyaml.add_argument(
'-s', '--specs', default=None,
help='List of dependent specs for which saved yaml is desired')
saveyaml.add_argument(
'-y', '--yaml-dir', default=None,
help='Path to directory where spec yamls should be saved')
saveyaml.set_defaults(func=save_spec_yamls)
def find_matching_specs(
pkgs, allow_multiple_matches=False, force=False, env=None):
@@ -106,6 +193,7 @@ def find_matching_specs(
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
tty.debug('find_matching_specs: about to parse specs for {0}'.format(pkgs))
specs = spack.cmd.parse_specs(pkgs)
for spec in specs:
matching = spack.store.db.query(spec, hashes=hashes)
@@ -178,10 +266,22 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
def createtarball(args):
"""create a binary package from an existing install"""
if not args.packages:
if args.spec_yaml:
packages = set()
tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
with open(args.spec_yaml, 'r') as fd:
yaml_text = fd.read()
tty.debug('createtarball read spec yaml:')
tty.debug(yaml_text)
s = Spec.from_yaml(yaml_text)
packages.add('/{0}'.format(s.dag_hash()))
elif args.packages:
packages = args.packages
else:
tty.die("build cache file creation requires at least one" +
" installed package argument")
pkgs = set(args.packages)
" installed package argument or else path to a" +
" yaml file containing a spec to install")
pkgs = set(packages)
specs = set()
outdir = '.'
if args.directory:
@@ -194,7 +294,12 @@ def createtarball(args):
env = ev.get_env(args, 'buildcache create')
matches = find_matching_specs(pkgs, False, False, env=env)
if matches:
tty.msg('Found at least one matching spec')
for match in matches:
tty.msg('examining match {0}'.format(match.format()))
if match.external or match.virtual:
tty.msg('skipping external or virtual spec %s' %
match.format())
@@ -217,7 +322,8 @@ def createtarball(args):
for spec in specs:
tty.msg('creating binary cache file for package %s ' % spec.format())
bindist.build_tarball(spec, outdir, args.force, args.rel,
args.unsigned, args.allow_root, signkey)
args.unsigned, args.allow_root, signkey,
not args.no_rebuild_index)
def installtarball(args):
@@ -233,7 +339,7 @@ def installtarball(args):
def install_tarball(spec, args):
s = spack.spec.Spec(spec)
s = Spec(spec)
if s.external or s.virtual:
tty.warn("Skipping external or virtual package %s" % spec.format())
return
@@ -272,6 +378,151 @@ def getkeys(args):
bindist.get_keys(args.install, args.trust, args.force)
def check_binaries(args):
"""Check specs (either a single spec from --spec, or else the full set
of release specs) against remote binary mirror(s) to see if any need
to be rebuilt. This command uses the process exit code to indicate
its result, specifically, if the exit code is non-zero, then at least
one of the indicated specs needs to be rebuilt.
"""
if args.spec or args.spec_yaml:
specs = [get_concrete_spec(args)]
else:
release_specs_path = os.path.join(
etc_path, 'spack', 'defaults', 'release.yaml')
spec_set = CombinatorialSpecSet.from_file(release_specs_path)
specs = [spec for spec in spec_set]
if not specs:
tty.msg('No specs provided, exiting.')
sys.exit(0)
for spec in specs:
spec.concretize()
# Next see if there are any configured binary mirrors
configured_mirrors = spack.config.get('mirrors', scope=args.scope)
if args.mirror_url:
configured_mirrors = {'additionalMirrorUrl': args.mirror_url}
if not configured_mirrors:
tty.msg('No mirrors provided, exiting.')
sys.exit(0)
sys.exit(bindist.check_specs_against_mirrors(
configured_mirrors, specs, args.output_file, args.rebuild_on_error))
def get_tarball(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components. Normally, just the
tarball and .spec.yaml files are required, but if the --require-cdashid
argument was provided, then a .cdashid file is also required."""
if not args.spec and not args.spec_yaml:
tty.msg('No specs provided, exiting.')
sys.exit(0)
if not args.path:
tty.msg('No download path provided, exiting')
sys.exit(0)
spec = get_concrete_spec(args)
tarfile_name = bindist.tarball_name(spec, '.spack')
tarball_dir_name = bindist.tarball_directory_name(spec)
tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
local_tarball_path = os.path.join(args.path, tarball_dir_name)
files_to_fetch = [
{
'url': tarball_path_name,
'path': local_tarball_path,
'required': True,
}, {
'url': bindist.tarball_name(spec, '.spec.yaml'),
'path': args.path,
'required': True,
}, {
'url': bindist.tarball_name(spec, '.cdashid'),
'path': args.path,
'required': args.require_cdashid,
},
]
result = bindist.download_buildcache_entry(files_to_fetch)
if result:
sys.exit(0)
sys.exit(1)
def get_concrete_spec(args):
spec_str = args.spec
spec_yaml_path = args.spec_yaml
if not spec_str and not spec_yaml_path:
tty.msg('Must provide either spec string or path to ' +
'yaml to concretize spec')
sys.exit(1)
if spec_str:
try:
spec = Spec(spec_str)
spec.concretize()
except SpecError as spec_error:
tty.error('Unable to concretize spec {0}'.format(args.spec))
tty.debug(spec_error)
sys.exit(1)
return spec
with open(spec_yaml_path, 'r') as fd:
return Spec.from_yaml(fd.read())
def get_buildcache_name(args):
"""Get name (prefix) of buildcache entries for this spec"""
spec = get_concrete_spec(args)
buildcache_name = bindist.tarball_name(spec, '')
print('{0}'.format(buildcache_name))
sys.exit(0)
def save_spec_yamls(args):
"""Get full spec for dependencies, relative to root spec, and write them
to files in the specified output directory. Uses exit code to signal
success or failure. An exit code of zero means the command was likely
successful. If any errors or exceptions are encountered, or if expected
command-line arguments are not provided, then the exit code will be
non-zero."""
if not args.root_spec:
tty.msg('No root spec provided, exiting.')
sys.exit(1)
if not args.specs:
tty.msg('No dependent specs provided, exiting.')
sys.exit(1)
if not args.yaml_dir:
tty.msg('No yaml directory provided, exiting.')
sys.exit(1)
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_yaml = root_spec.to_yaml(all_deps=True)
save_dependency_spec_yamls(
root_spec_as_yaml, args.yaml_dir, args.specs.split())
sys.exit(0)
def buildcache(parser, args):
if args.func:
args.func(args)

lib/spack/spack/spec.py

@@ -3695,6 +3695,33 @@ def parse_anonymous_spec(spec_like, pkg_name):
return anon_spec
def save_dependency_spec_yamls(
root_spec_as_yaml, output_directory, dependencies=None):
"""Given a root spec (represented as a yaml object), index it with a subset
of its dependencies, and write each dependency to a separate yaml file
in the output directory. By default, all dependencies will be written
out. To choose a smaller subset of dependencies to be written, pass a
list of package names in the dependencies parameter. If a requested
dependency is not found in the root spec, SpecDependencyNotFoundError
is raised with a specific message about what went wrong."""
root_spec = Spec.from_yaml(root_spec_as_yaml)
dep_list = dependencies
if not dep_list:
dep_list = [dep.name for dep in root_spec.traverse()]
for dep_name in dep_list:
if dep_name not in root_spec:
msg = 'Dependency {0} does not exist in root spec {1}'.format(
dep_name, root_spec.name)
raise SpecDependencyNotFoundError(msg)
dep_spec = root_spec[dep_name]
yaml_path = os.path.join(output_directory, '{0}.yaml'.format(dep_name))
with open(yaml_path, 'w') as fd:
fd.write(dep_spec.to_yaml(all_deps=True))
def base32_prefix_bits(hash_string, bits):
"""Return the first <bits> bits of a base32 string as an integer."""
if bits > len(hash_string) * 5:
@@ -3880,3 +3907,8 @@ def __init__(self, spec, matches):
long_message += match_fmt_custom.format(idx + 1, c, w, msg)
super(ConflictsInSpecError, self).__init__(message, long_message)
class SpecDependencyNotFoundError(SpecError):
"""Raised when a failure is encountered writing the dependencies of
a spec."""

lib/spack/spack/test/spec_yaml.py

@@ -8,12 +8,16 @@
YAML format preserves DAG information in the spec.
"""
import os
from collections import Iterable, Mapping
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
from spack.spec import Spec
from spack import repo
from spack.spec import Spec, save_dependency_spec_yamls
from spack.util.spack_yaml import syaml_dict
from spack.test.conftest import MockPackage, MockPackageMultiRepo
def check_yaml_round_trip(spec):
@@ -198,3 +202,38 @@ def reverse_all_dicts(data):
return type(data)(reverse_all_dicts(elt) for elt in data)
else:
return data
def check_specs_equal(original_spec, spec_yaml_path):
with open(spec_yaml_path, 'r') as fd:
spec_yaml = fd.read()
spec_from_yaml = Spec.from_yaml(spec_yaml)
return original_spec.eq_dag(spec_from_yaml)
def test_save_dependency_spec_yamls_subset(tmpdir, config):
output_path = str(tmpdir.mkdir('spec_yamls'))
default = ('build', 'link')
g = MockPackage('g', [], [])
f = MockPackage('f', [], [])
e = MockPackage('e', [], [])
d = MockPackage('d', [f, g], [default, default])
c = MockPackage('c', [], [])
b = MockPackage('b', [d, e], [default, default])
a = MockPackage('a', [b, c], [default, default])
mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, g])
with repo.swap(mock_repo):
spec_a = Spec('a')
spec_a.concretize()
b_spec = spec_a['b']
c_spec = spec_a['c']
spec_a_yaml = spec_a.to_yaml(all_deps=True)
save_dependency_spec_yamls(spec_a_yaml, output_path, ['b', 'c'])
assert check_specs_equal(b_spec, os.path.join(output_path, 'b.yaml'))
assert check_specs_equal(c_spec, os.path.join(output_path, 'c.yaml'))

lib/spack/spack/util/web.py

@@ -86,6 +86,58 @@ def __init__(self, *args, **kwargs):
super(NonDaemonPool, self).__init__(*args, **kwargs)
def _read_from_url(url, accept_content_type=None):
context = None
verify_ssl = spack.config.get('config:verify_ssl')
pyver = sys.version_info
if (pyver < (2, 7, 9) or (3,) < pyver < (3, 4, 3)):
if verify_ssl:
tty.warn("Spack will not check SSL certificates. You need to "
"update your Python to enable certificate "
"verification.")
elif verify_ssl:
# without a defined context, urlopen will not verify the ssl cert for
# python 3.x
context = ssl.create_default_context()
else:
context = ssl._create_unverified_context()
req = Request(url)
if accept_content_type:
# Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files.
# It would be nice to do this with the HTTP Accept header to avoid
# one round-trip. However, most servers seem to ignore the header
# if you ask for a tarball with Accept: text/html.
req.get_method = lambda: "HEAD"
resp = _urlopen(req, timeout=_timeout, context=context)
if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
return None, None
if not resp.headers["Content-type"].startswith(accept_content_type):
tty.debug("ignoring page " + url + " with content type " +
resp.headers["Content-type"])
return None, None
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
response = _urlopen(req, timeout=_timeout, context=context)
response_url = response.geturl()
# Read the page and stick it in the map we'll return
page = response.read().decode('utf-8')
return response_url, page
def read_from_url(url, accept_content_type=None):
resp_url, contents = _read_from_url(url, accept_content_type)
return contents
def _spider(url, visited, root, depth, max_depth, raise_on_error):
"""Fetches URL and any pages it links to up to max_depth.
@@ -107,46 +159,11 @@ def _spider(url, visited, root, depth, max_depth, raise_on_error):
root = re.sub('/index.html$', '', root)
try:
context = None
verify_ssl = spack.config.get('config:verify_ssl')
pyver = sys.version_info
if (pyver < (2, 7, 9) or (3,) < pyver < (3, 4, 3)):
if verify_ssl:
tty.warn("Spack will not check SSL certificates. You need to "
"update your Python to enable certificate "
"verification.")
elif verify_ssl:
# We explicitly create default context to avoid error described in
# https://blog.sucuri.net/2016/03/beware-unverified-tls-certificates-php-python.html
context = ssl.create_default_context()
else:
context = ssl._create_unverified_context()
response_url, page = _read_from_url(url, 'text/html')
# Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files.
# It would be nice to do this with the HTTP Accept header to avoid
# one round-trip. However, most servers seem to ignore the header
# if you ask for a tarball with Accept: text/html.
req = Request(url)
req.get_method = lambda: "HEAD"
resp = _urlopen(req, timeout=_timeout, context=context)
if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
if not response_url or not page:
return pages, links
if not resp.headers["Content-type"].startswith('text/html'):
tty.debug("ignoring page " + url + " with content type " +
resp.headers["Content-type"])
return pages, links
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
response = _urlopen(req, timeout=_timeout, context=context)
response_url = response.geturl()
# Read the page and stick it in the map we'll return
page = response.read().decode('utf-8')
pages[response_url] = page
# Parse out the links in the page

New file (HTML index template):

@@ -0,0 +1,11 @@
<html>
<head>
</head>
<body>
<ul>
{% for bucket_key in top_level_keys %}
<li><a href="{{ bucket_key }}">{{ bucket_key }}</a></li>
{% endfor %}
</ul>
</body>
</html>