Use json for buildcache index (#15002)
* Start moving toward a json buildcache index
* Add spec and database index schemas
* Add a schema for buildcache spec.yaml files
* Provide a mode for database class to generate buildcache index
* Update db and ci tests to validate object w/ new schema
* Remove unused temporary upload-s3 command
* Use database class to generate buildcache index
* Do not generate index with each buildcache creation
* Make buildcache index mode into a couple of constructor args to Database class
* Use keyword args for _createtarball
* Parse new json index when we get specs from buildcache

Now that only one index file per mirror needs to be fetched in order to have all
the concrete specs for binaries available on the mirror, we can just fetch and
refresh the cached specs every time instead of needing to use the '-f' flag to
force re-reading.
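For orientation, a hedged sketch (not Spack code) of the access pattern this
change enables, assuming the mirror layout used in the diff below,
<mirror-url>/build_cache/index.json:

    # Minimal sketch using only the standard library; 'fetch_index' and the
    # mirror URL are illustrative, not part of this change.
    import codecs
    import json
    from urllib.request import urlopen

    def fetch_index(mirror_url):
        index_url = '{0}/build_cache/index.json'.format(mirror_url)
        reader = codecs.getreader('utf-8')(urlopen(index_url))
        index = json.load(reader)
        # One request per mirror now yields every install record it offers.
        return index['database']['installs']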
@@ -25,6 +25,7 @@
 import spack.cmd
 import spack.config as config
+import spack.database as spack_db
 import spack.fetch_strategy as fs
 import spack.util.gpg
 import spack.relocate as relocate
@@ -32,7 +33,6 @@
 import spack.mirror
 import spack.util.url as url_util
 import spack.util.web as web_util

 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.gpg import Gpg
@@ -282,31 +282,47 @@ def sign_tarball(key, force, specfile_path):
 def generate_package_index(cache_prefix):
     """Create the build cache index page.

-    Creates (or replaces) the "index.html" page at the location given in
+    Creates (or replaces) the "index.json" page at the location given in
     cache_prefix. This page contains a link for each binary package (*.yaml)
     and public key (*.key) under cache_prefix.
     """
     tmpdir = tempfile.mkdtemp()
-    try:
-        index_html_path = os.path.join(tmpdir, 'index.html')
-        file_list = (
-            entry
-            for entry in web_util.list_url(cache_prefix)
-            if (entry.endswith('.yaml')
-                or entry.endswith('.key')))
+    db_root_dir = os.path.join(tmpdir, 'db_root')
+    db = spack_db.Database(None, db_dir=db_root_dir,
+                           enable_transaction_locking=False,
+                           record_fields=['spec', 'ref_count'])

-        with open(index_html_path, 'w') as f:
-            f.write(BUILD_CACHE_INDEX_TEMPLATE.format(
-                title='Spack Package Index',
-                path_list='\n'.join(
-                    BUILD_CACHE_INDEX_ENTRY_TEMPLATE.format(path=path)
-                    for path in file_list)))
+    file_list = (
+        entry
+        for entry in web_util.list_url(cache_prefix)
+        if entry.endswith('.yaml'))

-        web_util.push_to_url(
-            index_html_path,
-            url_util.join(cache_prefix, 'index.html'),
-            keep_original=False,
-            extra_args={'ContentType': 'text/html'})
+    tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
+        cache_prefix))
+    for file_path in file_list:
+        try:
+            yaml_url = url_util.join(cache_prefix, file_path)
+            tty.debug('fetching {0}'.format(yaml_url))
+            _, _, yaml_file = web_util.read_from_url(yaml_url)
+            yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
+            # yaml_obj = syaml.load(yaml_contents)
+            # s = Spec.from_yaml(yaml_obj)
+            s = Spec.from_yaml(yaml_contents)
+            db.add(s, None)
+        except (URLError, web_util.SpackWebError) as url_err:
+            tty.error('Error reading spec.yaml: {0}'.format(file_path))
+            tty.error(url_err)
+
+    try:
+        index_json_path = os.path.join(db_root_dir, 'index.json')
+        with open(index_json_path, 'w') as f:
+            db._write_to_file(f)
+
+        web_util.push_to_url(
+            index_json_path,
+            url_util.join(cache_prefix, 'index.json'),
+            keep_original=False,
+            extra_args={'ContentType': 'application/json'})
     finally:
         shutil.rmtree(tmpdir)
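For orientation, a hedged sketch of roughly what the pushed index.json looks
like when the Database above is created with record_fields=['spec',
'ref_count'] (the hash key, spec contents, and version string are made up for
illustration):

    example_index = {
        'database': {
            'installs': {
                'abcdef...32-char-dag-hash': {    # placeholder, not a real hash
                    'spec': {
                        'zlib': {
                            'version': '1.2.11',
                            # remaining node fields per schema/spec.py below
                        },
                    },
                    'ref_count': 0,
                },
            },
            'version': '5',  # database format version; value assumed
        },
    }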
@@ -825,49 +841,55 @@ def get_spec(spec=None, force=False):
     return try_download_specs(urls=urls, force=force)


-def get_specs(force=False, allarch=False):
+def get_specs(allarch=False):
     """
     Get spec.yaml's for build caches available on mirror
     """
     global _cached_specs
     arch = architecture.Arch(architecture.platform(),
                              'default_os', 'default_target')
-    arch_pattern = ('([^-]*-[^-]*-[^-]*)')
-    if not allarch:
-        arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)
-
-    regex_pattern = '%s(.*)(spec.yaml$)' % (arch_pattern)
-    arch_re = re.compile(regex_pattern)

     if not spack.mirror.MirrorCollection():
         tty.debug("No Spack mirrors are currently configured")
         return {}

-    urls = set()
     for mirror in spack.mirror.MirrorCollection().values():
         fetch_url_build_cache = url_util.join(
             mirror.fetch_url, _build_cache_relative_path)

-        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
-        if mirror_dir:
-            tty.msg("Finding buildcaches in %s" % mirror_dir)
-            if os.path.exists(mirror_dir):
-                files = os.listdir(mirror_dir)
-                for file in files:
-                    m = arch_re.search(file)
-                    if m:
-                        link = url_util.join(fetch_url_build_cache, file)
-                        urls.add(link)
-        else:
-            tty.msg("Finding buildcaches at %s" %
-                    url_util.format(fetch_url_build_cache))
-            p, links = web_util.spider(
-                url_util.join(fetch_url_build_cache, 'index.html'))
-            for link in links:
-                m = arch_re.search(link)
-                if m:
-                    urls.add(link)
-
-    return try_download_specs(urls=urls, force=force)
+        tty.msg("Finding buildcaches at %s" %
+                url_util.format(fetch_url_build_cache))
+
+        index_url = url_util.join(fetch_url_build_cache, 'index.json')
+
+        try:
+            _, _, file_stream = web_util.read_from_url(
+                index_url, 'application/json')
+            index_object = codecs.getreader('utf-8')(file_stream).read()
+        except (URLError, web_util.SpackWebError) as url_err:
+            tty.error('Failed to read index {0}'.format(index_url))
+            tty.debug(url_err)
+            # Just return whatever specs we may already have cached
+            return _cached_specs
+
+        tmpdir = tempfile.mkdtemp()
+        index_file_path = os.path.join(tmpdir, 'index.json')
+        with open(index_file_path, 'w') as fd:
+            fd.write(index_object)
+
+        db_root_dir = os.path.join(tmpdir, 'db_root')
+        db = spack_db.Database(None, db_dir=db_root_dir,
+                               enable_transaction_locking=False)
+
+        db._read_from_file(index_file_path)
+        spec_list = db.query_local(installed=False)
+
+        for indexed_spec in spec_list:
+            spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
+            if (allarch is True or spec_arch == arch):
+                _cached_specs.add(indexed_spec)
+
+    return _cached_specs


 def get_keys(install=False, trust=False, force=False):
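A hedged sketch of calling the new entry point (mirrors come from Spack
configuration; nothing else is needed now that the force flag is gone):

    import spack.binary_distribution as bindist

    # Default: only specs matching the local platform and OS.
    specs = bindist.get_specs()

    # All architectures, refreshed from each mirror's index.json per call.
    all_specs = bindist.get_specs(allarch=True)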
@@ -1025,9 +1025,9 @@ def read_cdashid_from_mirror(spec, mirror_url):
 def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id):
     if mirror_url:
         tty.debug('Creating buildcache')
-        buildcache._createtarball(env, yaml_path, None, True, False,
-                                  mirror_url, None, True, False, False, True,
-                                  False)
+        buildcache._createtarball(env, spec_yaml=yaml_path, add_deps=False,
+                                  output_location=mirror_url, force=True,
+                                  allow_root=True)
         if build_id:
             tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
                 build_id, mirror_url))
@@ -68,9 +68,9 @@ def setup_parser(subparser):
                         type=str,
                         help="URL of the mirror where " +
                              "buildcaches will be written.")
-    create.add_argument('--no-rebuild-index', action='store_true',
-                        default=False, help="skip rebuilding index after " +
-                                            "building package(s)")
+    create.add_argument('--rebuild-index', action='store_true',
+                        default=False, help="Regenerate buildcache index " +
+                                            "after building package(s)")
     create.add_argument('-y', '--spec-yaml', default=None,
                         help='Create buildcache entry for spec from yaml file')
     create.add_argument('--only', default='package,dependencies',
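A hedged usage sketch of the reworked flags (SpackCommand is the harness the
tests below use; the mirror path and spec are illustrative):

    from spack.main import SpackCommand

    buildcache = SpackCommand('buildcache')
    # Index generation is now opt-in at creation time...
    buildcache('create', '-d', '/path/to/mirror', '--rebuild-index', 'zlib')
    # ...or done once afterwards for the whole mirror.
    buildcache('update-index', '--mirror-url', 'file:///path/to/mirror')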
@@ -108,8 +108,6 @@ def setup_parser(subparser):
                            action='store_true',
                            dest='variants',
                            help='show variants in output (can be long)')
-    listcache.add_argument('-f', '--force', action='store_true',
-                           help="force new download of specs")
     listcache.add_argument('-a', '--allarch', action='store_true',
                            help="list specs for all available architectures" +
                                 " instead of default platform and OS")
@@ -291,7 +289,7 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
     specs_from_cli = []
     has_errors = False
     allarch = other_arch
-    specs = bindist.get_specs(force, allarch)
+    specs = bindist.get_specs(allarch)
     for pkg in pkgs:
         matches = []
         tty.msg("buildcache spec(s) matching %s \n" % pkg)
@@ -323,9 +321,10 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
     return specs_from_cli


-def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
-                   output_location, key, force, rel, unsigned, allow_root,
-                   no_rebuild_index):
+def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
+                   add_deps=True, output_location=os.getcwd(),
+                   signing_key=None, force=False, make_relative=False,
+                   unsigned=False, allow_root=False, rebuild_index=False):
     if spec_yaml:
         packages = set()
         with open(spec_yaml, 'r') as fd:
@@ -355,10 +354,6 @@ def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
     msg = 'Buildcache files will be output to %s/build_cache' % outdir
     tty.msg(msg)

-    signkey = None
-    if key:
-        signkey = key
-
     matches = find_matching_specs(pkgs, env=env)

     if matches:
@@ -398,9 +393,9 @@ def _createtarball(env, spec_yaml, packages, add_spec, add_deps,

     for spec in specs:
         tty.debug('creating binary cache file for package %s ' % spec.format())
-        bindist.build_tarball(spec, outdir, force, rel,
-                              unsigned, allow_root, signkey,
-                              not no_rebuild_index)
+        bindist.build_tarball(spec, outdir, force, make_relative,
+                              unsigned, allow_root, signing_key,
+                              rebuild_index)


 def createtarball(args):
@@ -447,9 +442,12 @@ def createtarball(args):
     add_spec = ('package' in args.things_to_install)
     add_deps = ('dependencies' in args.things_to_install)

-    _createtarball(env, args.spec_yaml, args.specs, add_spec, add_deps,
-                   output_location, args.key, args.force, args.rel,
-                   args.unsigned, args.allow_root, args.no_rebuild_index)
+    _createtarball(env, spec_yaml=args.spec_yaml, packages=args.specs,
+                   add_spec=add_spec, add_deps=add_deps,
+                   output_location=output_location, signing_key=args.key,
+                   force=args.force, make_relative=args.rel,
+                   unsigned=args.unsigned, allow_root=args.allow_root,
+                   rebuild_index=args.rebuild_index)


 def installtarball(args):
@@ -458,8 +456,7 @@ def installtarball(args):
         tty.die("build cache file installation requires" +
                 " at least one package spec argument")
     pkgs = set(args.specs)
-    matches = match_downloaded_specs(pkgs, args.multiple, args.force,
-                                     args.otherarch)
+    matches = match_downloaded_specs(pkgs, args.multiple, args.otherarch)

     for match in matches:
         install_tarball(match, args)
@@ -491,7 +488,7 @@ def install_tarball(spec, args):

 def listspecs(args):
     """list binary packages available from mirrors"""
-    specs = bindist.get_specs(args.force, args.allarch)
+    specs = bindist.get_specs(args.allarch)
     if args.specs:
         constraints = set(args.specs)
         specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
@@ -1,214 +0,0 @@
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-# TODO: This will be merged into the buildcache command once
-# everything is working.
-
-import os
-import re
-import sys
-
-try:
-    import boto3
-    import botocore
-    have_boto3_support = True
-except ImportError:
-    have_boto3_support = False
-
-import llnl.util.tty as tty
-
-from spack.error import SpackError
-import spack.tengine as template_engine
-from spack.spec import Spec
-
-
-import spack.binary_distribution as bindist
-
-
-description = "temporary command to upload buildcaches to 's3.spack.io'"
-section = "packaging"
-level = "long"
-
-
-def setup_parser(subparser):
-    setup_parser.parser = subparser
-    subparsers = subparser.add_subparsers(help='upload-s3 sub-commands')
-
-    # sub-command to upload a built spec to s3
-    spec = subparsers.add_parser('spec', help=upload_spec.__doc__)
-
-    spec.add_argument('-s', '--spec', default=None,
-                      help='Spec to upload')
-
-    spec.add_argument('-y', '--spec-yaml', default=None,
-                      help='Path to spec yaml file containing spec to upload')
-
-    spec.add_argument('-b', '--base-dir', default=None,
-                      help='Path to root of buildcaches')
-
-    spec.add_argument('-e', '--endpoint-url',
-                      default='https://s3.spack.io', help='URL of mirror')
-
-    spec.set_defaults(func=upload_spec)
-
-    # sub-command to update the index of a buildcache on s3
-    index = subparsers.add_parser('index', help=update_index.__doc__)
-
-    index.add_argument('-e', '--endpoint-url',
-                       default='https://s3.spack.io', help='URL of mirror')
-
-    index.set_defaults(func=update_index)
-
-
-def get_s3_session(endpoint_url):
-    if not have_boto3_support:
-        raise SpackError('boto3 module not available')
-
-    session = boto3.Session()
-    s3 = session.resource('s3', endpoint_url=endpoint_url)
-
-    bucket_names = []
-    for bucket in s3.buckets.all():
-        bucket_names.append(bucket.name)
-
-    if len(bucket_names) > 1:
-        raise SpackError('More than one bucket associated with credentials')
-
-    bucket_name = bucket_names[0]
-
-    return s3, bucket_name
-
-
-def update_index(args):
-    """Update the index of an s3 buildcache"""
-    s3, bucket_name = get_s3_session(args.endpoint_url)
-
-    bucket = s3.Bucket(bucket_name)
-    exists = True
-
-    try:
-        s3.meta.client.head_bucket(Bucket=bucket_name)
-    except botocore.exceptions.ClientError as e:
-        # If a client error is thrown, then check that it was a 404 error.
-        # If it was a 404 error, then the bucket does not exist.
-        error_code = e.response['Error']['Code']
-        if error_code == '404':
-            exists = False
-
-    if not exists:
-        tty.error('S3 bucket "{0}" does not exist'.format(bucket_name))
-        sys.exit(1)
-
-    build_cache_dir = os.path.join(
-        'mirror', bindist.build_cache_relative_path())
-
-    spec_yaml_regex = re.compile('{0}/(.+\\.spec\\.yaml)$'.format(
-        build_cache_dir))
-    spack_regex = re.compile('{0}/([^/]+)/.+\\.spack$'.format(
-        build_cache_dir))
-
-    top_level_keys = set()
-
-    for key in bucket.objects.all():
-        m = spec_yaml_regex.search(key.key)
-        if m:
-            top_level_keys.add(m.group(1))
-            print(m.group(1))
-            continue
-
-        m = spack_regex.search(key.key)
-        if m:
-            top_level_keys.add(m.group(1))
-            print(m.group(1))
-            continue
-
-    index_data = {
-        'top_level_keys': top_level_keys,
-    }
-
-    env = template_engine.make_environment()
-    template_dir = 'misc'
-    index_template = os.path.join(template_dir, 'buildcache_index.html')
-    t = env.get_template(index_template)
-    contents = t.render(index_data)
-
-    index_key = os.path.join(build_cache_dir, 'index.html')
-
-    tty.debug('Generated index:')
-    tty.debug(contents)
-    tty.debug('Pushing it to {0} -> {1}'.format(bucket_name, index_key))
-
-    s3_obj = s3.Object(bucket_name, index_key)
-    s3_obj.put(Body=contents, ACL='public-read')
-
-
-def upload_spec(args):
-    """Upload a spec to s3 bucket"""
-    if not args.spec and not args.spec_yaml:
-        tty.error('Cannot upload spec without spec arg or path to spec yaml')
-        sys.exit(1)
-
-    if not args.base_dir:
-        tty.error('No base directory for buildcache specified')
-        sys.exit(1)
-
-    if args.spec:
-        try:
-            spec = Spec(args.spec)
-            spec.concretize()
-        except Exception as e:
-            tty.debug(e)
-            tty.error('Unable to concrectize spec from string {0}'.format(
-                args.spec))
-            sys.exit(1)
-    else:
-        try:
-            with open(args.spec_yaml, 'r') as fd:
-                spec = Spec.from_yaml(fd.read())
-        except Exception as e:
-            tty.debug(e)
-            tty.error('Unable to concrectize spec from yaml {0}'.format(
-                args.spec_yaml))
-            sys.exit(1)
-
-    s3, bucket_name = get_s3_session(args.endpoint_url)
-
-    build_cache_dir = bindist.build_cache_relative_path()
-
-    tarball_key = os.path.join(
-        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
-    tarball_path = os.path.join(args.base_dir, tarball_key)
-
-    specfile_key = os.path.join(
-        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
-    specfile_path = os.path.join(args.base_dir, specfile_key)
-
-    cdashidfile_key = os.path.join(
-        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
-    cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)
-
-    tty.msg('Uploading {0}'.format(tarball_key))
-    s3.meta.client.upload_file(
-        tarball_path, bucket_name,
-        os.path.join('mirror', tarball_key),
-        ExtraArgs={'ACL': 'public-read'})
-
-    tty.msg('Uploading {0}'.format(specfile_key))
-    s3.meta.client.upload_file(
-        specfile_path, bucket_name,
-        os.path.join('mirror', specfile_key),
-        ExtraArgs={'ACL': 'public-read'})
-
-    if os.path.exists(cdashidfile_path):
-        tty.msg('Uploading {0}'.format(cdashidfile_key))
-        s3.meta.client.upload_file(
-            cdashidfile_path, bucket_name,
-            os.path.join('mirror', cdashidfile_key),
-            ExtraArgs={'ACL': 'public-read'})
-
-
-def upload_s3(parser, args):
-    if args.func:
-        args.func(args)
@@ -48,6 +48,12 @@
 from spack.util.crypto import bit_length
 from spack.version import Version


+@contextlib.contextmanager
+def nullcontext(*args, **kwargs):
+    yield
+
+
 # TODO: Provide an API automatically retyring a build after detecting and
 # TODO: clearing a failure.
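Since nullcontext accepts and discards arbitrary arguments, it is
call-compatible with the lock transaction classes it stands in for; a small
sketch:

    # Same call shape as lk.ReadTransaction / lk.WriteTransaction,
    # but nothing is acquired or released.
    with nullcontext('lock', acquire=None, release=None):
        pass  # body runs with no locking side effects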
@@ -87,6 +93,17 @@
 # Types of dependencies tracked by the database
 _tracked_deps = ('link', 'run')

+# Default list of fields written for each install record
+default_install_record_fields = [
+    'spec',
+    'ref_count',
+    'path',
+    'installed',
+    'explicit',
+    'installation_time',
+    'deprecated_for',
+]
+

 def _now():
     """Returns the time since the epoch"""
@@ -187,17 +204,17 @@ def install_type_matches(self, installed):
         else:
             return InstallStatuses.MISSING in installed

-    def to_dict(self):
-        rec_dict = {
-            'spec': self.spec.to_node_dict(),
-            'path': self.path,
-            'installed': self.installed,
-            'ref_count': self.ref_count,
-            'explicit': self.explicit,
-            'installation_time': self.installation_time,
-        }
-        if self.deprecated_for:
-            rec_dict.update({'deprecated_for': self.deprecated_for})
+    def to_dict(self, include_fields=default_install_record_fields):
+        rec_dict = {}
+
+        for field_name in include_fields:
+            if field_name == 'spec':
+                rec_dict.update({'spec': self.spec.to_node_dict()})
+            elif field_name == 'deprecated_for' and self.deprecated_for:
+                rec_dict.update({'deprecated_for': self.deprecated_for})
+            else:
+                rec_dict.update({field_name: getattr(self, field_name)})
+
         return rec_dict

     @classmethod
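A hedged example of the new filtering ('record' stands for any InstallRecord
instance; this snippet is not code from the change):

    full = record.to_dict()  # every field in default_install_record_fields
    slim = record.to_dict(include_fields=['spec', 'ref_count'])
    # 'slim' omits path, installed, explicit, installation_time, and
    # deprecated_for, which keeps buildcache index records small.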
@@ -206,9 +223,12 @@ def from_dict(cls, spec, dictionary):
         d.pop('spec', None)

         # Old databases may have "None" for path for externals
-        if d['path'] == 'None':
+        if 'path' not in d or d['path'] == 'None':
             d['path'] = None

+        if 'installed' not in d:
+            d['installed'] = False
+
         return InstallRecord(spec, **d)
@@ -275,7 +295,8 @@ class Database(object):
     _prefix_failures = {}

     def __init__(self, root, db_dir=None, upstream_dbs=None,
-                 is_upstream=False):
+                 is_upstream=False, enable_transaction_locking=True,
+                 record_fields=default_install_record_fields):
         """Create a Database for Spack installations under ``root``.

         A Database is a cache of Specs data from ``$prefix/spec.yaml``
@@ -293,6 +314,12 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
         Caller may optionally provide a custom ``db_dir`` parameter
         where data will be stored. This is intended to be used for
         testing the Database class.
+
+        This class supports writing buildcache index files, in which case
+        certain fields are not needed in each install record, and no
+        transaction locking is required. To use this feature, provide
+        ``enable_transaction_locking=False``, and specify a list of needed
+        fields in ``record_fields``.
         """
         self.root = root
@@ -356,14 +383,23 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
         # message)
         self._fail_when_missing_deps = False

+        if enable_transaction_locking:
+            self._write_transaction_impl = lk.WriteTransaction
+            self._read_transaction_impl = lk.ReadTransaction
+        else:
+            self._write_transaction_impl = nullcontext
+            self._read_transaction_impl = nullcontext
+
+        self._record_fields = record_fields
+
     def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
-        return lk.WriteTransaction(
+        return self._write_transaction_impl(
             self.lock, acquire=self._read, release=self._write)

     def read_transaction(self):
         """Get a read lock context manager for use in a `with` block."""
-        return lk.ReadTransaction(self.lock, acquire=self._read)
+        return self._read_transaction_impl(self.lock, acquire=self._read)

     def _failed_spec_path(self, spec):
         """Return the path to the spec's failure file, which may not exist."""
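Putting the two new constructor arguments together, a hedged sketch of the
index-writing mode (the same pattern generate_package_index uses above; the
db_dir path here is illustrative):

    import spack.database as spack_db

    # A throwaway, lock-free Database used purely as a serializer.
    db = spack_db.Database(None, db_dir='/tmp/db_root',
                           enable_transaction_locking=False,
                           record_fields=['spec', 'ref_count'])
    with db.read_transaction():  # a nullcontext now: no lock file is touched
        pass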
@@ -573,7 +609,8 @@ def _write_to_file(self, stream):
         This function does not do any locking or transactions.
         """
         # map from per-spec hash code to installation record.
-        installs = dict((k, v.to_dict()) for k, v in self._data.items())
+        installs = dict((k, v.to_dict(include_fields=self._record_fields))
+                        for k, v in self._data.items())

         # database includes installation list and version.
@@ -707,7 +744,8 @@ def check(cond, msg):

         self.reindex(spack.store.layout)
         installs = dict(
-            (k, v.to_dict()) for k, v in self._data.items()
+            (k, v.to_dict(include_fields=self._record_fields))
+            for k, v in self._data.items()
         )

     def invalid_record(hash_key, error):
lib/spack/spack/schema/buildcache_spec.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for a buildcache spec.yaml file
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
+   :lines: 14-
+"""
+import spack.schema.spec
+
+
+schema = {
+    '$schema': 'http://json-schema.org/schema#',
+    'title': 'Spack buildcache spec.yaml schema',
+    'type': 'object',
+    # 'additionalProperties': True,
+    'properties': {
+        'buildinfo': {
+            'type': 'object',
+            'additionalProperties': False,
+            'required': ['relative_prefix'],
+            'properties': {
+                'relative_prefix': {'type': 'string'},
+                'relative_rpaths': {'type': 'boolean'},
+            },
+        },
+        'full_hash': {'type': 'string'},
+        'spec': {
+            'type': 'array',
+            'items': spack.schema.spec.properties,
+        },
+        'binary_cache_checksum': {
+            'type': 'object',
+            'properties': {
+                'hash_algorithm': {'type': 'string'},
+                'hash': {'type': 'string'},
+            },
+        },
+    },
+}
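A hedged example of exercising this schema by hand (the file name is
illustrative; the ci test below runs the same check over every *.spec.yaml in
a mirror):

    from jsonschema import validate

    import spack.util.spack_yaml as syaml
    from spack.schema.buildcache_spec import schema as spec_yaml_schema

    with open('zlib-1.2.11.spec.yaml') as fd:       # illustrative path
        validate(syaml.load(fd), spec_yaml_schema)  # raises on mismatch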
lib/spack/spack/schema/database_index.py (new file, 58 lines)
@@ -0,0 +1,58 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for database index.json file
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/database_index.py
+   :lines: 36-
+"""
+import spack.schema.spec
+
+# spack.schema.spec.properties
+
+#: Full schema with metadata
+schema = {
+    '$schema': 'http://json-schema.org/schema#',
+    'title': 'Spack spec schema',
+    'type': 'object',
+    'required': ['database'],
+    'additionalProperties': False,
+    'properties': {
+        'database': {
+            'type': 'object',
+            'required': ['installs', 'version'],
+            'additionalProperties': False,
+            'properties': {
+                'installs': {
+                    'type': 'object',
+                    'patternProperties': {
+                        r'^[\w\d]{32}$': {
+                            'type': 'object',
+                            'properties': {
+                                'spec': spack.schema.spec.properties,
+                                'path': {
+                                    'oneOf': [
+                                        {'type': 'string'},
+                                        {'type': 'null'},
+                                    ],
+                                },
+                                'installed': {'type': 'boolean'},
+                                'ref_count': {
+                                    'type': 'integer',
+                                    'minimum': 0,
+                                },
+                                'explicit': {'type': 'boolean'},
+                                'installation_time': {
+                                    'type': 'number',
+                                }
+                            },
+                        },
+                    },
+                },
+                'version': {'type': 'string'},
+            }
+        },
+    },
+}
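A hedged standalone check mirroring what the updated database and ci tests do
below (the path is illustrative):

    import json

    from jsonschema import validate

    from spack.schema.database_index import schema as db_idx_schema

    with open('index.json') as fd:
        validate(json.load(fd), db_idx_schema)  # ValidationError on mismatch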
lib/spack/spack/schema/spec.py (new file, 159 lines)
@@ -0,0 +1,159 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for a spec found in spec.yaml or database index.json files
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
+   :lines: 13-
+"""
+
+
+target = {
+    'oneOf': [
+        {
+            'type': 'string',
+        }, {
+            'type': 'object',
+            'additionalProperties': False,
+            'required': [
+                'name',
+                'vendor',
+                'features',
+                'generation',
+                'parents',
+            ],
+            'properties': {
+                'name': {'type': 'string'},
+                'vendor': {'type': 'string'},
+                'features': {
+                    'type': 'array',
+                    'items': {'type': 'string'},
+                },
+                'generation': {'type': 'integer'},
+                'parents': {
+                    'type': 'array',
+                    'items': {'type': 'string'},
+                },
+            },
+        },
+    ],
+}
+
+arch = {
+    'type': 'object',
+    'additionalProperties': False,
+    'properties': {
+        'platform': {},
+        'platform_os': {},
+        'target': target,
+    },
+}
+
+dependencies = {
+    'type': 'object',
+    'patternProperties': {
+        r'\w[\w-]*': {  # package name
+            'type': 'object',
+            'properties': {
+                'hash': {'type': 'string'},
+                'type': {
+                    'type': 'array',
+                    'items': {'type': 'string'},
+                },
+            },
+        },
+    },
+}
+
+#: Properties for inclusion in other schemas
+properties = {
+    r'\w[\w-]*': {  # package name
+        'type': 'object',
+        'additionalProperties': False,
+        'required': [
+            'version',
+            'arch',
+            'compiler',
+            'namespace',
+            'parameters',
+        ],
+        'properties': {
+            'hash': {'type': 'string'},
+            'version': {
+                'oneOf': [
+                    {'type': 'string'},
+                    {'type': 'number'},
+                ],
+            },
+            'arch': arch,
+            'compiler': {
+                'type': 'object',
+                'additionalProperties': False,
+                'properties': {
+                    'name': {'type': 'string'},
+                    'version': {'type': 'string'},
+                },
+            },
+            'namespace': {'type': 'string'},
+            'parameters': {
+                'type': 'object',
+                'required': [
+                    'cflags',
+                    'cppflags',
+                    'cxxflags',
+                    'fflags',
+                    'ldflags',
+                    'ldlibs',
+                ],
+                'additionalProperties': True,
+                'properties': {
+                    'patches': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'cflags': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'cppflags': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'cxxflags': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'fflags': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'ldflags': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                    'ldlib': {
+                        'type': 'array',
+                        'items': {'type': 'string'},
+                    },
+                },
+            },
+            'patches': {
+                'type': 'array',
+                'items': {},
+            },
+            'dependencies': dependencies,
+        },
+    },
+}
+
+
+#: Full schema with metadata
+schema = {
+    '$schema': 'http://json-schema.org/schema#',
+    'title': 'Spack spec schema',
+    'type': 'object',
+    'additionalProperties': False,
+    'patternProperties': properties,
+}
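For concreteness, a hand-written node that should pass this schema (all values
are illustrative, not taken from a real install):

    minimal_spec = {
        'zlib': {
            'version': '1.2.11',
            'arch': {
                'platform': 'linux',
                'platform_os': 'ubuntu18.04',
                'target': 'x86_64',
            },
            'compiler': {'name': 'gcc', 'version': '7.4.0'},
            'namespace': 'builtin',
            'parameters': {
                'cflags': [], 'cppflags': [], 'cxxflags': [],
                'fflags': [], 'ldflags': [], 'ldlibs': [],
            },
        },
    }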
@@ -24,7 +24,7 @@
 def mock_get_specs(database, monkeypatch):
     specs = database.query_local()
     monkeypatch.setattr(
-        spack.binary_distribution, 'get_specs', lambda x, y: specs
+        spack.binary_distribution, 'get_specs', lambda x: specs
     )
@@ -4,8 +4,10 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import filecmp
+import json
 import os
 import pytest
+from jsonschema import validate

 import spack
 import spack.ci as ci
@@ -15,6 +17,8 @@
 from spack.main import SpackCommand
 import spack.paths as spack_paths
 import spack.repo as repo
+from spack.schema.buildcache_spec import schema as spec_yaml_schema
+from spack.schema.database_index import schema as db_idx_schema
 from spack.spec import Spec
 from spack.util.mock_package import MockPackageMultiRepo
 import spack.util.executable as exe
@@ -717,10 +721,28 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
         ci.push_mirror_contents(
             env, concrete_spec, yaml_path, mirror_url, '42')

-        buildcache_list_output = buildcache_cmd('list', output=str)
+        buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')
+
+        # Test generating buildcache index while we have bin mirror
+        buildcache_cmd('update-index', '--mirror-url', mirror_url)
+        index_path = os.path.join(buildcache_path, 'index.json')
+        with open(index_path) as idx_fd:
+            index_object = json.load(idx_fd)
+            validate(index_object, db_idx_schema)
+
+        # Now that index is regenerated, validate "buildcache list" output
+        buildcache_list_output = buildcache_cmd('list', output=str)
         assert('patchelf' in buildcache_list_output)

+        # Also test buildcache_spec schema
+        bc_files_list = os.listdir(buildcache_path)
+        for file_name in bc_files_list:
+            if file_name.endswith('.spec.yaml'):
+                spec_yaml_path = os.path.join(buildcache_path, file_name)
+                with open(spec_yaml_path) as yaml_fd:
+                    yaml_object = syaml.load(yaml_fd)
+                    validate(yaml_object, spec_yaml_schema)
+
         logs_dir = working_dir.join('logs_dir')
         if not os.path.exists(logs_dir.strpath):
             os.makedirs(logs_dir.strpath)
@@ -20,6 +20,8 @@
     _use_uuid = False
     pass

+from jsonschema import validate
+
 import llnl.util.lock as lk
 from llnl.util.tty.colify import colify

@@ -30,6 +32,7 @@
 import spack.spec
 from spack.util.mock_package import MockPackageMultiRepo
 from spack.util.executable import Executable
+from spack.schema.database_index import schema


 pytestmark = pytest.mark.db

@@ -424,6 +427,10 @@ def test_005_db_exists(database):
     assert os.path.exists(str(index_file))
     assert os.path.exists(str(lock_file))

+    with open(index_file) as fd:
+        index_object = json.load(fd)
+        validate(index_object, schema)
+

 def test_010_all_install_sanity(database):
     """Ensure that the install layout reflects what we think it does."""

@@ -716,6 +723,8 @@ def test_old_external_entries_prefix(mutable_database):
     with open(spack.store.db._index_path, 'r') as f:
         db_obj = json.loads(f.read())

+    validate(db_obj, schema)
+
     s = spack.spec.Spec('externaltool')
     s.concretize()
@@ -108,6 +108,8 @@ def test_buildcache(mock_archive, tmpdir):
     else:
         create_args.insert(create_args.index('-a'), '-u')

+    create_args.insert(create_args.index('-a'), '--rebuild-index')
+
     args = parser.parse_args(create_args)
     buildcache.buildcache(parser, args)
     # trigger overwrite warning

@@ -165,7 +167,7 @@ def test_buildcache(mock_archive, tmpdir):
     args = parser.parse_args(['list'])
     buildcache.buildcache(parser, args)

-    args = parser.parse_args(['list', '-f'])
+    args = parser.parse_args(['list'])
     buildcache.buildcache(parser, args)

     args = parser.parse_args(['list', 'trivial'])
@@ -320,7 +320,7 @@ _spack() {
     then
         SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
     else
-        SPACK_COMPREPLY="activate add arch blame build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure containerize create deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload upload-s3 url verify versions view"
+        SPACK_COMPREPLY="activate add arch blame build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure containerize create deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload url verify versions view"
     fi
 }

@@ -385,7 +385,7 @@ _spack_buildcache() {
 _spack_buildcache_create() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --no-rebuild-index -y --spec-yaml --only"
+        SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --rebuild-index -y --spec-yaml --only"
     else
         _all_packages
     fi

@@ -403,7 +403,7 @@ _spack_buildcache_install() {
 _spack_buildcache_list() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force -a --allarch"
+        SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -a --allarch"
     else
         _all_packages
     fi

@@ -1478,23 +1478,6 @@ _spack_unload() {
     fi
 }

-_spack_upload_s3() {
-    if $list_options
-    then
-        SPACK_COMPREPLY="-h --help"
-    else
-        SPACK_COMPREPLY="spec index"
-    fi
-}
-
-_spack_upload_s3_spec() {
-    SPACK_COMPREPLY="-h --help -s --spec -y --spec-yaml -b --base-dir -e --endpoint-url"
-}
-
-_spack_upload_s3_index() {
-    SPACK_COMPREPLY="-h --help -e --endpoint-url"
-}
-
 _spack_url() {
     if $list_options
     then