specs: move to new spec.json format with build provenance (#22845)
This is a major rework of Spack's core `spec.yaml` metadata format. It moves from `spec.yaml` to `spec.json` for speed, and it changes the format in several ways:

1. The spec format now has a `_meta` section with a version (currently set to `2`). This will simplify major format changes like this one in the future.
2. The node list in spec dictionaries is no longer keyed by name. Instead, it is a list of records with no required key. The name, hash, etc. are fields in the dictionary records like any other.
3. Dependencies can be keyed by any hash (`hash`, `full_hash`, or `build_hash`).
4. `build_spec` provenance from #20262 is included in the spec format. This means that, for spliced specs, we preserve the *full* provenance of how to build, and we can reproduce a spliced spec from the original builds that produced it.

**NOTE**: Because we have switched the spec format, this PR changes Spack's hashing algorithm. This means that after this commit, Spack will think a lot of things need rebuilds.

This PR provides two major benefits:

* The switch to JSON speeds up Spack significantly, as Python's built-in JSON implementation is orders of magnitude faster than YAML.
* The new spec format will soon allow us to represent DAGs with potentially multiple versions of the same dependency -- e.g., for build dependencies or for compilers-as-dependencies. This PR lays the necessary groundwork for those features.

The old `spec.yaml` format continues to be supported, but it is now considered a legacy format, and Spack will opportunistically convert it to the new `spec.json` format.
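To make the description above concrete, here is a minimal, hypothetical sketch of the general shape of a format-version-2 spec file, written as plain Python that builds a dictionary and dumps it as JSON. The package names, versions, hash strings, and the exact set of fields are illustrative assumptions for this sketch, not the authoritative schema from this PR.

```python
import json

# Hypothetical sketch of a format-version-2 spec file, per the description above.
# All names, versions, and hash strings are made-up placeholders, not Spack output.
spec_v2 = {
    "spec": {
        "_meta": {"version": 2},   # format version, to ease future format changes
        "nodes": [
            {
                "name": "zlib",            # name is an ordinary field, not a dict key
                "version": "1.2.11",
                "hash": "xxxxxxx",         # each node record carries its own hash fields
                "full_hash": "yyyyyyy",
                # dependencies may reference other nodes by hash, full_hash, or
                # build_hash; the record fields shown here are illustrative only
                "dependencies": [
                    {"name": "pkgconf", "hash": "zzzzzzz"},
                ],
            },
        ],
    },
}

print(json.dumps(spec_v2, indent=2))
```

In the diff below, `Spec.from_json` and `Spec.from_yaml` are selected based on the file extension, so both the new `spec.json` files and legacy `spec.yaml` files remain readable.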
@@ -708,14 +708,14 @@ def generate_package_index(cache_prefix):
|
||||
"""Create the build cache index page.
|
||||
|
||||
Creates (or replaces) the "index.json" page at the location given in
|
||||
cache_prefix. This page contains a link for each binary package (.yaml)
|
||||
under cache_prefix.
|
||||
cache_prefix. This page contains a link for each binary package (.yaml or
|
||||
.json) under cache_prefix.
|
||||
"""
|
||||
try:
|
||||
file_list = (
|
||||
entry
|
||||
for entry in web_util.list_url(cache_prefix)
|
||||
if entry.endswith('.yaml'))
|
||||
if entry.endswith('.yaml') or entry.endswith('spec.json'))
|
||||
except KeyError as inst:
|
||||
msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
|
||||
tty.warn(msg)
|
||||
@@ -729,28 +729,33 @@ def generate_package_index(cache_prefix):
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
|
||||
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
|
||||
cache_prefix))
|
||||
|
||||
all_mirror_specs = {}
|
||||
|
||||
for file_path in file_list:
|
||||
try:
|
||||
yaml_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(yaml_url))
|
||||
_, _, yaml_file = web_util.read_from_url(yaml_url)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
spec_dict = syaml.load(yaml_contents)
|
||||
s = Spec.from_yaml(yaml_contents)
|
||||
spec_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
spec_dict = sjson.load(spec_file_contents)
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
spec_dict = syaml.load(spec_file_contents)
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
all_mirror_specs[s.dag_hash()] = {
|
||||
'yaml_url': yaml_url,
|
||||
'spec_url': spec_url,
|
||||
'spec': s,
|
||||
'num_deps': len(list(s.traverse(root=False))),
|
||||
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
|
||||
'buildinfo': spec_dict['buildinfo'],
|
||||
}
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading spec.yaml: {0}'.format(file_path))
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
sorted_specs = sorted(all_mirror_specs.keys(),
|
||||
@@ -776,7 +781,7 @@ def generate_package_index(cache_prefix):
|
||||
# full hash. If the full hash we have for any deps does not
|
||||
# match what those deps have themselves, then we need to splice
|
||||
# this spec with those deps, and push this spliced spec
|
||||
# (spec.yaml file) back to the mirror, as well as update the
|
||||
# (spec.json file) back to the mirror, as well as update the
|
||||
# all_mirror_specs dictionary with this spliced spec.
|
||||
to_splice = []
|
||||
for dep in s.dependencies():
|
||||
@@ -794,25 +799,25 @@ def generate_package_index(cache_prefix):
|
||||
s = s.splice(true_dep, True)
|
||||
|
||||
# Push this spliced spec back to the mirror
|
||||
spliced_yaml = s.to_dict(hash=ht.full_hash)
|
||||
spliced_spec_dict = s.to_dict(hash=ht.full_hash)
|
||||
for key in ['binary_cache_checksum', 'buildinfo']:
|
||||
spliced_yaml[key] = spec_record[key]
|
||||
spliced_spec_dict[key] = spec_record[key]
|
||||
|
||||
temp_yaml_path = os.path.join(tmpdir, 'spliced.spec.yaml')
|
||||
with open(temp_yaml_path, 'w') as fd:
|
||||
fd.write(syaml.dump(spliced_yaml))
|
||||
temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
|
||||
with open(temp_json_path, 'w') as fd:
|
||||
fd.write(sjson.dump(spliced_spec_dict))
|
||||
|
||||
spliced_yaml_url = spec_record['yaml_url']
|
||||
spliced_spec_url = spec_record['spec_url']
|
||||
web_util.push_to_url(
|
||||
temp_yaml_path, spliced_yaml_url, keep_original=False)
|
||||
temp_json_path, spliced_spec_url, keep_original=False)
|
||||
tty.debug(' spliced and wrote {0}'.format(
|
||||
spliced_yaml_url))
|
||||
spliced_spec_url))
|
||||
spec_record['spec'] = s
|
||||
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
|
||||
# Now that we have fixed any old spec yamls that might have had the wrong
|
||||
# Now that we have fixed any old specfiles that might have had the wrong
|
||||
# full hash for their dependencies, we can generate the index, compute
|
||||
# the hash, and push those files to the mirror.
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
@@ -948,19 +953,27 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# need to copy the spec file so the build cache can be downloaded
|
||||
# without concretizing with the current spack packages
|
||||
# and preferences
|
||||
spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.realpath(
|
||||
os.path.join(cache_prefix, specfile_name))
|
||||
|
||||
spec_file = spack.store.layout.spec_file_path(spec)
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
||||
deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')
|
||||
|
||||
remote_specfile_path = url_util.join(
|
||||
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
|
||||
remote_specfile_path_deprecated = url_util.join(
|
||||
outdir, os.path.relpath(deprecated_specfile_path,
|
||||
os.path.realpath(tmpdir)))
|
||||
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
if force:
|
||||
# If force and exists, overwrite. Otherwise raise exception on collision.
|
||||
if force:
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
web_util.remove_url(remote_specfile_path)
|
||||
else:
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
if web_util.url_exists(remote_specfile_path_deprecated):
|
||||
web_util.remove_url(remote_specfile_path_deprecated)
|
||||
elif (web_util.url_exists(remote_specfile_path) or
|
||||
web_util.url_exists(remote_specfile_path_deprecated)):
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
# make a copy of the install directory to work with
|
||||
workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
|
||||
@@ -1008,15 +1021,23 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
# add sha256 checksum to spec.yaml
|
||||
# add sha256 checksum to spec.json
|
||||
|
||||
with open(spec_file, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
spec_dict = yaml.load(content)
|
||||
if spec_file.endswith('.yaml'):
|
||||
spec_dict = yaml.load(content)
|
||||
elif spec_file.endswith('.json'):
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
raise ValueError(
|
||||
'{0} not a valid spec file type (json or yaml)'.format(
|
||||
spec_file))
|
||||
bchecksum = {}
|
||||
bchecksum['hash_algorithm'] = 'sha256'
|
||||
bchecksum['hash'] = checksum
|
||||
spec_dict['binary_cache_checksum'] = bchecksum
|
||||
# Add original install prefix relative to layout root to spec.yaml.
|
||||
# Add original install prefix relative to layout root to spec.json.
|
||||
# This will be used to determine is the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo['relative_prefix'] = os.path.relpath(
|
||||
@@ -1025,7 +1046,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
spec_dict['buildinfo'] = buildinfo
|
||||
|
||||
with open(specfile_path, 'w') as outfile:
|
||||
outfile.write(syaml.dump(spec_dict))
|
||||
outfile.write(sjson.dump(spec_dict))
|
||||
|
||||
# sign the tarball and spec file with gpg
|
||||
if not unsigned:
|
||||
@@ -1305,15 +1326,26 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
tarfile_name = tarball_name(spec, '.tar.gz')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(tmpdir, specfile_name)
|
||||
|
||||
specfile_is_json = True
|
||||
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
|
||||
json_name = tarball_name(spec, '.spec.json')
|
||||
json_path = os.path.join(tmpdir, json_name)
|
||||
with closing(tarfile.open(spackfile_path, 'r')) as tar:
|
||||
tar.extractall(tmpdir)
|
||||
# some buildcache tarfiles use bzip2 compression
|
||||
if not os.path.exists(tarfile_path):
|
||||
tarfile_name = tarball_name(spec, '.tar.bz2')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
|
||||
if os.path.exists(json_path):
|
||||
specfile_path = json_path
|
||||
elif os.path.exists(deprecated_yaml_path):
|
||||
specfile_is_json = False
|
||||
specfile_path = deprecated_yaml_path
|
||||
else:
|
||||
raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
|
||||
|
||||
if not unsigned:
|
||||
if os.path.exists('%s.asc' % specfile_path):
|
||||
try:
|
||||
@@ -1336,7 +1368,10 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spec_dict = {}
|
||||
with open(specfile_path, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
spec_dict = syaml.load(content)
|
||||
if specfile_is_json:
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
spec_dict = syaml.load(content)
|
||||
bchecksum = spec_dict['binary_cache_checksum']
|
||||
|
||||
# if the checksums don't match don't install
|
||||
@@ -1413,27 +1448,39 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
"""
|
||||
Try to find the spec directly on the configured mirrors
|
||||
"""
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_is_json = True
|
||||
lenient = not full_hash_match
|
||||
found_specs = []
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||
buildcache_fetch_url = url_util.join(
|
||||
buildcache_fetch_url_yaml = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
|
||||
buildcache_fetch_url_json = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, specfile_name)
|
||||
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url)
|
||||
fetched_spec_yaml = codecs.getreader('utf-8')(fs).read()
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err:
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url), url_err)
|
||||
continue
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_json), url_err)
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_yaml), url_err_y)
|
||||
continue
|
||||
specfile_contents = codecs.getreader('utf-8')(fs).read()
|
||||
|
||||
# read the spec from the build cache file. All specs in build caches
|
||||
# are concrete (as they are built) so we need to mark this spec
|
||||
# concrete on read-in.
|
||||
fetched_spec = Spec.from_yaml(fetched_spec_yaml)
|
||||
if specfile_is_json:
|
||||
fetched_spec = Spec.from_json(specfile_contents)
|
||||
else:
|
||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
# Do not recompute the full hash for the fetched spec, instead just
|
||||
@@ -1461,7 +1508,7 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
is included in the results.
|
||||
mirrors_to_check (dict): Optionally override the configured mirrors
|
||||
with the mirrors in this dictionary.
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.yaml``
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.json``
|
||||
files from remote mirrors, only consider the indices.
|
||||
|
||||
Return:
|
||||
@@ -1658,57 +1705,91 @@ def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
|
||||
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
|
||||
tty.debug(spec.tree())
|
||||
|
||||
# Try to retrieve the .spec.yaml directly, based on the known
|
||||
# Try to retrieve the specfile directly, based on the known
|
||||
# format of the name, in order to determine if the package
|
||||
# needs to be rebuilt.
|
||||
cache_prefix = build_cache_prefix(mirror_url)
|
||||
spec_yaml_file_name = tarball_name(spec, '.spec.yaml')
|
||||
file_path = os.path.join(cache_prefix, spec_yaml_file_name)
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||
deprecated_specfile_path = os.path.join(cache_prefix,
|
||||
deprecated_specfile_name)
|
||||
|
||||
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
|
||||
spec.short_spec, '' if rebuild_on_errors else 'not ')
|
||||
|
||||
try:
|
||||
_, _, yaml_file = web_util.read_from_url(file_path)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
_, _, spec_file = web_util.read_from_url(specfile_path)
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(spec.short_spec, file_path))
|
||||
tty.debug(url_err)
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError) as url_err_y:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1} or {2}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(
|
||||
spec.short_spec,
|
||||
specfile_path,
|
||||
deprecated_specfile_path))
|
||||
tty.debug(url_err)
|
||||
tty.debug(url_err_y)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
if not spec_file_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(
|
||||
specfile_path if specfile_is_json else deprecated_specfile_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
if not yaml_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(file_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
spec_dict = (sjson.load(spec_file_contents)
|
||||
if specfile_is_json else syaml.load(spec_file_contents))
|
||||
|
||||
spec_yaml = syaml.load(yaml_contents)
|
||||
|
||||
yaml_spec = spec_yaml['spec']
|
||||
try:
|
||||
nodes = spec_dict['spec']['nodes']
|
||||
except KeyError:
|
||||
# Prior node dict format omitted 'nodes' key
|
||||
nodes = spec_dict['spec']
|
||||
name = spec.name
|
||||
|
||||
# The "spec" key in the yaml is a list of objects, each with a single
|
||||
# In the old format:
|
||||
# The "spec" key represents a list of objects, each with a single
|
||||
# key that is the package name. While the list usually just contains
|
||||
# a single object, we iterate over the list looking for the object
|
||||
# with the name of this concrete spec as a key, out of an abundance
|
||||
# of caution.
|
||||
cached_pkg_specs = [item[name] for item in yaml_spec if name in item]
|
||||
# In format version 2:
|
||||
# ['spec']['nodes'] is still a list of objects, but with a
|
||||
# multitude of keys. The list will commonly contain many objects, and in the
|
||||
# case of build specs, it is highly likely that the same name will occur
|
||||
# once as the actual package, and then again as the build provenance of that
|
||||
# same package. Hence format version 2 matches on the dag hash, not name.
|
||||
if nodes and 'name' not in nodes[0]:
|
||||
# old style
|
||||
cached_pkg_specs = [item[name] for item in nodes if name in item]
|
||||
elif nodes and spec_dict['spec']['_meta']['version'] == 2:
|
||||
cached_pkg_specs = [item for item in nodes
|
||||
if item[ht.dag_hash.name] == spec.dag_hash()]
|
||||
cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
|
||||
|
||||
# If either the full_hash didn't exist in the .spec.yaml file, or it
|
||||
# If either the full_hash didn't exist in the specfile, or it
|
||||
# did, but didn't match the one we computed locally, then we should
|
||||
# just rebuild. This can be simplified once the dag_hash and the
|
||||
# full_hash become the same thing.
|
||||
rebuild = False
|
||||
if not cached_target or 'full_hash' not in cached_target:
|
||||
reason = 'full_hash was missing from remote spec.yaml'
|
||||
|
||||
if not cached_target:
|
||||
reason = 'did not find spec in specfile contents'
|
||||
rebuild = True
|
||||
elif ht.full_hash.name not in cached_target:
|
||||
reason = 'full_hash was missing from remote specfile'
|
||||
rebuild = True
|
||||
else:
|
||||
full_hash = cached_target['full_hash']
|
||||
full_hash = cached_target[ht.full_hash.name]
|
||||
if full_hash != pkg_full_hash:
|
||||
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
|
||||
full_hash, pkg_full_hash)
|
||||
@@ -1770,24 +1851,23 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
|
||||
def _download_buildcache_entry(mirror_root, descriptions):
|
||||
for description in descriptions:
|
||||
description_url = os.path.join(mirror_root, description['url'])
|
||||
path = description['path']
|
||||
fail_if_missing = description['required']
|
||||
|
||||
mkdirp(path)
|
||||
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
|
||||
try:
|
||||
stage.fetch()
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
fail_if_missing = description['required']
|
||||
for url in description['url']:
|
||||
description_url = os.path.join(mirror_root, url)
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
try:
|
||||
stage.fetch()
|
||||
break
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
else:
|
||||
if fail_if_missing:
|
||||
tty.error('Failed to download required url {0}'.format(
|
||||
description_url))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
@@ -876,6 +876,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
tty.debug(debug_msg)
|
||||
|
||||
if prune_dag and not rebuild_spec:
|
||||
tty.debug('Pruning spec that does not need to be rebuilt.')
|
||||
continue
|
||||
|
||||
# Check if this spec is in our list of known failures, now that
|
||||
@@ -922,7 +923,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
bc_root = os.path.join(
|
||||
local_mirror_dir, 'build_cache')
|
||||
artifact_paths.extend([os.path.join(bc_root, p) for p in [
|
||||
bindist.tarball_name(release_spec, '.spec.yaml'),
|
||||
bindist.tarball_name(release_spec, '.spec.json'),
|
||||
bindist.tarball_name(release_spec, '.cdashid'),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]])
|
||||
@@ -1381,13 +1382,13 @@ def read_cdashid_from_mirror(spec, mirror_url):
|
||||
return int(contents)
|
||||
|
||||
|
||||
def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
|
||||
def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
|
||||
try:
|
||||
unsigned = not sign_binaries
|
||||
tty.debug('Creating buildcache ({0})'.format(
|
||||
'unsigned' if unsigned else 'signed'))
|
||||
spack.cmd.buildcache._createtarball(
|
||||
env, spec_yaml=yaml_path, add_deps=False,
|
||||
env, spec_file=specfile_path, add_deps=False,
|
||||
output_location=mirror_url, force=True, allow_root=True,
|
||||
unsigned=unsigned)
|
||||
except Exception as inst:
|
||||
|
@@ -262,14 +262,14 @@ def display_specs_as_json(specs, deps=False):
|
||||
if spec.dag_hash() in seen:
|
||||
continue
|
||||
seen.add(spec.dag_hash())
|
||||
records.append(spec.to_record_dict())
|
||||
records.append(spec.to_node_dict())
|
||||
|
||||
if deps:
|
||||
for dep in spec.traverse():
|
||||
if dep.dag_hash() in seen:
|
||||
continue
|
||||
seen.add(dep.dag_hash())
|
||||
records.append(dep.to_record_dict())
|
||||
records.append(dep.to_node_dict())
|
||||
|
||||
sjson.dump(records, sys.stdout)
|
||||
|
||||
|
@@ -28,7 +28,7 @@
|
||||
import spack.util.web as web_util
|
||||
from spack.cmd import display_specs
|
||||
from spack.error import SpecError
|
||||
from spack.spec import Spec, save_dependency_spec_yamls
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
from spack.stage import Stage
|
||||
from spack.util.string import plural
|
||||
|
||||
@@ -75,8 +75,9 @@ def setup_parser(subparser):
|
||||
create.add_argument('--rebuild-index', action='store_true',
|
||||
default=False, help="Regenerate buildcache index " +
|
||||
"after building package(s)")
|
||||
create.add_argument('-y', '--spec-yaml', default=None,
|
||||
help='Create buildcache entry for spec from yaml file')
|
||||
create.add_argument('--spec-file', default=None,
|
||||
help=('Create buildcache entry for spec from json or ' +
|
||||
'yaml file'))
|
||||
create.add_argument('--only', default='package,dependencies',
|
||||
dest='things_to_install',
|
||||
choices=['package', 'dependencies'],
|
||||
@@ -163,8 +164,9 @@ def setup_parser(subparser):
|
||||
help='Check single spec instead of release specs file')
|
||||
|
||||
check.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help='Check single spec from yaml file instead of release specs file')
|
||||
'--spec-file', default=None,
|
||||
help=('Check single spec from json or yaml file instead of release ' +
|
||||
'specs file'))
|
||||
|
||||
check.add_argument(
|
||||
'--rebuild-on-error', default=False, action='store_true',
|
||||
@@ -173,14 +175,15 @@ def setup_parser(subparser):
|
||||
|
||||
check.set_defaults(func=check_binaries)
|
||||
|
||||
# Download tarball and spec.yaml
|
||||
# Download tarball and specfile
|
||||
dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
|
||||
dltarball.add_argument(
|
||||
'-s', '--spec', default=None,
|
||||
help="Download built tarball for spec from mirror")
|
||||
dltarball.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help="Download built tarball for spec (from yaml file) from mirror")
|
||||
'--spec-file', default=None,
|
||||
help=("Download built tarball for spec (from json or yaml file) " +
|
||||
"from mirror"))
|
||||
dltarball.add_argument(
|
||||
'-p', '--path', default=None,
|
||||
help="Path to directory where tarball should be downloaded")
|
||||
@@ -196,26 +199,27 @@ def setup_parser(subparser):
|
||||
'-s', '--spec', default=None,
|
||||
help='Spec string for which buildcache name is desired')
|
||||
getbuildcachename.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help='Path to spec yaml file for which buildcache name is desired')
|
||||
'--spec-file', default=None,
|
||||
help=('Path to spec json or yaml file for which buildcache name is ' +
|
||||
'desired'))
|
||||
getbuildcachename.set_defaults(func=get_buildcache_name)
|
||||
|
||||
# Given the root spec, save the yaml of the dependent spec to a file
|
||||
saveyaml = subparsers.add_parser('save-yaml',
|
||||
help=save_spec_yamls.__doc__)
|
||||
saveyaml.add_argument(
|
||||
savespecfile = subparsers.add_parser('save-specfile',
|
||||
help=save_specfiles.__doc__)
|
||||
savespecfile.add_argument(
|
||||
'--root-spec', default=None,
|
||||
help='Root spec of dependent spec')
|
||||
saveyaml.add_argument(
|
||||
'--root-spec-yaml', default=None,
|
||||
help='Path to yaml file containing root spec of dependent spec')
|
||||
saveyaml.add_argument(
|
||||
savespecfile.add_argument(
|
||||
'--root-specfile', default=None,
|
||||
help='Path to json or yaml file containing root spec of dependent spec')
|
||||
savespecfile.add_argument(
|
||||
'-s', '--specs', default=None,
|
||||
help='List of dependent specs for which saved yaml is desired')
|
||||
saveyaml.add_argument(
|
||||
'-y', '--yaml-dir', default=None,
|
||||
savespecfile.add_argument(
|
||||
'--specfile-dir', default=None,
|
||||
help='Path to directory where spec yamls should be saved')
|
||||
saveyaml.set_defaults(func=save_spec_yamls)
|
||||
savespecfile.set_defaults(func=save_specfiles)
|
||||
|
||||
# Copy buildcache from some directory to another mirror url
|
||||
copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
|
||||
@@ -223,8 +227,9 @@ def setup_parser(subparser):
|
||||
'--base-dir', default=None,
|
||||
help='Path to mirror directory (root of existing buildcache)')
|
||||
copy.add_argument(
|
||||
'--spec-yaml', default=None,
|
||||
help='Path to spec yaml file representing buildcache entry to copy')
|
||||
'--spec-file', default=None,
|
||||
help=('Path to spec json or yaml file representing buildcache entry to' +
|
||||
' copy'))
|
||||
copy.add_argument(
|
||||
'--destination-url', default=None,
|
||||
help='Destination mirror url')
|
||||
@@ -366,16 +371,19 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
|
||||
return specs_from_cli
|
||||
|
||||
|
||||
def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
|
||||
def _createtarball(env, spec_file=None, packages=None, add_spec=True,
|
||||
add_deps=True, output_location=os.getcwd(),
|
||||
signing_key=None, force=False, make_relative=False,
|
||||
unsigned=False, allow_root=False, rebuild_index=False):
|
||||
if spec_yaml:
|
||||
with open(spec_yaml, 'r') as fd:
|
||||
yaml_text = fd.read()
|
||||
tty.debug('createtarball read spec yaml:')
|
||||
tty.debug(yaml_text)
|
||||
s = Spec.from_yaml(yaml_text)
|
||||
if spec_file:
|
||||
with open(spec_file, 'r') as fd:
|
||||
specfile_contents = fd.read()
|
||||
tty.debug('createtarball read specfile contents:')
|
||||
tty.debug(specfile_contents)
|
||||
if spec_file.endswith('.json'):
|
||||
s = Spec.from_json(specfile_contents)
|
||||
else:
|
||||
s = Spec.from_yaml(specfile_contents)
|
||||
package = '/{0}'.format(s.dag_hash())
|
||||
matches = find_matching_specs(package, env=env)
|
||||
|
||||
@@ -388,7 +396,7 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
|
||||
else:
|
||||
tty.die("build cache file creation requires at least one" +
|
||||
" installed package spec, an active environment," +
|
||||
" or else a path to a yaml file containing a spec" +
|
||||
" or else a path to a json or yaml file containing a spec" +
|
||||
" to install")
|
||||
specs = set()
|
||||
|
||||
@@ -497,7 +505,7 @@ def createtarball(args):
|
||||
add_spec = ('package' in args.things_to_install)
|
||||
add_deps = ('dependencies' in args.things_to_install)
|
||||
|
||||
_createtarball(env, spec_yaml=args.spec_yaml, packages=args.specs,
|
||||
_createtarball(env, spec_file=args.spec_file, packages=args.specs,
|
||||
add_spec=add_spec, add_deps=add_deps,
|
||||
output_location=output_location, signing_key=args.key,
|
||||
force=args.force, make_relative=args.rel,
|
||||
@@ -598,7 +606,7 @@ def check_binaries(args):
|
||||
its result, specifically, if the exit code is non-zero, then at least
|
||||
one of the indicated specs needs to be rebuilt.
|
||||
"""
|
||||
if args.spec or args.spec_yaml:
|
||||
if args.spec or args.spec_file:
|
||||
specs = [get_concrete_spec(args)]
|
||||
else:
|
||||
env = spack.cmd.require_active_env(cmd_name='buildcache')
|
||||
@@ -635,15 +643,16 @@ def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
|
||||
|
||||
files_to_fetch = [
|
||||
{
|
||||
'url': tarball_path_name,
|
||||
'url': [tarball_path_name],
|
||||
'path': local_tarball_path,
|
||||
'required': True,
|
||||
}, {
|
||||
'url': bindist.tarball_name(concrete_spec, '.spec.yaml'),
|
||||
'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
|
||||
bindist.tarball_name(concrete_spec, '.spec.yaml')],
|
||||
'path': local_dest,
|
||||
'required': True,
|
||||
}, {
|
||||
'url': bindist.tarball_name(concrete_spec, '.cdashid'),
|
||||
'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
|
||||
'path': local_dest,
|
||||
'required': require_cdashid,
|
||||
},
|
||||
@@ -657,9 +666,9 @@ def get_tarball(args):
|
||||
command uses the process exit code to indicate its result, specifically,
|
||||
a non-zero exit code indicates that the command failed to download at
|
||||
least one of the required buildcache components. Normally, just the
|
||||
tarball and .spec.yaml files are required, but if the --require-cdashid
|
||||
tarball and .spec.json files are required, but if the --require-cdashid
|
||||
argument was provided, then a .cdashid file is also required."""
|
||||
if not args.spec and not args.spec_yaml:
|
||||
if not args.spec and not args.spec_file:
|
||||
tty.msg('No specs provided, exiting.')
|
||||
sys.exit(0)
|
||||
|
||||
@@ -676,7 +685,7 @@ def get_tarball(args):
|
||||
|
||||
def get_concrete_spec(args):
|
||||
spec_str = args.spec
|
||||
spec_yaml_path = args.spec_yaml
|
||||
spec_yaml_path = args.spec_file
|
||||
|
||||
if not spec_str and not spec_yaml_path:
|
||||
tty.msg('Must provide either spec string or path to ' +
|
||||
@@ -708,14 +717,14 @@ def get_buildcache_name(args):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def save_spec_yamls(args):
|
||||
def save_specfiles(args):
|
||||
"""Get full spec for dependencies, relative to root spec, and write them
|
||||
to files in the specified output directory. Uses exit code to signal
|
||||
success or failure. An exit code of zero means the command was likely
|
||||
successful. If any errors or exceptions are encountered, or if expected
|
||||
command-line arguments are not provided, then the exit code will be
|
||||
non-zero."""
|
||||
if not args.root_spec and not args.root_spec_yaml:
|
||||
if not args.root_spec and not args.root_specfile:
|
||||
tty.msg('No root spec provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
@@ -723,20 +732,20 @@ def save_spec_yamls(args):
|
||||
tty.msg('No dependent specs provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
if not args.yaml_dir:
|
||||
if not args.specfile_dir:
|
||||
tty.msg('No yaml directory provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
if args.root_spec_yaml:
|
||||
with open(args.root_spec_yaml) as fd:
|
||||
root_spec_as_yaml = fd.read()
|
||||
if args.root_specfile:
|
||||
with open(args.root_specfile) as fd:
|
||||
root_spec_as_json = fd.read()
|
||||
else:
|
||||
root_spec = Spec(args.root_spec)
|
||||
root_spec.concretize()
|
||||
root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
|
||||
|
||||
save_dependency_spec_yamls(
|
||||
root_spec_as_yaml, args.yaml_dir, args.specs.split())
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
|
||||
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
|
||||
save_dependency_specfiles(
|
||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
@@ -745,10 +754,10 @@ def buildcache_copy(args):
|
||||
"""Copy a buildcache entry and all its files from one mirror, given as
|
||||
'--base-dir', to some other mirror, specified as '--destination-url'.
|
||||
The specific buildcache entry to be copied from one location to the
|
||||
other is identified using the '--spec-yaml' argument."""
|
||||
other is identified using the '--spec-file' argument."""
|
||||
# TODO: This sub-command should go away once #11117 is merged
|
||||
|
||||
if not args.spec_yaml:
|
||||
if not args.spec_file:
|
||||
tty.msg('No spec yaml provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
@@ -768,12 +777,12 @@ def buildcache_copy(args):
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
with open(args.spec_yaml, 'r') as fd:
|
||||
with open(args.spec_file, 'r') as fd:
|
||||
spec = Spec.from_yaml(fd.read())
|
||||
except Exception as e:
|
||||
tty.debug(e)
|
||||
tty.error('Unable to concrectize spec from yaml {0}'.format(
|
||||
args.spec_yaml))
|
||||
args.spec_file))
|
||||
sys.exit(1)
|
||||
|
||||
dest_root_path = dest_url
|
||||
@@ -788,10 +797,15 @@ def buildcache_copy(args):
|
||||
tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
|
||||
|
||||
specfile_rel_path = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.json'))
|
||||
specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
|
||||
specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
|
||||
|
||||
specfile_rel_path_yaml = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
|
||||
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
|
||||
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
|
||||
|
||||
cdashidfile_rel_path = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
|
||||
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
|
||||
@@ -807,6 +821,9 @@ def buildcache_copy(args):
|
||||
tty.msg('Copying {0}'.format(specfile_rel_path))
|
||||
shutil.copyfile(specfile_src_path, specfile_dest_path)
|
||||
|
||||
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
|
||||
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
|
||||
|
||||
# Copy the cdashid file (if exists) to the destination mirror
|
||||
if os.path.exists(cdashid_src_path):
|
||||
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
|
||||
@@ -894,6 +911,8 @@ def buildcache_sync(args):
|
||||
build_cache_dir, bindist.tarball_path_name(s, '.spack')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
|
||||
])
|
||||
|
@@ -78,8 +78,8 @@ def setup_parser(subparser):
|
||||
default=False, help="""Spack always check specs against configured
|
||||
binary mirrors when generating the pipeline, regardless of whether or not
|
||||
DAG pruning is enabled. This flag controls whether it might attempt to
|
||||
fetch remote spec.yaml files directly (ensuring no spec is rebuilt if it is
|
||||
present on the mirror), or whether it should reduce pipeline generation time
|
||||
fetch remote spec files directly (ensuring no spec is rebuilt if it
|
||||
is present on the mirror), or whether it should reduce pipeline generation time
|
||||
by assuming all remote buildcache indices are up to date and only use those
|
||||
to determine whether a given spec is up to date on mirrors. In the latter
|
||||
case, specs might be needlessly rebuilt if remote buildcache indices are out
|
||||
|
@@ -74,6 +74,7 @@ def create_db_tarball(args):
|
||||
wd = os.path.dirname(str(spack.store.root))
|
||||
with working_dir(wd):
|
||||
files = [spack.store.db._index_path]
|
||||
files += glob('%s/*/*/*/.spack/spec.json' % base)
|
||||
files += glob('%s/*/*/*/.spack/spec.yaml' % base)
|
||||
files = [os.path.relpath(f) for f in files]
|
||||
|
||||
|
@@ -401,7 +401,10 @@ def get_tests(specs):
|
||||
# 2. Concrete specs from yaml files
|
||||
for file in args.specfiles:
|
||||
with open(file, 'r') as f:
|
||||
s = spack.spec.Spec.from_yaml(f)
|
||||
if file.endswith('yaml') or file.endswith('yml'):
|
||||
s = spack.spec.Spec.from_yaml(f)
|
||||
else:
|
||||
s = spack.spec.Spec.from_json(f)
|
||||
|
||||
concretized = s.concretized()
|
||||
if concretized.dag_hash() != s.dag_hash():
|
||||
|
@@ -40,6 +40,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.hash_types as ht
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -66,7 +67,7 @@ def nullcontext(*args, **kwargs):
|
||||
# DB version. This is stuck in the DB file to track changes in format.
|
||||
# Increment by one when the database format changes.
|
||||
# Versions before 5 were not integers.
|
||||
_db_version = Version('5')
|
||||
_db_version = Version('6')
|
||||
|
||||
# For any version combinations here, skip reindex when upgrading.
|
||||
# Reindexing can take considerable time and is not always necessary.
|
||||
@@ -77,6 +78,7 @@ def nullcontext(*args, **kwargs):
|
||||
# fields. So, skip the reindex for this transition. The new
|
||||
# version is saved to disk the first time the DB is written.
|
||||
(Version('0.9.3'), Version('5')),
|
||||
(Version('5'), Version('6'))
|
||||
]
|
||||
|
||||
# Default timeout for spack database locks in seconds or None (no timeout).
|
||||
@@ -645,7 +647,7 @@ def _write_to_file(self, stream):
|
||||
except (TypeError, ValueError) as e:
|
||||
raise sjson.SpackJSONError("error writing JSON database:", str(e))
|
||||
|
||||
def _read_spec_from_dict(self, hash_key, installs):
|
||||
def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
|
||||
"""Recursively construct a spec from a hash in a YAML database.
|
||||
|
||||
Does not do any locking.
|
||||
@@ -654,8 +656,13 @@ def _read_spec_from_dict(self, hash_key, installs):
|
||||
|
||||
# Install records don't include hash with spec, so we add it in here
|
||||
# to ensure it is read properly.
|
||||
for name in spec_dict:
|
||||
spec_dict[name]['hash'] = hash_key
|
||||
if 'name' not in spec_dict.keys():
|
||||
# old format, can't update format here
|
||||
for name in spec_dict:
|
||||
spec_dict[name]['hash'] = hash_key
|
||||
else:
|
||||
# new format, already a singleton
|
||||
spec_dict[hash.name] = hash_key
|
||||
|
||||
# Build spec from dict first.
|
||||
spec = spack.spec.Spec.from_node_dict(spec_dict)
|
||||
@@ -686,10 +693,13 @@ def _assign_dependencies(self, hash_key, installs, data):
|
||||
# Add dependencies from other records in the install DB to
|
||||
# form a full spec.
|
||||
spec = data[hash_key].spec
|
||||
spec_dict = installs[hash_key]['spec']
|
||||
if 'dependencies' in spec_dict[spec.name]:
|
||||
yaml_deps = spec_dict[spec.name]['dependencies']
|
||||
for dname, dhash, dtypes in spack.spec.Spec.read_yaml_dep_specs(
|
||||
spec_node_dict = installs[hash_key]['spec']
|
||||
if 'name' not in spec_node_dict:
|
||||
# old format
|
||||
spec_node_dict = spec_node_dict[spec.name]
|
||||
if 'dependencies' in spec_node_dict:
|
||||
yaml_deps = spec_node_dict['dependencies']
|
||||
for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(
|
||||
yaml_deps):
|
||||
# It is important that we always check upstream installations
|
||||
# in the same order, and that we always check the local
|
||||
|
@@ -12,7 +12,8 @@
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
|
||||
from llnl.util.filesystem import mkdirp
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.config
|
||||
import spack.hash_types as ht
|
||||
@@ -33,50 +34,286 @@ def _check_concrete(spec):
|
||||
|
||||
class DirectoryLayout(object):
|
||||
"""A directory layout is used to associate unique paths with specs.
|
||||
Different installations are going to want differnet layouts for their
|
||||
install, and they can use this to customize the nesting structure of
|
||||
spack installs.
|
||||
Different installations are going to want different layouts for their
|
||||
install, and they can use this to customize the nesting structure of
|
||||
spack installs. The default layout is:
|
||||
|
||||
* <install root>/
|
||||
|
||||
* <platform-os-target>/
|
||||
|
||||
* <compiler>-<compiler version>/
|
||||
|
||||
* <name>-<version>-<hash>
|
||||
|
||||
The hash here is a SHA-1 hash for the full DAG plus the build
|
||||
spec.
|
||||
|
||||
The installation directory projections can be modified with the
|
||||
projections argument.
|
||||
"""
|
||||
|
||||
def __init__(self, root):
|
||||
def __init__(self, root, **kwargs):
|
||||
self.root = root
|
||||
self.check_upstream = True
|
||||
projections = kwargs.get('projections') or default_projections
|
||||
self.projections = dict((key, projection.lower())
|
||||
for key, projection in projections.items())
|
||||
|
||||
# apply hash length as appropriate
|
||||
self.hash_length = kwargs.get('hash_length', None)
|
||||
if self.hash_length is not None:
|
||||
for when_spec, projection in self.projections.items():
|
||||
if '{hash}' not in projection:
|
||||
if '{hash' in projection:
|
||||
raise InvalidDirectoryLayoutParametersError(
|
||||
"Conflicting options for installation layout hash"
|
||||
" length")
|
||||
else:
|
||||
raise InvalidDirectoryLayoutParametersError(
|
||||
"Cannot specify hash length when the hash is not"
|
||||
" part of all install_tree projections")
|
||||
self.projections[when_spec] = projection.replace(
|
||||
"{hash}", "{hash:%d}" % self.hash_length)
|
||||
|
||||
# If any of these paths change, downstream databases may not be able to
|
||||
# locate files in older upstream databases
|
||||
self.metadata_dir = '.spack'
|
||||
self.deprecated_dir = 'deprecated'
|
||||
self.spec_file_name = 'spec.json'
|
||||
# Use for checking yaml and deprecated types
|
||||
self._spec_file_name_yaml = 'spec.yaml'
|
||||
self.extension_file_name = 'extensions.yaml'
|
||||
self.packages_dir = 'repos' # archive of package.py files
|
||||
self.manifest_file_name = 'install_manifest.json'
|
||||
|
||||
@property
|
||||
def hidden_file_paths(self):
|
||||
"""Return a list of hidden files used by the directory layout.
|
||||
|
||||
Paths are relative to the root of an install directory.
|
||||
|
||||
If the directory layout uses no hidden files to maintain
|
||||
state, this should return an empty container, e.g. [] or (,).
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def all_specs(self):
|
||||
"""To be implemented by subclasses to traverse all specs for which there is
|
||||
a directory within the root.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
return (self.metadata_dir,)
|
||||
|
||||
def relative_path_for_spec(self, spec):
|
||||
"""Implemented by subclasses to return a relative path from the install
|
||||
root to a unique location for the provided spec."""
|
||||
raise NotImplementedError()
|
||||
_check_concrete(spec)
|
||||
|
||||
projection = spack.projections.get_projection(self.projections, spec)
|
||||
path = spec.format(projection)
|
||||
return path
|
||||
|
||||
def write_spec(self, spec, path):
|
||||
"""Write a spec out to a file."""
|
||||
_check_concrete(spec)
|
||||
with open(path, 'w') as f:
|
||||
# The hash the the projection is the DAG hash but we write out the
|
||||
# full provenance by full hash so it's availabe if we want it later
|
||||
# extension = os.path.splitext(path)[-1].lower()
|
||||
# if 'json' in extension:
|
||||
spec.to_json(f, hash=ht.full_hash)
|
||||
# elif 'yaml' in extension:
|
||||
# spec.to_yaml(f, hash=ht.full_hash)
|
||||
|
||||
def write_host_environment(self, spec):
|
||||
"""The host environment is a json file with os, kernel, and spack
|
||||
versioning. We use it in the case that an analysis later needs to
|
||||
easily access this information.
|
||||
"""
|
||||
from spack.util.environment import get_host_environment_metadata
|
||||
env_file = self.env_metadata_path(spec)
|
||||
environ = get_host_environment_metadata()
|
||||
with open(env_file, 'w') as fd:
|
||||
sjson.dump(environ, fd)
|
||||
|
||||
def read_spec(self, path):
|
||||
"""Read the contents of a file and parse them as a spec"""
|
||||
try:
|
||||
with open(path) as f:
|
||||
extension = os.path.splitext(path)[-1].lower()
|
||||
if extension == '.json':
|
||||
spec = spack.spec.Spec.from_json(f)
|
||||
elif extension == '.yaml':
|
||||
# Too late for conversion; spec_file_path() already called.
|
||||
spec = spack.spec.Spec.from_yaml(f)
|
||||
else:
|
||||
raise SpecReadError('Did not recognize spec file extension:'
|
||||
' {0}'.format(extension))
|
||||
except Exception as e:
|
||||
if spack.config.get('config:debug'):
|
||||
raise
|
||||
raise SpecReadError(
|
||||
'Unable to read file: %s' % path, 'Cause: ' + str(e))
|
||||
|
||||
# Specs read from actual installations are always concrete
|
||||
spec._mark_concrete()
|
||||
return spec
|
||||
|
||||
def spec_file_path(self, spec):
|
||||
"""Gets full path to spec file"""
|
||||
_check_concrete(spec)
|
||||
# Attempts to convert to JSON if possible.
|
||||
# Otherwise just returns the YAML.
|
||||
yaml_path = os.path.join(
|
||||
self.metadata_path(spec), self._spec_file_name_yaml)
|
||||
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
|
||||
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
|
||||
self.write_spec(spec, json_path)
|
||||
try:
|
||||
os.remove(yaml_path)
|
||||
except OSError as err:
|
||||
tty.debug('Could not remove deprecated {0}'.format(yaml_path))
|
||||
tty.debug(err)
|
||||
elif os.path.exists(yaml_path):
|
||||
return yaml_path
|
||||
return json_path
|
||||
|
||||
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
|
||||
"""Gets full path to spec file for deprecated spec
|
||||
|
||||
If the deprecator_spec is provided, use that. Otherwise, assume
|
||||
deprecated_spec is already deprecated and its prefix links to the
|
||||
prefix of its deprecator."""
|
||||
_check_concrete(deprecated_spec)
|
||||
if deprecator_spec:
|
||||
_check_concrete(deprecator_spec)
|
||||
|
||||
# If deprecator spec is None, assume deprecated_spec already deprecated
|
||||
# and use its link to find the file.
|
||||
base_dir = self.path_for_spec(
|
||||
deprecator_spec
|
||||
) if deprecator_spec else os.readlink(deprecated_spec.prefix)
|
||||
|
||||
yaml_path = os.path.join(base_dir, self.metadata_dir,
|
||||
self.deprecated_dir, deprecated_spec.dag_hash()
|
||||
+ '_' + self._spec_file_name_yaml)
|
||||
|
||||
json_path = os.path.join(base_dir, self.metadata_dir,
|
||||
self.deprecated_dir, deprecated_spec.dag_hash()
|
||||
+ '_' + self.spec_file_name)
|
||||
|
||||
if (os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path)):
|
||||
self.write_spec(deprecated_spec, json_path)
|
||||
try:
|
||||
os.remove(yaml_path)
|
||||
except (IOError, OSError) as err:
|
||||
tty.debug('Could not remove deprecated {0}'.format(yaml_path))
|
||||
tty.debug(err)
|
||||
elif os.path.exists(yaml_path):
|
||||
return yaml_path
|
||||
|
||||
return json_path
|
||||
|
||||
@contextmanager
|
||||
def disable_upstream_check(self):
|
||||
self.check_upstream = False
|
||||
yield
|
||||
self.check_upstream = True
|
||||
|
||||
def metadata_path(self, spec):
|
||||
return os.path.join(spec.prefix, self.metadata_dir)
|
||||
|
||||
def env_metadata_path(self, spec):
|
||||
return os.path.join(self.metadata_path(spec), "install_environment.json")
|
||||
|
||||
def build_packages_path(self, spec):
|
||||
return os.path.join(self.metadata_path(spec), self.packages_dir)
|
||||
|
||||
def create_install_directory(self, spec):
|
||||
"""Creates the installation directory for a spec."""
|
||||
raise NotImplementedError()
|
||||
_check_concrete(spec)
|
||||
|
||||
prefix = self.check_installed(spec)
|
||||
if prefix:
|
||||
raise InstallDirectoryAlreadyExistsError(prefix)
|
||||
|
||||
# Create install directory with properly configured permissions
|
||||
# Cannot import at top of file
|
||||
from spack.package_prefs import get_package_dir_permissions, get_package_group
|
||||
|
||||
# Each package folder can have its own specific permissions, while
|
||||
# intermediate folders (arch/compiler) are set with access permissions
|
||||
# equivalent to the root permissions of the layout.
|
||||
group = get_package_group(spec)
|
||||
perms = get_package_dir_permissions(spec)
|
||||
|
||||
fs.mkdirp(spec.prefix, mode=perms, group=group, default_perms='parents')
|
||||
fs.mkdirp(self.metadata_path(spec), mode=perms, group=group) # in prefix
|
||||
|
||||
self.write_spec(spec, self.spec_file_path(spec))
|
||||
|
||||
def check_installed(self, spec):
|
||||
"""Checks whether a spec is installed.
|
||||
_check_concrete(spec)
|
||||
path = self.path_for_spec(spec)
|
||||
spec_file_path = self.spec_file_path(spec)
|
||||
|
||||
Return the spec's prefix, if it is installed, None otherwise.
|
||||
if not os.path.isdir(path):
|
||||
return None
|
||||
|
||||
Raise an exception if the install is inconsistent or corrupt.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
if not os.path.isfile(spec_file_path):
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Install prefix exists but contains no spec.json:',
|
||||
" " + path)
|
||||
|
||||
installed_spec = self.read_spec(spec_file_path)
|
||||
if installed_spec == spec:
|
||||
return path
|
||||
|
||||
# DAG hashes currently do not include build dependencies.
|
||||
#
|
||||
# TODO: remove this when we do better concretization and don't
|
||||
# ignore build-only deps in hashes.
|
||||
elif (installed_spec.copy(deps=('link', 'run')) ==
|
||||
spec.copy(deps=('link', 'run'))):
|
||||
# The directory layout prefix is based on the dag hash, so among
|
||||
# specs with differing full-hash but matching dag-hash, only one
|
||||
# may be installed. This means for example that for two instances
|
||||
# that differ only in CMake version used to build, only one will
|
||||
# be installed.
|
||||
return path
|
||||
|
||||
if spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise SpecHashCollisionError(spec, installed_spec)
|
||||
else:
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Spec file in %s does not match hash!' % spec_file_path)
|
||||
|
||||
def all_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
specs = []
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(os.sep))
|
||||
# NOTE: Does not validate filename extension; should happen later
|
||||
path_elems += [self.metadata_dir, 'spec.json']
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
if not spec_files: # we're probably looking at legacy yaml...
|
||||
path_elems += [self.metadata_dir, 'spec.yaml']
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
specs.extend([self.read_spec(s) for s in spec_files])
|
||||
return specs
|
||||
|
||||
def all_deprecated_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
deprecated_specs = set()
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(os.sep))
|
||||
# NOTE: Does not validate filename extension; should happen later
|
||||
path_elems += [self.metadata_dir, self.deprecated_dir,
|
||||
'*_spec.*'] # + self.spec_file_name]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
get_depr_spec_file = lambda x: os.path.join(
|
||||
os.path.dirname(os.path.dirname(x)), self.spec_file_name)
|
||||
deprecated_specs |= set((self.read_spec(s),
|
||||
self.read_spec(get_depr_spec_file(s)))
|
||||
for s in spec_files)
|
||||
return deprecated_specs
|
||||
|
||||
def specs_by_hash(self):
|
||||
by_hash = {}
|
||||
for spec in self.all_specs():
|
||||
by_hash[spec.dag_hash()] = spec
|
||||
return by_hash
|
||||
|
||||
def path_for_spec(self, spec):
|
||||
"""Return absolute path from the root to a directory for the spec."""
|
||||
@@ -183,236 +420,6 @@ def remove_extension(self, spec, ext_spec):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class YamlDirectoryLayout(DirectoryLayout):
|
||||
"""By default lays out installation directories like this::
|
||||
<install root>/
|
||||
<platform-os-target>/
|
||||
<compiler>-<compiler version>/
|
||||
<name>-<version>-<hash>
|
||||
|
||||
The hash here is a SHA-1 hash for the full DAG plus the build
|
||||
spec. TODO: implement the build spec.
|
||||
|
||||
The installation directory projections can be modified with the
|
||||
projections argument.
|
||||
"""
|
||||
|
||||
def __init__(self, root, **kwargs):
|
||||
super(YamlDirectoryLayout, self).__init__(root)
|
||||
projections = kwargs.get('projections') or default_projections
|
||||
self.projections = dict((key, projection.lower())
|
||||
for key, projection in projections.items())
|
||||
|
||||
# apply hash length as appropriate
|
||||
self.hash_length = kwargs.get('hash_length', None)
|
||||
if self.hash_length is not None:
|
||||
for when_spec, projection in self.projections.items():
|
||||
if '{hash}' not in projection:
|
||||
if '{hash' in projection:
|
||||
raise InvalidDirectoryLayoutParametersError(
|
||||
"Conflicting options for installation layout hash"
|
||||
" length")
|
||||
else:
|
||||
raise InvalidDirectoryLayoutParametersError(
|
||||
"Cannot specify hash length when the hash is not"
|
||||
" part of all install_tree projections")
|
||||
self.projections[when_spec] = projection.replace(
|
||||
"{hash}", "{hash:%d}" % self.hash_length)
|
||||
|
||||
# If any of these paths change, downstream databases may not be able to
|
||||
# locate files in older upstream databases
|
||||
self.metadata_dir = '.spack'
|
||||
self.deprecated_dir = 'deprecated'
|
||||
self.spec_file_name = 'spec.yaml'
|
||||
self.extension_file_name = 'extensions.yaml'
|
||||
self.packages_dir = 'repos' # archive of package.py files
|
||||
self.manifest_file_name = 'install_manifest.json'
|
||||
|
||||
@property
|
||||
def hidden_file_paths(self):
|
||||
return (self.metadata_dir,)
|
||||
|
||||
def relative_path_for_spec(self, spec):
|
||||
_check_concrete(spec)
|
||||
|
||||
projection = spack.projections.get_projection(self.projections, spec)
|
||||
path = spec.format(projection)
|
||||
return path
|
||||
|
||||
def write_spec(self, spec, path):
|
||||
"""Write a spec out to a file."""
|
||||
_check_concrete(spec)
|
||||
with open(path, 'w') as f:
|
||||
# The hash the the projection is the DAG hash but we write out the
|
||||
# full provenance by full hash so it's availabe if we want it later
|
||||
spec.to_yaml(f, hash=ht.full_hash)
|
||||
|
||||
def write_host_environment(self, spec):
|
||||
"""The host environment is a json file with os, kernel, and spack
|
||||
versioning. We use it in the case that an analysis later needs to
|
||||
easily access this information.
|
||||
"""
|
||||
from spack.util.environment import get_host_environment_metadata
|
||||
env_file = self.env_metadata_path(spec)
|
||||
environ = get_host_environment_metadata()
|
||||
with open(env_file, 'w') as fd:
|
||||
sjson.dump(environ, fd)
|
||||
|
||||
def read_spec(self, path):
|
||||
"""Read the contents of a file and parse them as a spec"""
|
||||
try:
|
||||
with open(path) as f:
|
||||
spec = spack.spec.Spec.from_yaml(f)
|
||||
except Exception as e:
|
||||
if spack.config.get('config:debug'):
|
||||
raise
|
||||
raise SpecReadError(
|
||||
'Unable to read file: %s' % path, 'Cause: ' + str(e))
|
||||
|
||||
# Specs read from actual installations are always concrete
|
||||
spec._mark_concrete()
|
||||
return spec
|
||||
|
||||
def spec_file_path(self, spec):
|
||||
"""Gets full path to spec file"""
|
||||
_check_concrete(spec)
|
||||
return os.path.join(self.metadata_path(spec), self.spec_file_name)
|
||||
|
||||
def deprecated_file_name(self, spec):
|
||||
"""Gets name of deprecated spec file in deprecated dir"""
|
||||
_check_concrete(spec)
|
||||
return spec.dag_hash() + '_' + self.spec_file_name
|
||||
|
||||
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
|
||||
"""Gets full path to spec file for deprecated spec
|
||||
|
||||
If the deprecator_spec is provided, use that. Otherwise, assume
|
||||
deprecated_spec is already deprecated and its prefix links to the
|
||||
prefix of its deprecator."""
|
||||
_check_concrete(deprecated_spec)
|
||||
if deprecator_spec:
|
||||
_check_concrete(deprecator_spec)
|
||||
|
||||
# If deprecator spec is None, assume deprecated_spec already deprecated
|
||||
# and use its link to find the file.
|
||||
base_dir = self.path_for_spec(
|
||||
deprecator_spec
|
||||
) if deprecator_spec else os.readlink(deprecated_spec.prefix)
|
||||
|
||||
return os.path.join(base_dir, self.metadata_dir, self.deprecated_dir,
|
||||
self.deprecated_file_name(deprecated_spec))
|
||||
|
||||
@contextmanager
|
||||
def disable_upstream_check(self):
|
||||
self.check_upstream = False
|
||||
yield
|
||||
self.check_upstream = True
|
||||
|
||||
def metadata_path(self, spec):
|
||||
return os.path.join(spec.prefix, self.metadata_dir)
|
||||
|
||||
def env_metadata_path(self, spec):
|
||||
return os.path.join(self.metadata_path(spec), "install_environment.json")
|
||||
|
||||
def build_packages_path(self, spec):
|
||||
return os.path.join(self.metadata_path(spec), self.packages_dir)
|
||||
|
||||
def create_install_directory(self, spec):
|
||||
_check_concrete(spec)
|
||||
|
||||
prefix = self.check_installed(spec)
|
||||
if prefix:
|
||||
raise InstallDirectoryAlreadyExistsError(prefix)
|
||||
|
||||
# Create install directory with properly configured permissions
|
||||
# Cannot import at top of file
|
||||
from spack.package_prefs import get_package_dir_permissions, get_package_group
|
||||
|
||||
# Each package folder can have its own specific permissions, while
|
||||
# intermediate folders (arch/compiler) are set with access permissions
|
||||
# equivalent to the root permissions of the layout.
|
||||
group = get_package_group(spec)
|
||||
perms = get_package_dir_permissions(spec)
|
||||
|
||||
mkdirp(spec.prefix, mode=perms, group=group, default_perms='parents')
|
||||
mkdirp(self.metadata_path(spec), mode=perms, group=group) # in prefix
|
||||
|
||||
self.write_spec(spec, self.spec_file_path(spec))
|
||||
|
||||
def check_installed(self, spec):
|
||||
_check_concrete(spec)
|
||||
path = self.path_for_spec(spec)
|
||||
spec_file_path = self.spec_file_path(spec)
|
||||
|
||||
if not os.path.isdir(path):
|
||||
return None
|
||||
|
||||
if not os.path.isfile(spec_file_path):
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Install prefix exists but contains no spec.yaml:',
|
||||
" " + path)
|
||||
|
||||
installed_spec = self.read_spec(spec_file_path)
|
||||
if installed_spec == spec:
|
||||
return path
|
||||
|
||||
# DAG hashes currently do not include build dependencies.
|
||||
#
|
||||
# TODO: remove this when we do better concretization and don't
|
||||
# ignore build-only deps in hashes.
|
||||
elif (installed_spec.copy(deps=('link', 'run')) ==
|
||||
spec.copy(deps=('link', 'run'))):
|
||||
# The directory layout prefix is based on the dag hash, so among
|
||||
# specs with differing full-hash but matching dag-hash, only one
|
||||
# may be installed. This means for example that for two instances
|
||||
# that differ only in CMake version used to build, only one will
|
||||
# be installed.
|
||||
return path
|
||||
|
||||
if spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise SpecHashCollisionError(spec, installed_spec)
|
||||
else:
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Spec file in %s does not match hash!' % spec_file_path)
|
||||
|
||||
def all_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
specs = []
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(os.sep))
|
||||
path_elems += [self.metadata_dir, self.spec_file_name]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
specs.extend([self.read_spec(s) for s in spec_files])
|
||||
return specs
|
||||
|
||||
def all_deprecated_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
deprecated_specs = set()
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(os.sep))
|
||||
path_elems += [self.metadata_dir, self.deprecated_dir,
|
||||
'*_' + self.spec_file_name]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
get_depr_spec_file = lambda x: os.path.join(
|
||||
os.path.dirname(os.path.dirname(x)), self.spec_file_name)
|
||||
deprecated_specs |= set((self.read_spec(s),
|
||||
self.read_spec(get_depr_spec_file(s)))
|
||||
for s in spec_files)
|
||||
return deprecated_specs
|
||||
|
||||
def specs_by_hash(self):
|
||||
by_hash = {}
|
||||
for spec in self.all_specs():
|
||||
by_hash[spec.dag_hash()] = spec
|
||||
return by_hash
|
||||
|
||||
|
||||
class YamlViewExtensionsLayout(ExtensionsLayout):
|
||||
"""Maintain extensions within a view.
|
||||
"""
|
||||
@@ -537,7 +544,7 @@ def _write_extensions(self, spec, extensions):
|
||||
|
||||
# Create a temp file in the same directory as the actual file.
|
||||
dirname, basename = os.path.split(path)
|
||||
mkdirp(dirname)
|
||||
fs.mkdirp(dirname)
|
||||
|
||||
tmp = tempfile.NamedTemporaryFile(
|
||||
prefix=basename, dir=dirname, delete=False)
|
||||
|
@@ -259,7 +259,7 @@ def deactivate(shell='sh'):
|
||||
tty.warn('Could not fully deactivate view due to missing package '
|
||||
'or repo, shell environment may be corrupt.')
|
||||
|
||||
tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
|
||||
tty.debug("Deactivated environment '%s'" % _active_environment.name)
|
||||
_active_environment = None
|
||||
|
||||
return cmds
|
||||
@@ -1698,7 +1698,8 @@ def _to_lockfile_dict(self):
|
||||
dag_hash_all = s.build_hash()
|
||||
if dag_hash_all not in concrete_specs:
|
||||
spec_dict = s.to_node_dict(hash=ht.build_hash)
|
||||
spec_dict[s.name]['hash'] = s.dag_hash()
|
||||
# Assumes no legacy formats, since this was just created.
|
||||
spec_dict[ht.dag_hash.name] = s.dag_hash()
|
||||
concrete_specs[dag_hash_all] = spec_dict
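# Illustrative sketch (hash values reused from the sqlite example in spec.py's
# to_dict() docstring): the shape of one lockfile entry built above, keyed by
# build hash and carrying the flat node dict plus the dag hash under 'hash'.
#
#     concrete_specs['tsjnz7lgob7bu2wd4sqzzjenxewc2zha'] = {
#         'name': 'sqlite',
#         'version': '3.34.0',
#         # ... remaining node dict fields ...
#         'hash': 'd2yzqp2highd7sn4nr5ndkw3ydcrlhtk',
#     }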
|
||||
|
||||
hash_spec_list = zip(
|
||||
@@ -1744,7 +1745,7 @@ def _read_lockfile_dict(self, d):
|
||||
specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)
|
||||
|
||||
for dag_hash, node_dict in json_specs_by_hash.items():
|
||||
for dep_name, dep_hash, deptypes in (
|
||||
for _, dep_hash, deptypes, _ in (
|
||||
Spec.dependencies_from_node_dict(node_dict)):
|
||||
specs_by_hash[dag_hash]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes)
|
||||
|
@@ -7,6 +7,8 @@
|
||||
|
||||
import spack.dependency as dp
|
||||
|
||||
hashes = []
|
||||
|
||||
|
||||
class SpecHashDescriptor(object):
|
||||
"""This class defines how hashes are generated on Spec objects.
|
||||
@@ -16,36 +18,38 @@ class SpecHashDescriptor(object):
|
||||
include certain dependency types, and it may optionally include a
|
||||
canonicalized hash of the package.py for each node in the graph.
|
||||
|
||||
We currently use different hashes for different use cases.
|
||||
"""
|
||||
We currently use different hashes for different use cases."""
|
||||
|
||||
hash_types = ('_dag_hash', '_build_hash', '_full_hash', '_package_hash')
|
||||
|
||||
def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None,
|
||||
override=None):
|
||||
def __init__(self, deptype, package_hash, name, override=None):
|
||||
self.deptype = dp.canonical_deptype(deptype)
|
||||
self.package_hash = package_hash
|
||||
self.attr = attr
|
||||
self.name = name
|
||||
hashes.append(self)
|
||||
# Allow spec hashes to have an alternate computation method
|
||||
self.override = override
|
||||
|
||||
@property
|
||||
def attr(self):
|
||||
"""Private attribute stored on spec"""
|
||||
return '_' + self.name
|
||||
|
||||
|
||||
#: Default Hash descriptor, used by Spec.dag_hash() and stored in the DB.
|
||||
dag_hash = SpecHashDescriptor(deptype=('link', 'run'), package_hash=False,
|
||||
attr='_hash')
|
||||
dag_hash = SpecHashDescriptor(
|
||||
deptype=('link', 'run'), package_hash=False, name='hash')
|
||||
|
||||
|
||||
#: Hash descriptor that includes build dependencies.
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=False, attr='_build_hash')
|
||||
deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
|
||||
|
||||
|
||||
#: Full hash used in build pipelines to determine when to rebuild packages.
|
||||
full_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=True, attr='_full_hash')
|
||||
deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
|
||||
|
||||
|
||||
#: Package hash used as part of full hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, attr='_package_hash',
|
||||
deptype=(), package_hash=True, name='package_hash',
|
||||
override=lambda s: s.package.content_hash())
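# Illustrative sketch (an assumption, mirroring Spec._spec_hash() elsewhere in
# this change): each descriptor's name maps to the cached attribute '_<name>',
# and a descriptor with an override bypasses node-dict hashing entirely.
import spack.util.hash
import spack.util.spack_json as sjson


def _example_compute(spec, descriptor):
    if descriptor.override is not None:
        return descriptor.override(spec)  # e.g. package_hash above
    node_dict = spec.to_node_dict(hash=descriptor)
    return spack.util.hash.b32_hash(sjson.dump(node_dict))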
|
||||
|
@@ -425,11 +425,15 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||
data['tags'] = self.tags
|
||||
|
||||
# If we allow the spec to not exist (meaning we create it) we need to
|
||||
# include the full spec.yaml here
|
||||
# include the full specfile here
|
||||
if not spec_exists:
|
||||
meta_dir = os.path.dirname(spec.package.install_log_path)
|
||||
spec_file = os.path.join(meta_dir, "spec.yaml")
|
||||
data['spec'] = syaml.load(read_file(spec_file))
|
||||
spec_file = os.path.join(meta_dir, "spec.json")
|
||||
if os.path.exists(spec_file):
|
||||
data['spec'] = sjson.load(read_file(spec_file))
|
||||
else:
|
||||
spec_file = os.path.join(meta_dir, "spec.yaml")
|
||||
data['spec'] = syaml.load(read_file(spec_file))
|
||||
|
||||
if self.save_local:
|
||||
return self.get_local_build_id(data, full_hash, return_response)
|
||||
|
@@ -12,9 +12,9 @@
|
||||
|
||||
schema = {
|
||||
'$schema': 'http://json-schema.org/schema#',
|
||||
'title': 'Spack buildcache spec.yaml schema',
|
||||
'title': 'Spack buildcache specfile schema',
|
||||
'type': 'object',
|
||||
# 'additionalProperties': True,
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'buildinfo': {
|
||||
'type': 'object',
|
||||
@@ -26,7 +26,8 @@
|
||||
},
|
||||
},
|
||||
'spec': {
|
||||
'type': 'array',
|
||||
'type': 'object',
|
||||
'additionalProperties': True,
|
||||
'items': spack.schema.spec.properties,
|
||||
},
|
||||
'binary_cache_checksum': {
|
||||
|
@@ -3,7 +3,9 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Schema for a spec found in spec.yaml or database index.json files
|
||||
"""Schema for a spec found in spec descriptor or database index.json files
|
||||
|
||||
TODO: This needs to be updated? Especially the hashes under properties.
|
||||
|
||||
.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
|
||||
:lines: 13-
|
||||
@@ -67,95 +69,127 @@
|
||||
},
|
||||
}
|
||||
|
||||
build_spec = {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'required': ['name', 'hash'],
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'hash': {'type': 'string'}
|
||||
}
|
||||
}
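# Illustrative sketch (values reused from the sqlite example elsewhere in this
# change): a node fragment that satisfies the build_spec sub-schema above, as
# it would appear on a spliced spec.
#
#     "build_spec": {
#         "name": "sqlite",
#         "hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk"
#     }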
|
||||
|
||||
#: Properties for inclusion in other schemas
|
||||
properties = {
|
||||
r'\w[\w-]*': { # package name
|
||||
'spec': {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'required': [
|
||||
'version',
|
||||
'arch',
|
||||
'compiler',
|
||||
'namespace',
|
||||
'parameters',
|
||||
'_meta',
|
||||
'nodes'
|
||||
],
|
||||
'properties': {
|
||||
'hash': {'type': 'string'},
|
||||
'full_hash': {'type': 'string'},
|
||||
'version': {
|
||||
'oneOf': [
|
||||
{'type': 'string'},
|
||||
{'type': 'number'},
|
||||
],
|
||||
},
|
||||
'arch': arch,
|
||||
'compiler': {
|
||||
'_meta': {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'version': {'type': 'string'},
|
||||
},
|
||||
'version': {'type': 'number'}
|
||||
}
|
||||
},
|
||||
'develop': {
|
||||
'anyOf': [
|
||||
{'type': 'boolean'},
|
||||
{'type': 'string'},
|
||||
],
|
||||
},
|
||||
'namespace': {'type': 'string'},
|
||||
'parameters': {
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'cflags',
|
||||
'cppflags',
|
||||
'cxxflags',
|
||||
'fflags',
|
||||
'ldflags',
|
||||
'ldlibs',
|
||||
],
|
||||
'additionalProperties': True,
|
||||
'properties': {
|
||||
'patches': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cppflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cxxflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'fflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'ldflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'ldlib': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
},
|
||||
'patches': {
|
||||
'nodes': {
|
||||
'type': 'array',
|
||||
'items': {},
|
||||
},
|
||||
'dependencies': dependencies,
|
||||
},
|
||||
},
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'required': [
|
||||
'version',
|
||||
'arch',
|
||||
'compiler',
|
||||
'namespace',
|
||||
'parameters',
|
||||
],
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'hash': {'type': 'string'},
|
||||
'full_hash': {'type': 'string'},
|
||||
'build_hash': {'type': 'string'},
|
||||
'package_hash': {'type': 'string'},
|
||||
'version': {
|
||||
'oneOf': [
|
||||
{'type': 'string'},
|
||||
{'type': 'number'},
|
||||
],
|
||||
},
|
||||
'arch': arch,
|
||||
'compiler': {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'version': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'develop': {
|
||||
'anyOf': [
|
||||
{'type': 'boolean'},
|
||||
{'type': 'string'},
|
||||
],
|
||||
},
|
||||
'namespace': {'type': 'string'},
|
||||
'parameters': {
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'cflags',
|
||||
'cppflags',
|
||||
'cxxflags',
|
||||
'fflags',
|
||||
'ldflags',
|
||||
'ldlibs',
|
||||
],
|
||||
'additionalProperties': True,
|
||||
'properties': {
|
||||
'patches': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cppflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'cxxflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'fflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'ldflags': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
'ldlib': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
},
|
||||
'patches': {
|
||||
'type': 'array',
|
||||
'items': {},
|
||||
},
|
||||
'dependencies': dependencies,
|
||||
'build_spec': build_spec,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
'$schema': 'http://json-schema.org/schema#',
|
||||
|
@@ -1506,13 +1506,13 @@ def _spec_hash(self, hash):
|
||||
Arguments:
|
||||
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
|
||||
"""
|
||||
# TODO: curently we strip build dependencies by default. Rethink
|
||||
# TODO: currently we strip build dependencies by default. Rethink
|
||||
# this when we move to using package hashing on all specs.
|
||||
if hash.override is not None:
|
||||
return hash.override(self)
|
||||
node_dict = self.to_node_dict(hash=hash)
|
||||
yaml_text = syaml.dump(node_dict, default_flow_style=True)
|
||||
return spack.util.hash.b32_hash(yaml_text)
|
||||
json_text = sjson.dump(node_dict)
|
||||
return spack.util.hash.b32_hash(json_text)
|
||||
|
||||
def _cached_hash(self, hash, length=None):
|
||||
"""Helper function for storing a cached hash on the spec.
|
||||
@@ -1628,6 +1628,8 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
"""
|
||||
d = syaml.syaml_dict()
|
||||
|
||||
d['name'] = self.name
|
||||
|
||||
if self.versions:
|
||||
d.update(self.versions.to_dict())
|
||||
|
||||
@@ -1674,59 +1676,88 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
package_hash = package_hash.decode('utf-8')
|
||||
d['package_hash'] = package_hash
|
||||
|
||||
# Note: Relies on sorting dict by keys later in algorithm.
|
||||
deps = self.dependencies_dict(deptype=hash.deptype)
|
||||
if deps:
|
||||
d['dependencies'] = syaml.syaml_dict([
|
||||
(name,
|
||||
syaml.syaml_dict([
|
||||
('hash', dspec.spec._cached_hash(hash)),
|
||||
('type', sorted(str(s) for s in dspec.deptypes))])
|
||||
) for name, dspec in sorted(deps.items())
|
||||
])
|
||||
|
||||
return syaml.syaml_dict([(self.name, d)])
|
||||
if deps:
|
||||
deps_list = []
|
||||
for name, dspec in sorted(deps.items()):
|
||||
name_tuple = ('name', name)
|
||||
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
|
||||
type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
|
||||
deps_list.append(syaml.syaml_dict([name_tuple,
|
||||
hash_tuple,
|
||||
type_tuple]))
|
||||
d['dependencies'] = deps_list
|
||||
|
||||
# Name is included in case this is replacing a virtual.
|
||||
if self._build_spec:
|
||||
d['build_spec'] = syaml.syaml_dict([
|
||||
('name', self.build_spec.name),
|
||||
(hash.name, self.build_spec._cached_hash(hash))
|
||||
])
|
||||
return d
|
||||
|
||||
def to_dict(self, hash=ht.dag_hash):
|
||||
"""Create a dictionary suitable for writing this spec to YAML or JSON.
|
||||
|
||||
This dictionary is like the one that is ultimately written to a
|
||||
``spec.yaml`` file in each Spack installation directory. For
|
||||
``spec.json`` file in each Spack installation directory. For
|
||||
example, for sqlite::
|
||||
|
||||
{
|
||||
'spec': [
|
||||
{
|
||||
'sqlite': {
|
||||
'version': '3.28.0',
|
||||
'arch': {
|
||||
'platform': 'darwin',
|
||||
'platform_os': 'mojave',
|
||||
'target': 'x86_64',
|
||||
},
|
||||
'compiler': {
|
||||
'name': 'apple-clang',
|
||||
'version': '10.0.0',
|
||||
},
|
||||
'namespace': 'builtin',
|
||||
'parameters': {
|
||||
'fts': 'true',
|
||||
'functions': 'false',
|
||||
'cflags': [],
|
||||
'cppflags': [],
|
||||
'cxxflags': [],
|
||||
'fflags': [],
|
||||
'ldflags': [],
|
||||
'ldlibs': [],
|
||||
},
|
||||
'dependencies': {
|
||||
'readline': {
|
||||
'hash': 'zvaa4lhlhilypw5quj3akyd3apbq5gap',
|
||||
'type': ['build', 'link'],
|
||||
}
|
||||
},
|
||||
'hash': '722dzmgymxyxd6ovjvh4742kcetkqtfs'
|
||||
}
|
||||
"spec": {
|
||||
"_meta": {
|
||||
"version": 2
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"name": "sqlite",
|
||||
"version": "3.34.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "catalina",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "11.0.0"
|
||||
},
|
||||
"namespace": "builtin",
|
||||
"parameters": {
|
||||
"column_metadata": true,
|
||||
"fts": true,
|
||||
"functions": false,
|
||||
"rtree": false,
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "readline",
|
||||
"build_hash": "4f47cggum7p4qmp3xna4hi547o66unva",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "zlib",
|
||||
"build_hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
],
|
||||
"hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
|
||||
"full_hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
|
||||
"build_hash": "tsjnz7lgob7bu2wd4sqzzjenxewc2zha"
|
||||
},
|
||||
# ... more node dicts for readline and its dependencies ...
|
||||
]
|
||||
}
|
||||
@@ -1751,18 +1782,31 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
hashes in the dictionary.
|
||||
|
||||
"""
|
||||
node_list = []
|
||||
node_list = [] # Using a list to preserve preorder traversal for hash.
|
||||
hash_set = set()
|
||||
for s in self.traverse(order='pre', deptype=hash.deptype):
|
||||
node_list.append(s.node_dict_with_hashes(hash))
|
||||
|
||||
return syaml.syaml_dict([('spec', node_list)])
|
||||
spec_hash = s.node_dict_with_hashes(hash)[hash.name]
|
||||
if spec_hash not in hash_set:
|
||||
node_list.append(s.node_dict_with_hashes(hash))
|
||||
hash_set.add(spec_hash)
|
||||
if s.build_spec is not s:
|
||||
build_spec_list = s.build_spec.to_dict(hash)['spec']['nodes']
|
||||
for node in build_spec_list:
|
||||
node_hash = node[hash.name]
|
||||
if node_hash not in hash_set:
|
||||
node_list.append(node)
|
||||
hash_set.add(node_hash)
|
||||
meta_dict = syaml.syaml_dict([('version', 2)])
|
||||
inner_dict = syaml.syaml_dict([('_meta', meta_dict), ('nodes', node_list)])
|
||||
spec_dict = syaml.syaml_dict([('spec', inner_dict)])
|
||||
return spec_dict
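# Illustrative usage sketch (the file name is an assumption): serializing a
# concrete spec with build provenance included uses the dictionary built above.
#
#     with open('spec.json', 'w') as f:
#         f.write(spec.to_json(hash=ht.build_hash))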
|
||||
|
||||
def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
""" Returns a node_dict of this spec with the dag hash added. If this
|
||||
spec is concrete, the full hash is added as well. If 'build' is in
|
||||
the hash_type, the build hash is also added. """
|
||||
node = self.to_node_dict(hash)
|
||||
node[self.name]['hash'] = self.dag_hash()
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
# full_hash and build_hash are lazily computed -- but if we write
|
||||
# a spec out, we want them to be included. This is effectively
|
||||
@@ -1781,42 +1825,19 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
write_full_hash = (
|
||||
self._hashes_final and self._full_hash or # cached and final
|
||||
not self._hashes_final) # lazily compute
|
||||
if write_full_hash:
|
||||
node[ht.full_hash.name] = self.full_hash()
|
||||
write_build_hash = 'build' in hash.deptype and (
|
||||
self._hashes_final and self._build_hash or # cached and final
|
||||
not self._hashes_final) # lazily compute
|
||||
|
||||
if write_full_hash:
|
||||
node[self.name]['full_hash'] = self.full_hash()
|
||||
if write_build_hash:
|
||||
node[self.name]['build_hash'] = self.build_hash()
|
||||
|
||||
node[ht.build_hash.name] = self.build_hash()
|
||||
else:
|
||||
node['concrete'] = False
|
||||
if hash.name == 'build_hash':
|
||||
node[hash.name] = self.build_hash()
|
||||
return node
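# Illustrative sketch (values taken from the sqlite example in to_dict()'s
# docstring): for a concrete spec written with hash=ht.build_hash, the node
# returned above carries all three hashes at the top level.
#
#     {
#         "name": "sqlite",
#         # ... other node fields ...
#         "hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
#         "full_hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
#         "build_hash": "tsjnz7lgob7bu2wd4sqzzjenxewc2zha"
#     }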
|
||||
|
||||
def to_record_dict(self):
|
||||
"""Return a "flat" dictionary with name and hash as top-level keys.
|
||||
|
||||
This is similar to ``to_node_dict()``, but the name and the hash
|
||||
are "flattened" into the dictionary for easiler parsing by tools
|
||||
like ``jq``. Instead of being keyed by name or hash, the
|
||||
dictionary "name" and "hash" fields, e.g.::
|
||||
|
||||
{
|
||||
"name": "openssl"
|
||||
"hash": "3ws7bsihwbn44ghf6ep4s6h4y2o6eznv"
|
||||
"version": "3.28.0",
|
||||
"arch": {
|
||||
...
|
||||
}
|
||||
|
||||
But is otherwise the same as ``to_node_dict()``.
|
||||
|
||||
"""
|
||||
dictionary = syaml.syaml_dict()
|
||||
dictionary["name"] = self.name
|
||||
dictionary["hash"] = self.dag_hash()
|
||||
dictionary.update(self.to_node_dict()[self.name])
|
||||
return dictionary
|
||||
|
||||
def to_yaml(self, stream=None, hash=ht.dag_hash):
|
||||
return syaml.dump(
|
||||
self.to_dict(hash), stream=stream, default_flow_style=False)
|
||||
@@ -1826,16 +1847,19 @@ def to_json(self, stream=None, hash=ht.dag_hash):
|
||||
|
||||
@staticmethod
|
||||
def from_node_dict(node):
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
|
||||
spec = Spec()
|
||||
if 'name' in node.keys():
|
||||
# New format
|
||||
name = node['name']
|
||||
else:
|
||||
# Old format
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
for h in ht.hashes:
|
||||
setattr(spec, h.attr, node.get(h.name, None))
|
||||
|
||||
spec.name = name
|
||||
spec.namespace = node.get('namespace', None)
|
||||
spec._hash = node.get('hash', None)
|
||||
spec._build_hash = node.get('build_hash', None)
|
||||
spec._full_hash = node.get('full_hash', None)
|
||||
spec._package_hash = node.get('package_hash', None)
|
||||
|
||||
if 'version' in node or 'versions' in node:
|
||||
spec.versions = vn.VersionList.from_dict(node)
|
||||
@@ -1898,41 +1922,65 @@ def from_node_dict(node):
|
||||
mvar._patches_in_order_of_appearance = patches
|
||||
|
||||
# Don't read dependencies here; from_node_dict() is used by
|
||||
# from_yaml() to read the root *and* each dependency spec.
|
||||
# from_yaml() and from_json() to read the root *and* each dependency
|
||||
# spec.
|
||||
|
||||
return spec
|
||||
|
||||
@staticmethod
|
||||
def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
|
||||
build_spec_dict = node['build_spec']
|
||||
return build_spec_dict['name'], build_spec_dict[hash_type], hash_type
|
||||
|
||||
@staticmethod
|
||||
def dependencies_from_node_dict(node):
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
if 'name' in node.keys():
|
||||
# New format
|
||||
name = node['name']
|
||||
else:
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
if 'dependencies' not in node:
|
||||
return
|
||||
for t in Spec.read_yaml_dep_specs(node['dependencies']):
|
||||
yield t
|
||||
|
||||
@staticmethod
|
||||
def read_yaml_dep_specs(dependency_dict):
|
||||
def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
|
||||
"""Read the DependencySpec portion of a YAML-formatted Spec.
|
||||
|
||||
This needs to be backward-compatible with older spack spec
|
||||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
for dep_name, elt in dependency_dict.items():
|
||||
dep_iter = deps.items() if isinstance(deps, dict) else deps
|
||||
for dep in dep_iter:
|
||||
if isinstance(dep, tuple):
|
||||
dep_name, elt = dep
|
||||
else:
|
||||
elt = dep
|
||||
dep_name = dep['name']
|
||||
if isinstance(elt, six.string_types):
|
||||
# original format, elt is just the dependency hash.
|
||||
dag_hash, deptypes = elt, ['build', 'link']
|
||||
dep_hash, deptypes = elt, ['build', 'link']
|
||||
elif isinstance(elt, tuple):
|
||||
# original deptypes format: (used tuples, not future-proof)
|
||||
dag_hash, deptypes = elt
|
||||
dep_hash, deptypes = elt
|
||||
elif isinstance(elt, dict):
|
||||
# new format: elements of dependency spec are keyed.
|
||||
dag_hash, deptypes = elt['hash'], elt['type']
|
||||
for key in (ht.full_hash.name,
|
||||
ht.build_hash.name,
|
||||
ht.dag_hash.name):
|
||||
if key in elt:
|
||||
dep_hash, deptypes = elt[key], elt['type']
|
||||
hash_type = key
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError(
|
||||
"Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError(
|
||||
"Couldn't parse dependency types in spec.")
|
||||
|
||||
yield dep_name, dag_hash, list(deptypes)
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
|
||||
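# Illustrative sketch (hash values reused from examples elsewhere in this
# change): the dependency encodings read_yaml_dep_specs() accepts, oldest
# format first.
#
#     # 1. original format: dict of name -> raw hash; deptypes default to
#     #    ['build', 'link']
#     {'readline': 'zvaa4lhlhilypw5quj3akyd3apbq5gap'}
#
#     # 2. keyed dict under the old 'hash' key
#     {'readline': {'hash': 'zvaa4lhlhilypw5quj3akyd3apbq5gap',
#                   'type': ['build', 'link']}}
#
#     # 3. new list-of-records format, keyed by any hash type
#     [{'name': 'readline',
#       'build_hash': '4f47cggum7p4qmp3xna4hi547o66unva',
#       'type': ['build', 'link']}]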
@staticmethod
|
||||
def from_literal(spec_dict, normal=True):
|
||||
@@ -2088,11 +2136,12 @@ def spec_and_dependency_types(s):
|
||||
|
||||
@staticmethod
|
||||
def from_dict(data):
|
||||
"""Construct a spec from YAML.
|
||||
"""Construct a spec from JSON/YAML.
|
||||
|
||||
Parameters:
|
||||
data -- a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
|
||||
return _spec_from_dict(data)
|
||||
|
||||
@staticmethod
|
||||
@@ -2596,7 +2645,7 @@ def concretized(self, tests=False):
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
"""
|
||||
clone = self.copy(caches=False)
|
||||
clone = self.copy(caches=True)
|
||||
clone.concretize(tests=tests)
|
||||
return clone
|
||||
|
||||
@@ -3179,6 +3228,7 @@ def satisfies(self, other, deps=True, strict=False, strict_deps=False):
|
||||
* `strict`: strict means that we *must* meet all the
|
||||
constraints specified on other.
|
||||
"""
|
||||
|
||||
other = self._autospec(other)
|
||||
|
||||
# The only way to satisfy a concrete spec is to match its hash exactly.
|
||||
@@ -4322,11 +4372,11 @@ def splice(self, other, transitive):
|
||||
# Record whether hashes are already cached
|
||||
# So we don't try to compute a hash from insufficient
|
||||
# provenance later
|
||||
has_build_hash = getattr(dep, ht.build_hash.attr, None)
|
||||
has_full_hash = getattr(dep, ht.full_hash.attr, None)
|
||||
has_build_hash = getattr(dep, ht.build_hash.name, None)
|
||||
has_full_hash = getattr(dep, ht.full_hash.name, None)
|
||||
|
||||
# package hash cannot be affected by splice
|
||||
dep.clear_cached_hashes(ignore=['_package_hash'])
|
||||
dep.clear_cached_hashes(ignore=['package_hash'])
|
||||
|
||||
# Since this is a concrete spec, we want to make sure hashes
|
||||
# are cached writing specs only writes cached hashes in case
|
||||
@@ -4343,10 +4393,10 @@ def clear_cached_hashes(self, ignore=()):
|
||||
"""
|
||||
Clears all cached hashes in a Spec, while preserving other properties.
|
||||
"""
|
||||
for attr in ht.SpecHashDescriptor.hash_types:
|
||||
if attr not in ignore:
|
||||
if hasattr(self, attr):
|
||||
setattr(self, attr, None)
|
||||
for h in ht.hashes:
|
||||
if h.attr not in ignore:
|
||||
if hasattr(self, h.attr):
|
||||
setattr(self, h.attr, None)
|
||||
|
||||
def __hash__(self):
|
||||
# If the spec is concrete, we leverage the DAG hash and just use
|
||||
@@ -4368,12 +4418,10 @@ def __reduce__(self):
|
||||
return _spec_from_dict, (self.to_dict(hash=ht.build_hash),)
|
||||
|
||||
|
||||
# Note: This function has been refactored from being a static method
|
||||
# of Spec to be a function at the module level. This was needed to
|
||||
# support its use in __reduce__ to pickle a Spec object in Python 2.
|
||||
# It can be moved back safely after we drop support for Python 2.7
|
||||
def _spec_from_dict(data):
|
||||
"""Construct a spec from YAML.
|
||||
def _spec_from_old_dict(data):
|
||||
"""Construct a spec from JSON/YAML using the format version 1.
|
||||
Note: Version 1 format has no notion of a build_spec, and names are
|
||||
guaranteed to be unique.
|
||||
|
||||
Parameters:
|
||||
data -- a nested dict/list data structure read from YAML or JSON.
|
||||
@@ -4395,13 +4443,75 @@ def _spec_from_dict(data):
|
||||
if 'dependencies' not in node[name]:
|
||||
continue
|
||||
|
||||
yaml_deps = node[name]['dependencies']
|
||||
for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
|
||||
for dname, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
deps[name]._add_dependency(deps[dname], dtypes)
|
||||
|
||||
return spec
|
||||
|
||||
|
||||
# Note: This function has been refactored from being a static method
|
||||
# of Spec to be a function at the module level. This was needed to
|
||||
# support its use in __reduce__ to pickle a Spec object in Python 2.
|
||||
# It can be moved back safely after we drop support for Python 2.7
|
||||
def _spec_from_dict(data):
|
||||
"""Construct a spec from YAML.
|
||||
|
||||
Parameters:
|
||||
data -- a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
if isinstance(data['spec'], list): # Legacy specfile format
|
||||
return _spec_from_old_dict(data)
|
||||
|
||||
# Current specfile format
|
||||
nodes = data['spec']['nodes']
|
||||
hash_type = None
|
||||
any_deps = False
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
if 'dependencies' in node.keys():
|
||||
any_deps = True
|
||||
for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
break
|
||||
|
||||
if not any_deps: # If we never see a dependency...
|
||||
hash_type = ht.dag_hash.name  # default to dag_hash provenance
|
||||
elif not hash_type: # Seen a dependency, still don't know hash_type
|
||||
raise spack.error.SpecError("Spec dictionary contains malformed "
|
||||
"dependencies. Old format?")
|
||||
|
||||
hash_dict = {}
|
||||
root_spec_hash = None
|
||||
|
||||
# Pass 1: Create a single lookup dictionary by hash
|
||||
for i, node in enumerate(nodes):
|
||||
node_hash = node[hash_type]
|
||||
node_spec = Spec.from_node_dict(node)
|
||||
hash_dict[node_hash] = node
|
||||
hash_dict[node_hash]['node_spec'] = node_spec
|
||||
if i == 0:
|
||||
root_spec_hash = node_hash
|
||||
if not root_spec_hash:
|
||||
raise spack.error.SpecError("Spec dictionary contains no nodes.")
|
||||
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node['node_spec']
|
||||
for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]['node_spec'], dtypes)
|
||||
if 'build_spec' in node.keys():
|
||||
_, bhash, _ = Spec.build_spec_from_node_dict(node,
|
||||
hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]['node_spec']
|
||||
|
||||
return hash_dict[root_spec_hash]['node_spec']
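# Illustrative usage sketch (an assumption): both specfile formats round-trip
# through the readers above, and both end up in _spec_from_dict(), which
# dispatches on the shape of data['spec'].
#
#     s2 = Spec.from_json(spec.to_json(hash=ht.build_hash))  # version 2 format
#     s1 = Spec.from_yaml(legacy_yaml_text)                  # version 1 format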
|
||||
|
||||
|
||||
class LazySpecCache(collections.defaultdict):
|
||||
"""Cache for Specs that uses a spec_like as key, and computes lazily
|
||||
the corresponding value ``Spec(spec_like``.
|
||||
@@ -4439,8 +4549,8 @@ def __init__(self):
|
||||
(r'\=', lambda scanner, val: self.token(EQ, val)),
|
||||
|
||||
# Filenames match before identifiers, so no initial filename
|
||||
# component is parsed as a spec (e.g., in subdir/spec.yaml)
|
||||
(r'[/\w.-]*/[/\w/-]+\.yaml[^\b]*',
|
||||
# component is parsed as a spec (e.g., in subdir/spec.yaml/json)
|
||||
(r'[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*',
|
||||
lambda scanner, v: self.token(FILE, v)),
|
||||
|
||||
# Hash match after filename. No valid filename can be a hash
|
||||
@@ -4599,8 +4709,8 @@ def spec_from_file(self):
|
||||
1. We require that filenames end in .yaml, which means that no valid
|
||||
filename can be interpreted as a hash (hashes can't have '.')
|
||||
|
||||
2. We avoid treating paths like /path/to/spec.yaml as hashes, or paths
|
||||
like subdir/spec.yaml as ids by lexing filenames before hashes.
|
||||
2. We avoid treating paths like /path/to/spec.json as hashes, or paths
|
||||
like subdir/spec.json as ids by lexing filenames before hashes.
|
||||
|
||||
3. For spec names that match file and id regexes, like 'builtin.yaml',
|
||||
we backtrack from spec_from_file() and treat them as spec names.
|
||||
@@ -4615,14 +4725,16 @@ def spec_from_file(self):
|
||||
# The error is clearly an omitted space. To handle this, the FILE
|
||||
# regex admits text *beyond* .yaml, and we raise a nice error for
|
||||
# file names that don't end in .yaml.
|
||||
if not path.endswith(".yaml"):
|
||||
if not (path.endswith(".yaml") or path.endswith(".json")):
|
||||
raise SpecFilenameError(
|
||||
"Spec filename must end in .yaml: '{0}'".format(path))
|
||||
"Spec filename must end in .yaml or .json: '{0}'".format(path))
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise NoSuchSpecFileError("No such spec file: '{0}'".format(path))
|
||||
|
||||
with open(path) as f:
|
||||
if path.endswith(".json"):
|
||||
return Spec.from_json(f)
|
||||
return Spec.from_yaml(f)
|
||||
|
||||
def parse_compiler(self, text):
|
||||
@@ -4785,16 +4897,24 @@ def parse(string):
|
||||
return SpecParser().parse(string)
|
||||
|
||||
|
||||
def save_dependency_spec_yamls(
|
||||
root_spec_as_yaml, output_directory, dependencies=None):
|
||||
def save_dependency_specfiles(
|
||||
root_spec_info, output_directory, dependencies=None,
|
||||
spec_format='json'):
|
||||
"""Given a root spec (represented as a yaml object), index it with a subset
|
||||
of its dependencies, and write each dependency to a separate yaml file
|
||||
in the output directory. By default, all dependencies will be written
|
||||
out. To choose a smaller subset of dependencies to be written, pass a
|
||||
list of package names in the dependencies parameter. In case of any
|
||||
kind of error, SaveSpecDependenciesError is raised with a specific
|
||||
message about what went wrong."""
|
||||
root_spec = Spec.from_yaml(root_spec_as_yaml)
|
||||
list of package names in the dependencies parameter. If the format of the
|
||||
incoming spec is not json, that can be specified with the spec_format
|
||||
parameter. This can be used to convert from yaml specfiles to the
|
||||
json format."""
|
||||
if spec_format == 'json':
|
||||
root_spec = Spec.from_json(root_spec_info)
|
||||
elif spec_format == 'yaml':
|
||||
root_spec = Spec.from_yaml(root_spec_info)
|
||||
else:
|
||||
raise SpecParseError('Unrecognized spec format {0}.'.format(
|
||||
spec_format))
|
||||
|
||||
dep_list = dependencies
|
||||
if not dep_list:
|
||||
@@ -4806,10 +4926,10 @@ def save_dependency_spec_yamls(
|
||||
dep_name, root_spec.name)
|
||||
raise SpecDependencyNotFoundError(msg)
|
||||
dep_spec = root_spec[dep_name]
|
||||
yaml_path = os.path.join(output_directory, '{0}.yaml'.format(dep_name))
|
||||
json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))
|
||||
|
||||
with open(yaml_path, 'w') as fd:
|
||||
fd.write(dep_spec.to_yaml(hash=ht.build_hash))
|
||||
with open(json_path, 'w') as fd:
|
||||
fd.write(dep_spec.to_json(hash=ht.build_hash))
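# Illustrative usage sketch (paths and package names are hypothetical):
# converting a legacy yaml specfile into per-dependency json specfiles.
#
#     with open('patchelf.spec.yaml') as f:
#         save_dependency_specfiles(f.read(), 'specfiles/',
#                                   dependencies=['zlib'], spec_format='yaml')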
|
||||
|
||||
|
||||
class SpecParseError(spack.error.SpecError):
|
||||
|
@@ -16,12 +16,6 @@
|
||||
but we use a fancier directory layout to make browsing the store and
|
||||
debugging easier.
|
||||
|
||||
The directory layout is currently hard-coded to be a YAMLDirectoryLayout,
|
||||
so called because it stores build metadata within each prefix, in
|
||||
`spec.yaml` files. In future versions of Spack we may consider allowing
|
||||
install trees to define their own layouts with some per-tree
|
||||
configuration.
|
||||
|
||||
"""
|
||||
import contextlib
|
||||
import os
|
||||
@@ -162,7 +156,7 @@ def __init__(
|
||||
self.hash_length = hash_length
|
||||
self.db = spack.database.Database(
|
||||
root, upstream_dbs=retrieve_upstream_dbs())
|
||||
self.layout = spack.directory_layout.YamlDirectoryLayout(
|
||||
self.layout = spack.directory_layout.DirectoryLayout(
|
||||
root, projections=projections, hash_length=hash_length)
|
||||
|
||||
def reindex(self):
|
||||
|
@@ -5,6 +5,7 @@
|
||||
import glob
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import py
|
||||
@@ -22,7 +23,8 @@
|
||||
import spack.store
|
||||
import spack.util.gpg
|
||||
import spack.util.web as web_util
|
||||
from spack.directory_layout import YamlDirectoryLayout
|
||||
from spack.directory_layout import DirectoryLayout
|
||||
from spack.paths import test_path
|
||||
from spack.spec import Spec
|
||||
|
||||
mirror_cmd = spack.main.SpackCommand('mirror')
|
||||
@@ -30,6 +32,8 @@
|
||||
uninstall_cmd = spack.main.SpackCommand('uninstall')
|
||||
buildcache_cmd = spack.main.SpackCommand('buildcache')
|
||||
|
||||
legacy_mirror_dir = os.path.join(test_path, 'data', 'mirrors', 'legacy_yaml')
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def cache_directory(tmpdir):
|
||||
@@ -59,6 +63,16 @@ def test_mirror(mirror_dir):
|
||||
mirror_cmd('rm', '--scope=site', 'test-mirror-func')
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def test_legacy_mirror(mutable_config, tmpdir):
|
||||
mirror_dir = tmpdir.join('legacy_yaml_mirror')
|
||||
shutil.copytree(legacy_mirror_dir, mirror_dir.strpath)
|
||||
mirror_url = 'file://%s' % mirror_dir
|
||||
mirror_cmd('add', '--scope', 'site', 'test-legacy-yaml', mirror_url)
|
||||
yield mirror_dir
|
||||
mirror_cmd('rm', '--scope=site', 'test-legacy-yaml')
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def config_directory(tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp('test_configs')
|
||||
@@ -141,7 +155,7 @@ def install_dir_default_layout(tmpdir):
|
||||
real_store, real_layout = spack.store.store, spack.store.layout
|
||||
opt_dir = tmpdir.join('opt')
|
||||
spack.store.store = spack.store.Store(str(opt_dir))
|
||||
spack.store.layout = YamlDirectoryLayout(str(opt_dir), path_scheme=scheme)
|
||||
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
|
||||
try:
|
||||
yield spack.store
|
||||
finally:
|
||||
@@ -159,7 +173,7 @@ def install_dir_non_default_layout(tmpdir):
|
||||
real_store, real_layout = spack.store.store, spack.store.layout
|
||||
opt_dir = tmpdir.join('opt')
|
||||
spack.store.store = spack.store.Store(str(opt_dir))
|
||||
spack.store.layout = YamlDirectoryLayout(str(opt_dir), path_scheme=scheme)
|
||||
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
|
||||
try:
|
||||
yield spack.store
|
||||
finally:
|
||||
@@ -557,7 +571,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
# Switch the store to the new install tree locations
|
||||
newtree_dir = tmpdir.join('newtree')
|
||||
s = spack.store.Store(str(newtree_dir))
|
||||
s.layout = YamlDirectoryLayout(str(newtree_dir), path_scheme=scheme)
|
||||
s.layout = DirectoryLayout(str(newtree_dir), path_scheme=scheme)
|
||||
|
||||
with spack.store.use_store(s):
|
||||
new_spec = Spec('old-sbang')
|
||||
@@ -594,11 +608,20 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
uninstall_cmd('-y', '/%s' % new_spec.dag_hash())
|
||||
|
||||
|
||||
# Need one where the platform has been changed to the test platform.
|
||||
def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
|
||||
mock_packages):
|
||||
install_cmd('--no-check-signature', '--cache-only', '-f', legacy_mirror_dir
|
||||
+ '/build_cache/test-debian6-core2-gcc-4.5.0-zlib-' +
|
||||
'1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml')
|
||||
uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
|
||||
|
||||
|
||||
@pytest.mark.usefixtures(
|
||||
'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
|
||||
)
|
||||
def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
|
||||
"""Ensure spack buildcache update-index properly fixes up spec.yaml
|
||||
"""Ensure spack buildcache update-index properly fixes up spec descriptor
|
||||
files on the mirror when updating the buildcache index."""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
@@ -618,29 +641,28 @@ def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
|
||||
buildcache_cmd('update-index', '-d', mirror_dir.strpath)
|
||||
|
||||
# Simulate an update to b that only affects full hash by simply overwriting
|
||||
# the full hash in the spec.yaml file on the mirror
|
||||
b_spec_yaml_name = bindist.tarball_name(b, '.spec.yaml')
|
||||
b_spec_yaml_path = os.path.join(mirror_dir.strpath,
|
||||
# the full hash in the spec.json file on the mirror
|
||||
b_spec_json_name = bindist.tarball_name(b, '.spec.json')
|
||||
b_spec_json_path = os.path.join(mirror_dir.strpath,
|
||||
bindist.build_cache_relative_path(),
|
||||
b_spec_yaml_name)
|
||||
fs.filter_file(r"full_hash:\s[^\s]+$",
|
||||
"full_hash: {0}".format(new_b_full_hash),
|
||||
b_spec_yaml_path)
|
||||
|
||||
b_spec_json_name)
|
||||
fs.filter_file(r'"full_hash":\s"\S+"',
|
||||
'"full_hash": "{0}"'.format(new_b_full_hash),
|
||||
b_spec_json_path)
|
||||
# When we update the index, spack should notice that a's notion of the
|
||||
# full hash of b doesn't match b's notion of its own full hash, and as
|
||||
# a result, spack should fix the spec.yaml for a
|
||||
# a result, spack should fix the spec.json for a
|
||||
buildcache_cmd('update-index', '-d', mirror_dir.strpath)
|
||||
|
||||
# Read in the concrete spec yaml of a
|
||||
a_spec_yaml_name = bindist.tarball_name(a, '.spec.yaml')
|
||||
a_spec_yaml_path = os.path.join(mirror_dir.strpath,
|
||||
# Read in the concrete spec json of a
|
||||
a_spec_json_name = bindist.tarball_name(a, '.spec.json')
|
||||
a_spec_json_path = os.path.join(mirror_dir.strpath,
|
||||
bindist.build_cache_relative_path(),
|
||||
a_spec_yaml_name)
|
||||
a_spec_json_name)
|
||||
|
||||
# Turn concrete spec yaml into a concrete spec (a)
|
||||
with open(a_spec_yaml_path) as fd:
|
||||
a_prime = spec.Spec.from_yaml(fd.read())
|
||||
# Turn concrete spec json into a concrete spec (a)
|
||||
with open(a_spec_json_path) as fd:
|
||||
a_prime = spec.Spec.from_json(fd.read())
|
||||
|
||||
# Make sure the full hash of b in a's spec yaml matches the new value
|
||||
# Make sure the full hash of b in a's spec json matches the new value
|
||||
assert(a_prime[b.name].full_hash() == new_b_full_hash)
|
||||
|
@@ -31,7 +31,7 @@ def test_build_tarball_overwrite(
|
||||
spec, '.', force=True, unsigned=True)
|
||||
|
||||
# Remove the tarball and try again.
|
||||
# This must *also* throw, because of the existing .spec.yaml file
|
||||
# This must *also* throw, because of the existing .spec.json file
|
||||
os.remove(os.path.join(
|
||||
spack.binary_distribution.build_cache_prefix('.'),
|
||||
spack.binary_distribution.tarball_directory_name(spec),
|
||||
|
@@ -395,7 +395,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
|
||||
'paths': [
|
||||
'jobs_scratch_dir',
|
||||
'cdash_report',
|
||||
name + '.spec.yaml',
|
||||
name + '.spec.json',
|
||||
name + '.cdashid',
|
||||
name
|
||||
],
|
||||
|
@@ -90,7 +90,7 @@ def tests_buildcache_create(
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
|
||||
tarball = spack.binary_distribution.tarball_name(spec, '.spec.yaml')
|
||||
tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
|
||||
assert os.path.exists(
|
||||
os.path.join(str(tmpdir), 'build_cache', tarball_path))
|
||||
assert os.path.exists(
|
||||
@@ -112,7 +112,7 @@ def tests_buildcache_create_env(
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
|
||||
tarball = spack.binary_distribution.tarball_name(spec, '.spec.yaml')
|
||||
tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
|
||||
assert os.path.exists(
|
||||
os.path.join(str(tmpdir), 'build_cache', tarball_path))
|
||||
assert os.path.exists(
|
||||
@@ -249,3 +249,22 @@ def verify_mirror_contents():
|
||||
'--dest-mirror-name', 'dest')
|
||||
|
||||
verify_mirror_contents()
|
||||
|
||||
|
||||
def test_buildcache_create_install(mutable_mock_env_path,
|
||||
install_mockery_mutable_config,
|
||||
mock_packages, mock_fetch, mock_stage,
|
||||
monkeypatch, tmpdir):
|
||||
""""Ensure that buildcache create creates output files"""
|
||||
pkg = 'trivial-install-test-package'
|
||||
install(pkg)
|
||||
|
||||
buildcache('create', '-d', str(tmpdir), '--unsigned', pkg)
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
|
||||
tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
|
||||
assert os.path.exists(
|
||||
os.path.join(str(tmpdir), 'build_cache', tarball_path))
|
||||
assert os.path.exists(
|
||||
os.path.join(str(tmpdir), 'build_cache', tarball))
|
||||
|
@@ -22,9 +22,10 @@
|
||||
import spack.paths as spack_paths
|
||||
import spack.repo as repo
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
from spack.schema.buildcache_spec import schema as spec_yaml_schema
|
||||
from spack.schema.buildcache_spec import schema as specfile_schema
|
||||
from spack.schema.database_index import schema as db_idx_schema
|
||||
from spack.schema.gitlab_ci import schema as gitlab_ci_schema
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
@@ -964,16 +965,16 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
|
||||
spec_map = ci.get_concrete_specs(
|
||||
env, 'patchelf', 'patchelf', '', 'FIND_ANY')
|
||||
concrete_spec = spec_map['patchelf']
|
||||
spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
|
||||
yaml_path = str(tmpdir.join('spec.yaml'))
|
||||
with open(yaml_path, 'w') as ypfd:
|
||||
ypfd.write(spec_yaml)
|
||||
spec_json = concrete_spec.to_json(hash=ht.build_hash)
|
||||
json_path = str(tmpdir.join('spec.json'))
|
||||
with open(json_path, 'w') as ypfd:
|
||||
ypfd.write(spec_json)
|
||||
|
||||
install_cmd('--keep-stage', yaml_path)
|
||||
install_cmd('--keep-stage', json_path)
|
||||
|
||||
# env, spec, yaml_path, mirror_url, build_id, sign_binaries
|
||||
# env, spec, json_path, mirror_url, build_id, sign_binaries
|
||||
ci.push_mirror_contents(
|
||||
env, concrete_spec, yaml_path, mirror_url, True)
|
||||
env, concrete_spec, json_path, mirror_url, True)
|
||||
|
||||
ci.write_cdashid_to_mirror('42', concrete_spec, mirror_url)
|
||||
|
||||
@@ -1031,15 +1032,14 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
|
||||
# Now that index is regenerated, validate "buildcache list" output
|
||||
buildcache_list_output = buildcache_cmd('list', output=str)
|
||||
assert('patchelf' in buildcache_list_output)
|
||||
|
||||
# Also test buildcache_spec schema
|
||||
bc_files_list = os.listdir(buildcache_path)
|
||||
for file_name in bc_files_list:
|
||||
if file_name.endswith('.spec.yaml'):
|
||||
spec_yaml_path = os.path.join(buildcache_path, file_name)
|
||||
with open(spec_yaml_path) as yaml_fd:
|
||||
yaml_object = syaml.load(yaml_fd)
|
||||
validate(yaml_object, spec_yaml_schema)
|
||||
if file_name.endswith('.spec.json'):
|
||||
spec_json_path = os.path.join(buildcache_path, file_name)
|
||||
with open(spec_json_path) as json_fd:
|
||||
json_object = sjson.load(json_fd)
|
||||
validate(json_object, specfile_schema)
|
||||
|
||||
logs_dir = working_dir.join('logs_dir')
|
||||
if not os.path.exists(logs_dir.strpath):
|
||||
@@ -1058,17 +1058,15 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
|
||||
dl_dir = working_dir.join('download_dir')
|
||||
if not os.path.exists(dl_dir.strpath):
|
||||
os.makedirs(dl_dir.strpath)
|
||||
|
||||
buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
|
||||
buildcache_cmd('download', '--spec-file', json_path, '--path',
|
||||
dl_dir.strpath, '--require-cdashid')
|
||||
|
||||
dl_dir_list = os.listdir(dl_dir.strpath)
|
||||
|
||||
assert(len(dl_dir_list) == 3)
|
||||
|
||||
|
||||
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
|
||||
def faked(env, spec_yaml=None, packages=None, add_spec=True,
|
||||
def faked(env, spec_file=None, packages=None, add_spec=True,
|
||||
add_deps=True, output_location=os.getcwd(),
|
||||
signing_key=None, force=False, make_relative=False,
|
||||
unsigned=False, allow_root=False, rebuild_index=False):
|
||||
@@ -1550,7 +1548,7 @@ def test_ensure_only_one_temporary_storage():
|
||||
def test_ci_generate_temp_storage_url(tmpdir, mutable_mock_env_path,
|
||||
env_deactivate, install_mockery,
|
||||
mock_packages, monkeypatch,
|
||||
project_dir_env):
|
||||
project_dir_env, mock_binary_index):
|
||||
"""Verify correct behavior when using temporary-storage-url-prefix"""
|
||||
project_dir_env(tmpdir.strpath)
|
||||
filename = str(tmpdir.join('spack.yaml'))
|
||||
@@ -1662,7 +1660,7 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||
|
||||
def test_ci_reproduce(tmpdir, mutable_mock_env_path, env_deactivate,
|
||||
install_mockery, mock_packages, monkeypatch,
|
||||
last_two_git_commits, project_dir_env):
|
||||
last_two_git_commits, project_dir_env, mock_binary_index):
|
||||
project_dir_env(tmpdir.strpath)
|
||||
working_dir = tmpdir.join('repro_dir')
|
||||
image_name = 'org/image:tag'
|
||||
@@ -1756,13 +1754,11 @@ def fake_download_and_extract_artifacts(url, work_dir):
|
||||
|
||||
monkeypatch.setattr(ci, 'download_and_extract_artifacts',
|
||||
fake_download_and_extract_artifacts)
|
||||
|
||||
rep_out = ci_cmd('reproduce-build',
|
||||
'https://some.domain/api/v1/projects/1/jobs/2/artifacts',
|
||||
'--working-dir',
|
||||
working_dir.strpath,
|
||||
output=str)
|
||||
|
||||
expect_out = 'docker run --rm -v {0}:{0} -ti {1}'.format(
|
||||
working_dir.strpath, image_name)
|
||||
|
||||
|
@@ -36,13 +36,13 @@ def test_create_db_tarball(tmpdir, database):
|
||||
# DB file is included
|
||||
assert 'index.json' in contents
|
||||
|
||||
# spec.yamls from all installs are included
|
||||
# specfiles from all installs are included
|
||||
for spec in database.query():
|
||||
# externals won't have a spec.yaml
|
||||
# externals won't have a specfile
|
||||
if spec.external:
|
||||
continue
|
||||
|
||||
spec_suffix = '%s/.spack/spec.yaml' % spec.dag_hash()
|
||||
spec_suffix = '%s/.spack/spec.json' % spec.dag_hash()
|
||||
assert spec_suffix in contents
|
||||
|
||||
|
||||
|
@@ -11,11 +11,12 @@
import spack.store
import spack.util.spack_json as sjson

install = spack.main.SpackCommand('install')
diff = spack.main.SpackCommand('diff')
install_cmd = spack.main.SpackCommand('install')
diff_cmd = spack.main.SpackCommand('diff')
find_cmd = spack.main.SpackCommand('find')


def test_diff(install_mockery, mock_fetch, mock_archive, mock_packages):
def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that we can install two packages and diff them"""

specA = spack.spec.Spec('mpileaks').concretized()
@@ -36,20 +37,20 @@ def test_diff(install_mockery, mock_fetch, mock_archive, mock_packages):

def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
install('mpileaks')
install_cmd('mpileaks')

# Only one version of mpileaks will work
diff('mpileaks', 'mpileaks')
diff_cmd('mpileaks', 'mpileaks')

# 2 specs are required for a diff
with pytest.raises(spack.main.SpackCommandError):
diff('mpileaks')
diff_cmd('mpileaks')
with pytest.raises(spack.main.SpackCommandError):
diff('mpileaks', 'mpileaks', 'mpileaks')
diff_cmd('mpileaks', 'mpileaks', 'mpileaks')

# Ensure they are the same
assert "No differences" in diff('mpileaks', 'mpileaks')
output = diff('--json', 'mpileaks', 'mpileaks')
assert "No differences" in diff_cmd('mpileaks', 'mpileaks')
output = diff_cmd('--json', 'mpileaks', 'mpileaks')
result = sjson.load(output)

assert len(result['a_not_b']) == 0
@@ -60,20 +61,25 @@ def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
assert "intersect" in result and len(result['intersect']) > 50

# After we install another version, it should ask us to disambiguate
install('mpileaks+debug')
install_cmd('mpileaks+debug')

# There are two versions of mpileaks
with pytest.raises(spack.main.SpackCommandError):
diff('mpileaks', 'mpileaks+debug')
diff_cmd('mpileaks', 'mpileaks+debug')

# But if we tell it to use the first, it won't try to disambiguate
assert "variant" in diff('--first', 'mpileaks', 'mpileaks+debug')
assert "variant" in diff_cmd('--first', 'mpileaks', 'mpileaks+debug')

# This matches them exactly
output = diff("--json", "mpileaks@2.3/ysubb76", "mpileaks@2.3/ft5qff3")
debug_hash = find_cmd('--format', '{hash}', 'mpileaks+debug').strip()
no_debug_hashes = find_cmd('--format', '{hash}', 'mpileaks~debug')
no_debug_hash = no_debug_hashes.split()[0]
output = diff_cmd("--json",
"mpileaks/{0}".format(debug_hash),
"mpileaks/{0}".format(no_debug_hash))
result = sjson.load(output)

assert len(result['a_not_b']) == 1
assert len(result['b_not_a']) == 1
assert result['a_not_b'][0] == ['variant_value', 'mpileaks debug False']
assert result['b_not_a'][0] == ['variant_value', 'mpileaks debug True']
assert result['a_not_b'][0] == ['variant_value', 'mpileaks debug True']
assert result['b_not_a'][0] == ['variant_value', 'mpileaks debug False']
@@ -130,9 +130,9 @@ def _check_json_output(spec_list):
assert all(spec["name"] == "mpileaks" for spec in spec_list)

deps = [spec["dependencies"] for spec in spec_list]
assert sum(["zmpi" in d for d in deps]) == 1
assert sum(["mpich" in d for d in deps]) == 1
assert sum(["mpich2" in d for d in deps]) == 1
assert sum(["zmpi" in [node["name"] for d in deps for node in d]]) == 1
assert sum(["mpich" in [node["name"] for d in deps for node in d]]) == 1
assert sum(["mpich2" in [node["name"] for d in deps for node in d]]) == 1


def _check_json_output_deps(spec_list):
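The assertion rewrite above reflects the new node-record format: in the JSON output these helpers check, a spec's "dependencies" entry is no longer a mapping keyed by package name but a list of records, so the dependency name has to be read out of each record. A minimal sketch of the difference (the hash values are placeholders, not real Spack hashes):

# Old shape: dependencies keyed by name, so a plain membership test worked.
old_style = {"mpich": {"hash": "abc123"}}
assert "mpich" in old_style

# New shape: a list of records; read the name out of each one.
new_style = [{"name": "mpich", "hash": "abc123", "type": ["build", "link"]}]
assert any(dep["name"] == "mpich" for dep in new_style)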
@@ -962,11 +962,16 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
assert 'using the `spack.yaml` in this directory' in output


default_full_hash = spack.spec.Spec.full_hash


def fake_full_hash(spec):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
if spec.name == 'libdwarf':
return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
return default_full_hash(spec)


def test_cache_install_full_hash_match(
@@ -25,6 +25,7 @@
from llnl.util.filesystem import mkdirp, remove_linked_tree, working_dir

import spack.architecture
import spack.binary_distribution
import spack.caches
import spack.compilers
import spack.config
@@ -315,6 +316,18 @@ def mock_fetch_cache(monkeypatch):
monkeypatch.setattr(spack.caches, 'fetch_cache', MockCache())


@pytest.fixture()
def mock_binary_index(monkeypatch, tmpdir_factory):
"""Changes the directory for the binary index and creates binary index for
every test. Clears its own index when it's done.
"""
tmpdir = tmpdir_factory.mktemp('mock_binary_index')
index_path = tmpdir.join('binary_index').strpath
mock_index = spack.binary_distribution.BinaryCacheIndex(index_path)
monkeypatch.setattr(spack.binary_distribution, 'binary_index', mock_index)
yield


@pytest.fixture(autouse=True)
def _skip_if_missing_executables(request):
"""Permits to mark tests with 'require_executables' and skip the
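A hypothetical usage sketch of the new fixture (the test name and body are assumptions, not part of this change): a test that touches the buildcache index can simply request the fixture so it operates on the temporary BinaryCacheIndex instead of the global one.

def test_uses_temporary_binary_index(mock_binary_index):
    import spack.binary_distribution as bindist

    # For the duration of the test, the module-level binary_index has been
    # monkeypatched to the BinaryCacheIndex created by the fixture.
    assert isinstance(bindist.binary_index, bindist.BinaryCacheIndex)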
@@ -0,0 +1,41 @@
spec:
- zlib:
version: 1.2.11
arch:
platform: test
platform_os: debian6
target:
name: core2
vendor: GenuineIntel
features:
- mmx
- sse
- sse2
- ssse3
generation: 0
parents:
- nocona
compiler:
name: gcc
version: 4.5.0
namespace: builtin.mock
parameters:
optimize: true
pic: true
shared: true
cflags: []
cppflags: []
cxxflags: []
fflags: []
ldflags: []
ldlibs: []
package_hash: eukp6mqxxlfuxslsodbwbqtsznajielhh4avm2vgteo4ifdsjgjq====
hash: t5mczux3tfqpxwmg7egp7axy2jvyulqk
full_hash: 6j4as6r3qd4qhf77yu44reyn2u6ggbuq
build_hash: t5mczux3tfqpxwmg7egp7axy2jvyulqk
binary_cache_checksum:
hash_algorithm: sha256
hash: a62b50aee38bb5d6d1cbf9cd2b0badaf3eaa282cd6db0472b4468ff968a5e7f2
buildinfo:
relative_prefix: test-debian6-core2/gcc-4.5.0/zlib-1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk
relative_rpaths: false
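The new test fixture above is a legacy-format buildcache descriptor: the node list lives under the spec key, and buildcache-specific metadata (binary_cache_checksum, buildinfo) rides along in the same YAML document. A minimal reading sketch, assuming the fixture is saved as zlib.spec.yaml (hypothetical filename):

import spack.util.spack_yaml as syaml

with open('zlib.spec.yaml') as f:   # hypothetical path to the fixture above
    descriptor = syaml.load(f.read())

nodes = descriptor['spec']          # legacy format: list of name-keyed records
assert descriptor['binary_cache_checksum']['hash_algorithm'] == 'sha256'
assert 'relative_prefix' in descriptor['buildinfo']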
Binary file not shown.
@@ -14,8 +14,8 @@
import spack.paths
import spack.repo
from spack.directory_layout import (
DirectoryLayout,
InvalidDirectoryLayoutParametersError,
YamlDirectoryLayout,
)
from spack.spec import Spec

@@ -30,7 +30,7 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
spec.concretize()

# Ensure default layout matches expected spec format
layout_default = YamlDirectoryLayout(str(tmpdir))
layout_default = DirectoryLayout(str(tmpdir))
path_default = layout_default.relative_path_for_spec(spec)
assert(path_default == spec.format(
"{architecture}/"
@@ -38,9 +38,9 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
"{name}-{version}-{hash}"))

# Test hash_length parameter works correctly
layout_10 = YamlDirectoryLayout(str(tmpdir), hash_length=10)
layout_10 = DirectoryLayout(str(tmpdir), hash_length=10)
path_10 = layout_10.relative_path_for_spec(spec)
layout_7 = YamlDirectoryLayout(str(tmpdir), hash_length=7)
layout_7 = DirectoryLayout(str(tmpdir), hash_length=7)
path_7 = layout_7.relative_path_for_spec(spec)

assert(len(path_default) - len(path_10) == 22)
@@ -49,8 +49,8 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
# Test path_scheme
arch, compiler, package7 = path_7.split('/')
projections_package7 = {'all': "{name}-{version}-{hash:7}"}
layout_package7 = YamlDirectoryLayout(str(tmpdir),
projections=projections_package7)
layout_package7 = DirectoryLayout(str(tmpdir),
projections=projections_package7)
path_package7 = layout_package7.relative_path_for_spec(spec)

assert(package7 == path_package7)
@@ -62,7 +62,7 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
ns_scheme = "${ARCHITECTURE}/${NAMESPACE}/${PACKAGE}-${VERSION}-${HASH:7}" # NOQA: ignore=E501
arch_ns_scheme_projections = {'all': arch_scheme,
'python': ns_scheme}
layout_arch_ns = YamlDirectoryLayout(
layout_arch_ns = DirectoryLayout(
str(tmpdir), projections=arch_ns_scheme_projections)

arch_path_spec2 = layout_arch_ns.relative_path_for_spec(spec2)
@@ -73,9 +73,9 @@ def test_yaml_directory_layout_parameters(tmpdir, config):

# Ensure conflicting parameters caught
with pytest.raises(InvalidDirectoryLayoutParametersError):
YamlDirectoryLayout(str(tmpdir),
hash_length=20,
projections=projections_package7)
DirectoryLayout(str(tmpdir),
hash_length=20,
projections=projections_package7)


def test_read_and_write_spec(temporary_store, config, mock_packages):
@@ -193,7 +193,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages):
# enough to read a spec from the spec file.
for spec, path in installed_specs.items():
spec_from_file = layout.read_spec(
os.path.join(path, '.spack', 'spec.yaml'))
os.path.join(path, '.spack', 'spec.json'))

# To satisfy these conditions, directory layouts need to
# read in concrete specs from their install dirs somehow.
@@ -231,7 +231,7 @@ def test_yaml_directory_layout_build_path(tmpdir, config):
spec = Spec('python')
spec.concretize()

layout = YamlDirectoryLayout(str(tmpdir))
layout = DirectoryLayout(str(tmpdir))
rel_path = os.path.join(layout.metadata_dir, layout.packages_dir)
assert layout.build_packages_path(spec) == os.path.join(spec.prefix,
rel_path)
@@ -1090,6 +1090,58 @@ def test_splice_subsequent(self, transitive):
spec['splice-t'].full_hash())
assert out2.spliced

@pytest.mark.parametrize('transitive', [True, False])
def test_splice_dict(self, transitive):
spec = Spec('splice-t')
dep = Spec('splice-h+foo')
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)

# Sanity check all hashes are unique...
assert spec.full_hash() != dep.full_hash()
assert out.full_hash() != dep.full_hash()
assert out.full_hash() != spec.full_hash()
node_list = out.to_dict()['spec']['nodes']
root_nodes = [n for n in node_list if n['full_hash'] == out.full_hash()]
build_spec_nodes = [n for n in node_list if n['full_hash'] == spec.full_hash()]
assert spec.full_hash() == out.build_spec.full_hash()
assert len(root_nodes) == 1
assert len(build_spec_nodes) == 1

@pytest.mark.parametrize('transitive', [True, False])
def test_splice_dict_roundtrip(self, transitive):
spec = Spec('splice-t')
dep = Spec('splice-h+foo')
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)

# Sanity check all hashes are unique...
assert spec.full_hash() != dep.full_hash()
assert out.full_hash() != dep.full_hash()
assert out.full_hash() != spec.full_hash()
out_rt_spec = Spec.from_dict(out.to_dict())  # rt is "round trip"
assert out_rt_spec.full_hash() == out.full_hash()
out_rt_spec_bld_hash = out_rt_spec.build_spec.full_hash()
out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.full_hash()
out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.full_hash()

# In any case, the build spec for splice-t (root) should point to the
# original spec, preserving build provenance.
assert spec.full_hash() == out_rt_spec_bld_hash
assert out_rt_spec.full_hash() != out_rt_spec_bld_hash

# The build spec for splice-h should always point to the introduced
# spec, since that is the spec spliced in.
assert dep['splice-h'].full_hash() == out_rt_spec_h_bld_hash

# The build spec for splice-z will depend on whether or not the splice
# was transitive.
expected_z_bld_hash = (dep['splice-z'].full_hash() if transitive else
spec['splice-z'].full_hash())
assert expected_z_bld_hash == out_rt_spec_z_bld_hash

@pytest.mark.parametrize('spec,constraint,expected_result', [
('libelf target=haswell', 'target=broadwell', False),
('libelf target=haswell', 'target=haswell', True),
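A condensed sketch of the provenance guarantee the two new tests above exercise, using the same mock packages ('splice-t' spliced with 'splice-h+foo', so the usual test fixtures are assumed): the spliced root keeps a build_spec pointer back to the original spec, and that pointer survives a round trip through the new dict format.

from spack.spec import Spec

original = Spec('splice-t').concretized()
donor = Spec('splice-h+foo').concretized()
spliced = original.splice(donor, transitive=True)

# Round-tripping through the dict format preserves build provenance.
round_tripped = Spec.from_dict(spliced.to_dict())
assert round_tripped.build_spec.full_hash() == original.full_hash()
assert round_tripped.full_hash() != original.full_hash()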
@@ -1135,3 +1187,29 @@ def test_is_extension_after_round_trip_to_dict(config, spec_str):
# Using 'y' since the round-trip make us lose build dependencies
for d in y.traverse():
assert x[d.name].package.is_extension == y[d.name].package.is_extension


def test_malformed_spec_dict():
with pytest.raises(SpecError, match='malformed'):
Spec.from_dict({'spec': {'nodes': [{'dependencies': {'name': 'foo'}}]}})


def test_spec_dict_hashless_dep():
with pytest.raises(SpecError, match="Couldn't parse"):
Spec.from_dict(
{
'spec': {
'nodes': [
{
'name': 'foo',
'hash': 'thehash',
'dependencies': [
{
'name': 'bar'
}
]
}
]
}
}
)
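For contrast with the two failure cases above, a sketch of the dependency shape the parser looks for: each record in 'dependencies' names the dependency and carries at least one hash field. The hash strings below are placeholders, and a dict this minimal is illustrative only; a real node record carries more fields (version, arch, compiler, and so on).

well_formed_node = {
    'name': 'foo',
    'hash': 'foohashfoohashfoohashfoohashfooh',        # placeholder value
    'dependencies': [
        {
            'name': 'bar',
            'hash': 'barhashbarhashbarhashbarhashbarh',  # placeholder value
            'type': ['build', 'link'],
        }
    ],
}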
@@ -22,7 +22,7 @@
import spack.util.spack_yaml as syaml
import spack.version
from spack import repo
from spack.spec import Spec, save_dependency_spec_yamls
from spack.spec import Spec, save_dependency_specfiles
from spack.util.mock_package import MockPackageMultiRepo
from spack.util.spack_yaml import syaml_dict

@@ -38,52 +38,67 @@ def check_yaml_round_trip(spec):
assert spec.eq_dag(spec_from_yaml)


def check_json_round_trip(spec):
json_text = spec.to_json()
spec_from_json = Spec.from_json(json_text)
assert spec.eq_dag(spec_from_json)


def test_simple_spec():
spec = Spec('mpileaks')
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_normal_spec(mock_packages):
spec = Spec('mpileaks+debug~opt')
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_external_spec(config, mock_packages):
spec = Spec('externaltool')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)

spec = Spec('externaltest')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_ambiguous_version_spec(mock_packages):
spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_concrete_spec(config, mock_packages):
spec = Spec('mpileaks+debug~opt')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_yaml_multivalue(config, mock_packages):
spec = Spec('multivalue-variant foo="bar,baz"')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)


def test_yaml_subdag(config, mock_packages):
spec = Spec('mpileaks^mpich+debug')
spec.concretize()
yaml_spec = Spec.from_yaml(spec.to_yaml())
json_spec = Spec.from_json(spec.to_json())

for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
assert spec[dep].eq_dag(yaml_spec[dep])
assert spec[dep].eq_dag(json_spec[dep])


def test_using_ordered_dict(mock_packages):
@@ -114,20 +129,6 @@ def descend_and_check(iterable, level=0):
assert level >= 5


def test_to_record_dict(mock_packages, config):
specs = ['mpileaks', 'zmpi', 'dttop']
for name in specs:
spec = Spec(name).concretized()
record = spec.to_record_dict()
assert record["name"] == name
assert "hash" in record

node = spec.to_node_dict()
for key, value in node[name].items():
assert key in record
assert record[key] == value


@pytest.mark.parametrize("hash_type", [
ht.dag_hash,
ht.build_hash,
@@ -308,8 +309,8 @@ def check_specs_equal(original_spec, spec_yaml_path):
return original_spec.eq_dag(spec_from_yaml)


def test_save_dependency_spec_yamls_subset(tmpdir, config):
output_path = str(tmpdir.mkdir('spec_yamls'))
def test_save_dependency_spec_jsons_subset(tmpdir, config):
output_path = str(tmpdir.mkdir('spec_jsons'))

default = ('build', 'link')

@@ -327,9 +328,70 @@ def test_save_dependency_spec_yamls_subset(tmpdir, config):
spec_a.concretize()
b_spec = spec_a['b']
c_spec = spec_a['c']
spec_a_yaml = spec_a.to_yaml(hash=ht.build_hash)
spec_a_json = spec_a.to_json(hash=ht.build_hash)

save_dependency_spec_yamls(spec_a_yaml, output_path, ['b', 'c'])
save_dependency_specfiles(spec_a_json, output_path, ['b', 'c'])

assert check_specs_equal(b_spec, os.path.join(output_path, 'b.yaml'))
assert check_specs_equal(c_spec, os.path.join(output_path, 'c.yaml'))
assert check_specs_equal(b_spec, os.path.join(output_path, 'b.json'))
assert check_specs_equal(c_spec, os.path.join(output_path, 'c.json'))


def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
"""Tests a simple legacy YAML with a dependency and ensures spec survives
concretization."""
yaml = """
spec:
- a:
version: '2.0'
arch:
platform: linux
platform_os: rhel7
target: x86_64
compiler:
name: gcc
version: 8.3.0
namespace: builtin.mock
parameters:
bvv: true
foo:
- bar
foobar: bar
cflags: []
cppflags: []
cxxflags: []
fflags: []
ldflags: []
ldlibs: []
dependencies:
b:
hash: iaapywazxgetn6gfv2cfba353qzzqvhn
type:
- build
- link
hash: obokmcsn3hljztrmctbscmqjs3xclazz
full_hash: avrk2tqsnzxeabmxa6r776uq7qbpeufv
build_hash: obokmcsn3hljztrmctbscmqjs3xclazy
- b:
version: '1.0'
arch:
platform: linux
platform_os: rhel7
target: x86_64
compiler:
name: gcc
version: 8.3.0
namespace: builtin.mock
parameters:
cflags: []
cppflags: []
cxxflags: []
fflags: []
ldflags: []
ldlibs: []
hash: iaapywazxgetn6gfv2cfba353qzzqvhn
full_hash: qvsxvlmjaothtpjluqijv7qfnni3kyyg
build_hash: iaapywazxgetn6gfv2cfba353qzzqvhy
"""
spec = Spec.from_yaml(yaml)
concrete_spec = spec.concretized()
assert concrete_spec.eq_dag(spec)
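A short sketch of the conversion path this test protects, reusing the yaml string and imports from the test file above: the legacy spec.yaml still loads, and the same spec can be re-serialized in the new JSON format and read back (the same eq_dag round-trip behavior exercised by the helpers earlier in this file is assumed to hold here).

spec = Spec.from_yaml(yaml)                    # legacy spec.yaml format
json_text = spec.to_json(hash=ht.build_hash)   # re-serialize in the new format
assert Spec.from_json(json_text).eq_dag(spec)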
@@ -15,7 +15,7 @@
import spack.package
import spack.spec
from spack.directory_layout import YamlDirectoryLayout
from spack.directory_layout import DirectoryLayout
from spack.filesystem_view import YamlFilesystemView
from spack.repo import RepoPath

@@ -189,7 +189,7 @@ def test_python_activation_view(tmpdir, python_and_extension_dirs,
monkeypatch)

view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)

python_pkg = python_spec.package
@@ -216,7 +216,7 @@ def test_python_ignore_namespace_init_conflict(
monkeypatch, py_namespace)

view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)

python_pkg = python_spec.package
@@ -251,7 +251,7 @@ def test_python_keep_namespace_init(
monkeypatch, py_namespace)

view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)

python_pkg = python_spec.package
@@ -294,7 +294,7 @@ def test_python_namespace_conflict(tmpdir, namespace_extensions,
monkeypatch, other_namespace)

view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)

python_pkg = python_spec.package
@@ -401,7 +401,7 @@ def test_perl_activation_view(tmpdir, perl_and_extension_dirs,
'perl-extension', ext_prefix, perl_spec, monkeypatch)

view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)

perl_pkg = perl_spec.package
@@ -5,7 +5,7 @@

import os

from spack.directory_layout import YamlDirectoryLayout
from spack.directory_layout import DirectoryLayout
from spack.filesystem_view import YamlFilesystemView
from spack.spec import Spec

@@ -33,7 +33,7 @@ def test_global_activation(install_mockery, mock_fetch):

def test_remove_extensions_ordered(install_mockery, mock_fetch, tmpdir):
view_dir = str(tmpdir.join('view'))
layout = YamlDirectoryLayout(view_dir)
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
e2 = Spec('extension2').concretized()
e2.package.do_install()
@@ -14,7 +14,7 @@
__all__ = ['load', 'dump', 'SpackJSONError']

_json_dump_args = {
'indent': True,
'indent': 2,
'separators': (',', ': ')
}
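This one-character fix matters because these kwargs are forwarded to Python's builtin json module: indent=True is an int in disguise (bools are ints), so it behaves like indent=1, while indent=2 produces the intended two-space indentation. A minimal stdlib sketch of the effect (the wrapper's forwarding to json.dumps is assumed):

import json

data = {'spec': {'nodes': []}}
print(json.dumps(data, indent=2, separators=(',', ': ')))
# {
#   "spec": {
#     "nodes": []
#   }
# }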
@@ -491,14 +491,14 @@ _spack_buildcache() {
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="create install list keys preview check download get-buildcache-name save-yaml copy sync update-index"
SPACK_COMPREPLY="create install list keys preview check download get-buildcache-name save-specfile copy sync update-index"
fi
}

_spack_buildcache_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --rebuild-index -y --spec-yaml --only"
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --rebuild-index --spec-file --only"
else
_all_packages
fi
@@ -536,23 +536,23 @@ _spack_buildcache_preview() {
}

_spack_buildcache_check() {
SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec -y --spec-yaml --rebuild-on-error"
SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file --rebuild-on-error"
}

_spack_buildcache_download() {
SPACK_COMPREPLY="-h --help -s --spec -y --spec-yaml -p --path -c --require-cdashid"
SPACK_COMPREPLY="-h --help -s --spec --spec-file -p --path -c --require-cdashid"
}

_spack_buildcache_get_buildcache_name() {
SPACK_COMPREPLY="-h --help -s --spec -y --spec-yaml"
SPACK_COMPREPLY="-h --help -s --spec --spec-file"
}

_spack_buildcache_save_yaml() {
SPACK_COMPREPLY="-h --help --root-spec --root-spec-yaml -s --specs -y --yaml-dir"
_spack_buildcache_save_specfile() {
SPACK_COMPREPLY="-h --help --root-spec --root-specfile -s --specs --specfile-dir"
}

_spack_buildcache_copy() {
SPACK_COMPREPLY="-h --help --base-dir --spec-yaml --destination-url"
SPACK_COMPREPLY="-h --help --base-dir --spec-file --destination-url"
}

_spack_buildcache_sync() {
55  var/spack/repos/builtin.mock/packages/zlib/package.py  Normal file
@@ -0,0 +1,55 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


# Although zlib comes with a configure script, it does not use Autotools
# The AutotoolsPackage causes zlib to fail to build with PGI
class Zlib(Package):
"""A free, general-purpose, legally unencumbered lossless
data-compression library.
"""

homepage = "http://zlib.net"
# URL must remain http:// so Spack can bootstrap curl
url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"

version('1.2.11', sha256='c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1')
# Due to the bug fixes, any installations of 1.2.9 or 1.2.10 should be
# immediately replaced with 1.2.11.
version('1.2.8', sha256='36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d')
version('1.2.3', sha256='1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e')

variant('pic', default=True,
description='Produce position-independent code (for shared libs)')
variant('shared', default=True,
description='Enables the build of shared libraries.')
variant('optimize', default=True,
description='Enable -O2 for a more optimized lib')

patch('w_patch.patch', when="@1.2.11%cce")

@property
def libs(self):
shared = '+shared' in self.spec
return find_libraries(
['libz'], root=self.prefix, recursive=True, shared=shared
)

def setup_build_environment(self, env):
if '+pic' in self.spec:
env.append_flags('CFLAGS', self.compiler.cc_pic_flag)
if '+optimize' in self.spec:
env.append_flags('CFLAGS', '-O2')

def install(self, spec, prefix):
config_args = []
if '~shared' in spec:
config_args.append('--static')
configure('--prefix={0}'.format(prefix), *config_args)

make()
if self.run_tests:
make('check')
make('install')
13  var/spack/repos/builtin.mock/packages/zlib/w_patch.patch  Normal file
@@ -0,0 +1,13 @@
diff --git a/configure b/configure
index e974d1f..ed26a63 100755
--- a/configure
+++ b/configure
@@ -409,7 +409,7 @@ EOF
if test $shared -eq 1; then
echo Checking for shared library support... | tee -a configure.log
# we must test in two steps (cc then ld), required at least on SunOS 4.x
- if try $CC -w -c $SFLAGS $test.c &&
+ if try $CC -c $SFLAGS $test.c &&
try $LDSHARED $SFLAGS -o $test$shared_ext $test.o; then
echo Building shared library $SHAREDLIBV with $CC. | tee -a configure.log
elif test -z "$old_cc" -a -z "$old_cflags"; then