Add command for reading a JSON-based DB description (#24894)

Adds `spack external read-cray-manifest`, which reads a JSON file that describes a set of package DAGs. The parsed results are stored directly in the database, so a user can see these installed specs with `spack find` (like any installed spec). The easiest way to use them as dependencies right now is to run `spack spec ... ^/hash-of-external-package`.

Changes include:

* `spack external read-cray-manifest --file <path/to/file>` adds all specs described in the file to Spack's installation DB and also registers the described compilers in the compilers configuration (the expected file format is described in this PR, including examples; see the sample manifest after the checklist below)
* Database records may now include an "origin" (the command added in this PR registers the origin as "external-db"). The assumption is that users may later want to treat installs registered with this command differently (e.g. uninstall all specs added with it)
* Hash properties are now always preserved when copying specs if the source spec is concrete
  * I don't think the hashes of installed-and-concrete specs should change, and this was the easiest way to handle that
  * Specs that are concrete also preserve their `.normal` property when copied (external specs may mention compilers that are not registered, and without this change they would fail in `normalize` when calling `validate_or_raise`)
  * Arguably this should only apply when the spec was installed

- [x] Improve testing
- [x] Specifically mark DB records added with this command (so that users can do something like "uninstall all packages added with `spack external read-cray-manifest`")
  * This is now possible with `spack uninstall --all --origin=external-db` (this will remove all specs added from manifest files)
- [x] Strip variants that are listed in json entries but don't actually exist for the package
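
For reference, here is a minimal manifest of the shape the new schema accepts (an illustrative sketch: the names, hashes, and prefixes are taken from the test fixtures added in this commit; real manifests may also carry a `_meta` header and non-empty `compilers` entries):

```json
{
  "compilers": [],
  "specs": [
    {
      "name": "hwloc",
      "version": "2.0.3",
      "arch": {
        "platform": "linux",
        "platform_os": "centos8",
        "target": {"name": "haswell"}
      },
      "compiler": {"name": "gcc", "version": "10.2.0"},
      "prefix": "/path/to/hwloc-install/",
      "hash": "hwlocfakehashaaa"
    },
    {
      "name": "openmpi",
      "version": "4.1.0",
      "arch": {
        "platform": "linux",
        "platform_os": "centos8",
        "target": {"name": "haswell"}
      },
      "compiler": {"name": "gcc", "version": "10.2.0"},
      "prefix": "/path/to/openmpi-install/",
      "hash": "openmpifakehasha",
      "dependencies": {
        "hwloc": {"hash": "hwlocfakehashaaa", "type": ["link"]}
      },
      "parameters": {"fabrics": ["psm"]}
    }
  ]
}
```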

Co-authored-by: Harmen Stoppels <harmenstoppels@gmail.com>
Peter Scheibel, 2022-03-18 17:07:22 -07:00, committed by GitHub
commit 531b1c5c3d (parent be0d611dc1)
14 changed files with 712 additions and 41 deletions

lib/spack/spack/cmd/external.py

@@ -5,6 +5,7 @@
 from __future__ import print_function

 import argparse
+import os
 import sys

 import llnl.util.tty as tty
@@ -13,6 +14,7 @@
 import spack
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
 import spack.util.environment
@@ -55,11 +57,31 @@ def setup_parser(subparser):
         'list', help='list detectable packages, by repository and name'
     )

+    read_cray_manifest = sp.add_parser(
+        'read-cray-manifest', help=(
+            "consume a Spack-compatible description of externally-installed "
+            "packages, including dependency relationships"
+        )
+    )
+    read_cray_manifest.add_argument(
+        '--file', default=None,
+        help="specify a location other than the default")
+    read_cray_manifest.add_argument(
+        '--directory', default=None,
+        help="specify a directory storing a group of manifest files")
+    read_cray_manifest.add_argument(
+        '--dry-run', action='store_true', default=False,
+        help="don't modify DB with files that are read")
+

 def external_find(args):
     # If the user didn't specify anything, search for build tools by default
     if not args.tags and not args.all and not args.packages:
         args.tags = ['core-packages', 'build-tools']

+    # If the user calls 'spack external find' with no arguments, and
+    # this system has a description of installed packages, then we should
+    # consume it automatically.
+    _collect_and_consume_cray_manifest_files()
+
     # If the user specified both --all and --tag, then --all has precedence
     if args.all and args.tags:
@@ -104,6 +126,52 @@ def external_find(args):
         tty.msg('No new external packages detected')


+def external_read_cray_manifest(args):
+    _collect_and_consume_cray_manifest_files(
+        manifest_file=args.file,
+        manifest_directory=args.directory,
+        dry_run=args.dry_run
+    )
+
+
+def _collect_and_consume_cray_manifest_files(
+        manifest_file=None, manifest_directory=None, dry_run=False):
+    manifest_files = []
+    if manifest_file:
+        manifest_files.append(manifest_file)
+
+    manifest_dirs = []
+    if manifest_directory:
+        manifest_dirs.append(manifest_directory)
+
+    if os.path.isdir(cray_manifest.default_path):
+        tty.debug(
+            "Cray manifest path {0} exists: collecting all files to read."
+            .format(cray_manifest.default_path))
+        manifest_dirs.append(cray_manifest.default_path)
+    else:
+        tty.debug("Default Cray manifest directory {0} does not exist."
+                  .format(cray_manifest.default_path))
+
+    for directory in manifest_dirs:
+        for fname in os.listdir(directory):
+            manifest_files.append(os.path.join(directory, fname))
+
+    if not manifest_files:
+        raise ValueError(
+            "--file/--directory not specified, and no manifest found at {0}"
+            .format(cray_manifest.default_path))
+
+    for path in manifest_files:
+        try:
+            cray_manifest.read(path, not dry_run)
+        except (AssertionError, spack.error.SpackError):
+            # TODO: the AssertionError comes from class_for_compiler_name
+            # and should be transformed into a SpackError
+            tty.warn("Failure reading manifest file: {0}".format(path))
+
+
 def external_list(args):
     # Trigger a read of all packages, might take a long time.
     list(spack.repo.path.all_packages())
@@ -115,5 +183,6 @@ def external_list(args):

 def external(parser, args):
-    action = {'find': external_find, 'list': external_list}
+    action = {'find': external_find, 'list': external_list,
+              'read-cray-manifest': external_read_cray_manifest}
     action[args.external_command](args)

lib/spack/spack/cmd/uninstall.py

@@ -62,9 +62,14 @@ def setup_parser(subparser):
         '-a', '--all', action='store_true', dest='all',
         help="remove ALL installed packages that match each supplied spec"
     )
+    subparser.add_argument(
+        '--origin', dest='origin',
+        help="only remove DB records with the specified origin"
+    )


-def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
+def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
+                        origin=None):
     """Returns a list of specs matching the not necessarily
        concretized specs given from cli
@@ -85,8 +90,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
     has_errors = False
     for spec in specs:
         install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
-        matching = spack.store.db.query_local(spec, hashes=hashes,
-                                              installed=install_query)
+        matching = spack.store.db.query_local(
+            spec, hashes=hashes, installed=install_query, origin=origin)
         # For each spec provided, make sure it refers to only one package.
         # Fail and ask user to be unambiguous if it doesn't
         if not allow_multiple_matches and len(matching) > 1:
@@ -240,7 +245,8 @@ def do_uninstall(env, specs, force):
 def get_uninstall_list(args, specs, env):
     # Gets the list of installed specs that match the ones give via cli
     # args.all takes care of the case where '-a' is given in the cli
-    uninstall_list = find_matching_specs(env, specs, args.all, args.force)
+    uninstall_list = find_matching_specs(env, specs, args.all, args.force,
+                                         args.origin)
     # Takes care of '-R'
     active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)

lib/spack/spack/cray_manifest.py (new file)

@ -0,0 +1,162 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json
import jsonschema
import six
import llnl.util.tty as tty
import spack.cmd
import spack.hash_types as hash_types
from spack.schema.cray_manifest import schema as manifest_schema
#: Cray systems can store a Spack-compatible description of system
#: packages here.
default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
def compiler_from_entry(entry):
compiler_name = entry['name']
paths = entry['executables']
version = entry['version']
arch = entry['arch']
operating_system = arch['os']
target = arch['target']
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
return compiler_cls(
spec, operating_system, target, paths
)
def spec_from_entry(entry):
arch_str = ""
if 'arch' in entry:
arch_format = "arch={platform}-{os}-{target}"
arch_str = arch_format.format(
platform=entry['arch']['platform'],
os=entry['arch']['platform_os'],
target=entry['arch']['target']['name']
)
compiler_str = ""
if 'compiler' in entry:
compiler_format = "%{name}@{version}"
compiler_str = compiler_format.format(
name=entry['compiler']['name'],
version=entry['compiler']['version']
)
spec_format = "{name}@{version} {compiler} {arch}"
spec_str = spec_format.format(
name=entry['name'],
version=entry['version'],
compiler=compiler_str,
arch=arch_str
)
package = spack.repo.get(entry['name'])
if 'parameters' in entry:
variant_strs = list()
for name, value in entry['parameters'].items():
# TODO: also ensure that the variant value is valid?
if not (name in package.variants):
tty.debug("Omitting variant {0} for entry {1}/{2}"
.format(name, entry['name'], entry['hash'][:7]))
continue
# Value could be a list (of strings), boolean, or string
if isinstance(value, six.string_types):
variant_strs.append('{0}={1}'.format(name, value))
else:
try:
iter(value)
variant_strs.append(
'{0}={1}'.format(name, ','.join(value)))
continue
except TypeError:
# Not an iterable
pass
# At this point not a string or collection, check for boolean
if value in [True, False]:
bool_symbol = '+' if value else '~'
variant_strs.append('{0}{1}'.format(bool_symbol, name))
else:
raise ValueError(
"Unexpected value for {0} ({1}): {2}".format(
name, str(type(value)), str(value)
)
)
spec_str += ' ' + ' '.join(variant_strs)
spec, = spack.cmd.parse_specs(spec_str.split())
for ht in [hash_types.dag_hash, hash_types.build_hash,
hash_types.full_hash]:
setattr(spec, ht.attr, entry['hash'])
spec._concrete = True
spec._hashes_final = True
spec.external_path = entry['prefix']
spec.origin = 'external-db'
spack.spec.Spec.ensure_valid_variants(spec)
return spec
def entries_to_specs(entries):
spec_dict = {}
for entry in entries:
try:
spec = spec_from_entry(entry)
spec_dict[spec._hash] = spec
except spack.repo.UnknownPackageError:
tty.debug("Omitting package {0}: no corresponding repo package"
.format(entry['name']))
except spack.error.SpackError:
raise
except Exception:
tty.warn("Could not parse entry: " + str(entry))
for entry in filter(lambda x: 'dependencies' in x, entries):
dependencies = entry['dependencies']
for name, properties in dependencies.items():
dep_hash = properties['hash']
deptypes = properties['type']
if dep_hash in spec_dict:
if entry['hash'] not in spec_dict:
continue
parent_spec = spec_dict[entry['hash']]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes)
return spec_dict
def read(path, apply_updates):
with open(path, 'r') as json_file:
json_data = json.load(json_file)
jsonschema.validate(json_data, manifest_schema)
specs = entries_to_specs(json_data['specs'])
tty.debug("{0}: {1} specs read from manifest".format(
path,
str(len(specs))))
compilers = list(compiler_from_entry(x)
for x in json_data['compilers'])
tty.debug("{0}: {1} compilers read from manifest".format(
path,
str(len(compilers))))
if apply_updates:
spack.compilers.add_compilers_to_config(
compilers, init_config=False)
for spec in specs.values():
spack.store.db.add(spec, directory_layout=None)
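
The command layer above ultimately funnels into `cray_manifest.read(path, apply_updates)`. A minimal sketch of driving this module directly, assuming a manifest at `./manifest.json` (the path is illustrative):

```python
# Sketch: ingest a manifest without going through the CLI.
# apply_updates=False mirrors `--dry-run`: the file is parsed and
# schema-validated, but the DB and compiler config are left untouched.
import spack.cray_manifest as cray_manifest

cray_manifest.read('./manifest.json', apply_updates=False)  # dry run
cray_manifest.read('./manifest.json', apply_updates=True)   # ingest
```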

lib/spack/spack/database.py

@@ -187,6 +187,7 @@ def __init__(
         installation_time=None,
         deprecated_for=None,
         in_buildcache=False,
+        origin=None
     ):
         self.spec = spec
         self.path = str(path) if path else None
@@ -196,6 +197,7 @@ def __init__(
         self.installation_time = installation_time or _now()
         self.deprecated_for = deprecated_for
         self.in_buildcache = in_buildcache
+        self.origin = origin

     def install_type_matches(self, installed):
         installed = InstallStatuses.canonicalize(installed)
@@ -217,6 +219,9 @@ def to_dict(self, include_fields=default_install_record_fields):
             else:
                 rec_dict.update({field_name: getattr(self, field_name)})

+        if self.origin:
+            rec_dict['origin'] = self.origin
+
         return rec_dict

     @classmethod
@@ -1131,6 +1136,10 @@ def _add(
             'explicit': explicit,
             'installation_time': installation_time
         }
+        # Commands other than 'spack install' may add specs to the DB,
+        # we can record the source of an installed Spec with 'origin'
+        if hasattr(spec, 'origin'):
+            extra_args['origin'] = spec.origin
         self._data[key] = InstallRecord(
             new_spec, path, installed, ref_count=0, **extra_args
         )
@@ -1462,6 +1471,7 @@ def _query(
         end_date=None,
         hashes=None,
         in_buildcache=any,
+        origin=None
     ):
         """Run a query on the database."""

@@ -1490,6 +1500,9 @@ def _query(
             if hashes is not None and rec.spec.dag_hash() not in hashes:
                 continue

+            if origin and not (origin == rec.origin):
+                continue
+
             if not rec.install_type_matches(installed):
                 continue
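
Because the origin lands on the `InstallRecord` and `_query` filters on it, API callers can select manifest-derived records the same way `spack uninstall --all --origin=external-db` does. A small sketch (the spec name is illustrative):

```python
# Sketch: list installed specs whose DB records came from a manifest.
# query_local grew an `origin=` keyword in this change.
import spack.store

for spec in spack.store.db.query_local('openmpi', origin='external-db'):
    print(spec.short_spec)
```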

lib/spack/spack/schema/cray_manifest.py (new file)

@@ -0,0 +1,130 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for the Cray descriptive manifest: this describes a set of
   installed packages on the system and also specifies dependency
   relationships between them (so this provides more information than
   external entries in packages configuration).

   This does not specify a configuration - it is an input format
   that is consumed and transformed into Spack DB records.
"""

schema = {
    "$schema": "http://json-schema.org/schema#",
    "title": "CPE manifest schema",
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "_meta": {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "file-type": {"type": "string", "minLength": 1},
                "version": {"type": "string", "minLength": 1},
                "system-type": {"type": "string", "minLength": 1}
            }
        },
        "compilers": {
            "type": "array",
            "items": {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "prefix": {"type": "string", "minLength": 1},
                    "executables": {
                        "type": "object",
                        "additionalProperties": False,
                        "properties": {
                            "cc": {"type": "string", "minLength": 1},
                            "cxx": {"type": "string", "minLength": 1},
                            "fc": {"type": "string", "minLength": 1}
                        }
                    },
                    "arch": {
                        "type": "object",
                        "required": ["os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "os": {"type": "string", "minLength": 1},
                            "target": {"type": "string", "minLength": 1}
                        }
                    }
                }
            }
        },
        "specs": {
            "type": "array",
            "items": {
                "type": "object",
                "required": [
                    "name",
                    "version",
                    "arch",
                    "compiler",
                    "prefix",
                    "hash"],
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "arch": {
                        "type": "object",
                        "required": ["platform", "platform_os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "platform": {"type": "string", "minLength": 1},
                            "platform_os": {"type": "string", "minLength": 1},
                            "target": {
                                "type": "object",
                                "additionalProperties": False,
                                "required": ["name"],
                                "properties": {
                                    "name": {"type": "string", "minLength": 1}
                                }
                            }
                        }
                    },
                    "compiler": {
                        "type": "object",
                        "required": ["name", "version"],
                        "additionalProperties": False,
                        "properties": {
                            "name": {"type": "string", "minLength": 1},
                            "version": {"type": "string", "minLength": 1}
                        }
                    },
                    "dependencies": {
                        "type": "object",
                        "patternProperties": {
                            "\\w[\\w-]*": {
                                "type": "object",
                                "required": ["hash"],
                                "additionalProperties": False,
                                "properties": {
                                    "hash": {"type": "string", "minLength": 1},
                                    "type": {
                                        "type": "array",
                                        "items": {
                                            "type": "string", "minLength": 1}
                                    }
                                }
                            }
                        }
                    },
                    "prefix": {
                        "type": "string", "minLength": 1},
                    "rpm": {"type": "string", "minLength": 1},
                    "hash": {"type": "string", "minLength": 1},
                    "parameters": {
                        "type": "object",
                    }
                }
            }
        }
    }
}
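
Since `cray_manifest.read` runs `jsonschema.validate` before doing anything else, the same call can be used standalone to vet a manifest up front (a sketch, assuming a file named `manifest.json`):

```python
# Sketch: validate a manifest against the schema without ingesting it.
# Raises jsonschema.ValidationError if the file doesn't conform.
import json

import jsonschema

from spack.schema.cray_manifest import schema as manifest_schema

with open('manifest.json') as f:
    jsonschema.validate(json.load(f), manifest_schema)
```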

lib/spack/spack/solver/asp.py

@@ -1505,9 +1505,12 @@ def generate_possible_compilers(self, specs):
                 continue

             if strict and s.compiler not in cspecs:
-                raise spack.concretize.UnavailableCompilerVersionError(
-                    s.compiler
-                )
+                if not s.concrete:
+                    raise spack.concretize.UnavailableCompilerVersionError(
+                        s.compiler
+                    )
+                # Allow unknown compilers to exist if the associated spec
+                # is already built
             else:
                 cspecs.add(s.compiler)
                 self.gen.fact(fn.allow_compiler(

lib/spack/spack/spec.py

@@ -2911,7 +2911,7 @@ def concretized(self, tests=False):
             if a list of names activate them for the packages in the list,
             if True activate 'test' dependencies for all packages.
         """
-        clone = self.copy(caches=True)
+        clone = self.copy()
         clone.concretize(tests=tests)
         return clone
@@ -3210,8 +3210,8 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
                 "Attempting to normalize anonymous spec")

         # Set _normal and _concrete to False when forced
-        if force:
-            self._mark_concrete(False)
+        if force and not self._concrete:
+            self._normal = False

         if self._normal:
             return False
@@ -3680,7 +3680,7 @@ def patches(self):
         return patches

-    def _dup(self, other, deps=True, cleardeps=True, caches=None):
+    def _dup(self, other, deps=True, cleardeps=True):
         """Copy the spec other into self. This is an overwriting
         copy. It does not copy any dependents (parents), but by default
         copies dependencies.
@@ -3695,10 +3695,6 @@ def _dup(self, other, deps=True, cleardeps=True):
             cleardeps (bool): if True clears the dependencies of ``self``,
                 before possibly copying the dependencies of ``other`` onto
                 ``self``
-            caches (bool or None): preserve cached fields such as
-                ``_normal``, ``_hash``, and ``_dunder_hash``. By
-                default this is ``False`` if DAG structure would be
-                changed by the copy, ``True`` if it's an exact copy.

         Returns:
             True if ``self`` changed because of the copy operation,
@@ -3749,12 +3745,6 @@ def _dup(self, other, deps=True, cleardeps=True):
             self.extra_attributes = other.extra_attributes
         self.namespace = other.namespace

-        # Cached fields are results of expensive operations.
-        # If we preserved the original structure, we can copy them
-        # safely. If not, they need to be recomputed.
-        if caches is None:
-            caches = (deps is True or deps == dp.all_deptypes)
-
         # If we copy dependencies, preserve DAG structure in the new spec
         if deps:
             # If caller restricted deptypes to be copied, adjust that here.
@@ -3762,29 +3752,31 @@ def _dup(self, other, deps=True, cleardeps=True):
             deptypes = dp.all_deptypes
             if isinstance(deps, (tuple, list)):
                 deptypes = deps
-            self._dup_deps(other, deptypes, caches)
+            self._dup_deps(other, deptypes)

         self._concrete = other._concrete
         self._hashes_final = other._hashes_final

-        if caches:
+        if self._concrete:
             self._hash = other._hash
             self._build_hash = other._build_hash
             self._dunder_hash = other._dunder_hash
-            self._normal = other._normal
+            self._normal = True
             self._full_hash = other._full_hash
             self._package_hash = other._package_hash
         else:
             self._hash = None
             self._build_hash = None
             self._dunder_hash = None
+            # Note, we could use other._normal if we are copying all deps, but
+            # always set it False here to avoid the complexity of checking
             self._normal = False
             self._full_hash = None
             self._package_hash = None

         return changed

-    def _dup_deps(self, other, deptypes, caches):
+    def _dup_deps(self, other, deptypes):
         def spid(spec):
             return id(spec)
@@ -3795,11 +3787,11 @@ def spid(spec):
                 if spid(edge.parent) not in new_specs:
                     new_specs[spid(edge.parent)] = edge.parent.copy(
-                        deps=False, caches=caches
+                        deps=False
                     )

                 if spid(edge.spec) not in new_specs:
-                    new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)
+                    new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)

                 new_specs[spid(edge.parent)].add_dependency_edge(
                     new_specs[spid(edge.spec)], edge.deptypes
@@ -4635,22 +4627,19 @@ def multiple_specs(root):
         # _dependents of these specs should not be trusted.
         # Variants may also be ignored here for now...

-        # Keep all cached hashes because we will invalidate the ones that need
-        # invalidating later, and we don't want to invalidate unnecessarily
         if transitive:
-            self_nodes = dict((s.name, s.copy(deps=False, caches=True))
+            self_nodes = dict((s.name, s.copy(deps=False))
                               for s in self.traverse(root=True)
                               if s.name not in other)
-            other_nodes = dict((s.name, s.copy(deps=False, caches=True))
+            other_nodes = dict((s.name, s.copy(deps=False))
                                for s in other.traverse(root=True))
         else:
             # If we're not doing a transitive splice, then we only want the
             # root of other.
-            self_nodes = dict((s.name, s.copy(deps=False, caches=True))
+            self_nodes = dict((s.name, s.copy(deps=False))
                               for s in self.traverse(root=True)
                               if s.name != other.name)
-            other_nodes = {other.name: other.copy(deps=False, caches=True)}
+            other_nodes = {other.name: other.copy(deps=False)}

         nodes = other_nodes.copy()
         nodes.update(self_nodes)
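
The net effect of the `_dup` changes: copying a concrete spec now always carries its hashes and normalization state along, so manifest-derived (installed/external) specs never re-run `normalize` on copy. A rough sketch of the invariant, assuming any concretizable package such as `zlib`:

```python
# Sketch: a copy of a concrete spec keeps the same DAG hash and stays
# marked normal, instead of having its cached fields recomputed.
import spack.spec

s = spack.spec.Spec('zlib').concretized()
c = s.copy()
assert c.dag_hash() == s.dag_hash()
assert c._concrete and c._normal
```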

lib/spack/spack/test/conftest.py

@@ -788,7 +788,16 @@ def database(mock_store, mock_packages, config):

 @pytest.fixture(scope='function')
-def mutable_database(database, _store_dir_and_cache):
+def database_mutable_config(mock_store, mock_packages, mutable_config,
+                            monkeypatch):
+    """This activates the mock store, packages, AND config."""
+    with spack.store.use_store(str(mock_store)) as store:
+        yield store.db
+        store.db.last_seen_verifier = ''
+
+
+@pytest.fixture(scope='function')
+def mutable_database(database_mutable_config, _store_dir_and_cache):
     """Writeable version of the fixture, restored to its initial state
     after each test.
     """
@@ -796,7 +805,7 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
     store_path, store_cache = _store_dir_and_cache
     store_path.join('.spack-db').chmod(mode=0o755, rec=1)

-    yield database
+    yield database_mutable_config

     # Restore the initial state by copying the content of the cache back into
     # the store and making the database read-only

lib/spack/spack/test/cray_manifest.py (new file)

@@ -0,0 +1,247 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json

import pytest

import spack
import spack.cmd
import spack.config
import spack.cray_manifest as cray_manifest
import spack.store
from spack.cray_manifest import compiler_from_entry, entries_to_specs

example_x_json_str = """\
{
  "name": "packagex",
  "hash": "hash-of-x",
  "prefix": "/path/to/packagex-install/",
  "version": "1.0",
  "arch": {
    "platform": "linux",
    "platform_os": "centos8",
    "target": {
      "name": "haswell"
    }
  },
  "compiler": {
    "name": "gcc",
    "version": "10.2.0"
  },
  "dependencies": {
    "packagey": {
      "hash": "hash-of-y",
      "type": ["link"]
    }
  },
  "parameters": {
    "precision": ["double", "float"]
  }
}
"""

example_compiler_entry = """\
{
  "name": "gcc",
  "prefix": "/path/to/compiler/",
  "version": "7.5.0",
  "arch": {
    "os": "centos8",
    "target": "x86_64"
  },
  "executables": {
    "cc": "/path/to/compiler/cc",
    "cxx": "/path/to/compiler/cxx",
    "fc": "/path/to/compiler/fc"
  }
}
"""


class JsonSpecEntry(object):
    def __init__(self, name, hash, prefix, version, arch, compiler,
                 dependencies, parameters):
        self.name = name
        self.hash = hash
        self.prefix = prefix
        self.version = version
        self.arch = arch
        self.compiler = compiler
        self.dependencies = dependencies
        self.parameters = parameters

    def to_dict(self):
        return {
            'name': self.name,
            'hash': self.hash,
            'prefix': self.prefix,
            'version': self.version,
            'arch': self.arch,
            'compiler': self.compiler,
            'dependencies': self.dependencies,
            'parameters': self.parameters
        }

    def as_dependency(self, deptypes):
        return (self.name,
                {'hash': self.hash,
                 'type': list(deptypes)})


class JsonArchEntry(object):
    def __init__(self, platform, os, target):
        self.platform = platform
        self.os = os
        self.target = target

    def to_dict(self):
        return {
            'platform': self.platform,
            'platform_os': self.os,
            'target': {
                'name': self.target
            }
        }


class JsonCompilerEntry(object):
    def __init__(self, name, version):
        self.name = name
        self.version = version

    def to_dict(self):
        return {
            'name': self.name,
            'version': self.version
        }


_common_arch = JsonArchEntry(
    platform='linux',
    os='centos8',
    target='haswell'
).to_dict()

_common_compiler = JsonCompilerEntry(
    name='gcc',
    version='10.2.0'
).to_dict()


def test_compatibility():
    """Make sure that JsonSpecEntry outputs the expected JSON structure
       by comparing it with JSON parsed from an example string. This
       ensures that the testing objects like JsonSpecEntry produce the
       same JSON structure as the expected file format.
    """
    y = JsonSpecEntry(
        name='packagey',
        hash='hash-of-y',
        prefix='/path/to/packagey-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies={},
        parameters={}
    )

    x = JsonSpecEntry(
        name='packagex',
        hash='hash-of-x',
        prefix='/path/to/packagex-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies=dict([y.as_dependency(deptypes=['link'])]),
        parameters={'precision': ['double', 'float']}
    )

    x_from_entry = x.to_dict()
    x_from_str = json.loads(example_x_json_str)
    assert x_from_entry == x_from_str


def test_compiler_from_entry():
    compiler_data = json.loads(example_compiler_entry)
    compiler_from_entry(compiler_data)


def generate_openmpi_entries():
    """Generate two example JSON entries that refer to an OpenMPI
       installation and a hwloc dependency.
    """
    # The hashes need to be padded with 'a' at the end to align with 8-byte
    # boundaries (for base-32 decoding)
    hwloc = JsonSpecEntry(
        name='hwloc',
        hash='hwlocfakehashaaa',
        prefix='/path/to/hwloc-install/',
        version='2.0.3',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies={},
        parameters={}
    )

    # This includes a variant which is guaranteed not to appear in the
    # OpenMPI package: we need to make sure we can use such package
    # descriptions.
    openmpi = JsonSpecEntry(
        name='openmpi',
        hash='openmpifakehasha',
        prefix='/path/to/openmpi-install/',
        version='4.1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies=dict([hwloc.as_dependency(deptypes=['link'])]),
        parameters={
            'internal-hwloc': False,
            'fabrics': ['psm'],
            'missing_variant': True
        }
    )

    return [openmpi, hwloc]


def test_spec_conversion():
    """Given JSON entries, check that we can form a set of Specs
       including dependency references.
    """
    entries = list(x.to_dict() for x in generate_openmpi_entries())
    specs = entries_to_specs(entries)
    openmpi_spec, = list(x for x in specs.values() if x.name == 'openmpi')
    assert openmpi_spec['hwloc']


def _example_db():
    return {
        'specs': list(x.to_dict() for x in generate_openmpi_entries()),
        'compilers': []
    }


def test_read_cray_manifest(
        tmpdir, mutable_config, mock_packages, mutable_database):
    """Check that (a) we can read the cray manifest and add it to the Spack
       Database and (b) we can concretize specs based on that.
    """
    if spack.config.get('config:concretizer') == 'clingo':
        pytest.skip("The ASP-based concretizer currently doesn't support"
                    " dependency hash references - see #22613")

    with tmpdir.as_cwd():
        test_db_fname = 'external-db.json'
        with open(test_db_fname, 'w') as db_file:
            json.dump(_example_db(), db_file)
        cray_manifest.read(test_db_fname, True)
        query_specs = spack.store.db.query('openmpi')
        assert any(x.dag_hash() == 'openmpifakehasha' for x in query_specs)

        concretized_specs = spack.cmd.parse_specs(
            'depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64'
            ' ^/openmpifakehasha'.split(),
            concretize=True)
        assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'

lib/spack/spack/test/spec_dag.py

@@ -599,8 +599,6 @@ def test_copy_normalized(self):
         assert orig == copy
         assert orig.eq_dag(copy)
-        assert orig._normal == copy._normal
-        assert orig._concrete == copy._concrete

         # ensure no shared nodes bt/w orig and copy.
         orig_ids = set(id(s) for s in orig.traverse())

share/spack/spack-completion.bash

@@ -1017,7 +1017,7 @@ _spack_external() {
     then
         SPACK_COMPREPLY="-h --help"
     else
-        SPACK_COMPREPLY="find list"
+        SPACK_COMPREPLY="find list read-cray-manifest"
     fi
 }
@@ -1034,6 +1034,10 @@ _spack_external_list() {
     SPACK_COMPREPLY="-h --help"
 }

+_spack_external_read_cray_manifest() {
+    SPACK_COMPREPLY="-h --help --file --directory --dry-run"
+}
+
 _spack_fetch() {
     if $list_options
     then
@@ -1791,7 +1795,7 @@ _spack_undevelop() {
 _spack_uninstall() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all"
+        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all --origin"
     else
         _installed_packages
     fi

var/spack/repos/builtin.mock/packages/depends-on-openmpi/package.py (new file)

@@ -0,0 +1,16 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class DependsOnOpenmpi(Package):
    """For testing concretization of packages that use
    `spack external read-cray-manifest`"""

    depends_on('openmpi')

    version('1.0')
    version('0.9')

var/spack/repos/builtin.mock/packages/hwloc/package.py (new file)

@@ -0,0 +1,10 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Hwloc(Package):
    version('2.0.3')

var/spack/repos/builtin.mock/packages/openmpi/package.py (new file)

@@ -0,0 +1,15 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Openmpi(Package):
    version('4.1.1')

    variant('internal-hwloc', default=False)
    variant('fabrics', values=any_combination_of('psm', 'mxm'))

    depends_on('hwloc', when="~internal-hwloc")