This reverts commit 531b1c5c3d.

parent 71c9334e30
commit bfb6873ce3
@@ -5,7 +5,6 @@
from __future__ import print_function

import argparse
import os
import sys

import llnl.util.tty as tty
@@ -14,7 +13,6 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
@@ -57,31 +55,11 @@ def setup_parser(subparser):
        'list', help='list detectable packages, by repository and name'
    )

    read_cray_manifest = sp.add_parser(
        'read-cray-manifest', help=(
            "consume a Spack-compatible description of externally-installed "
            "packages, including dependency relationships"
        )
    )
    read_cray_manifest.add_argument(
        '--file', default=None,
        help="specify a location other than the default")
    read_cray_manifest.add_argument(
        '--directory', default=None,
        help="specify a directory storing a group of manifest files")
    read_cray_manifest.add_argument(
        '--dry-run', action='store_true', default=False,
        help="don't modify DB with files that are read")


def external_find(args):
    # If the user didn't specify anything, search for build tools by default
    if not args.tags and not args.all and not args.packages:
        args.tags = ['core-packages', 'build-tools']
    # If the user calls 'spack external find' with no arguments, and
    # this system has a description of installed packages, then we should
    # consume it automatically.
    _collect_and_consume_cray_manifest_files()

    # If the user specified both --all and --tag, then --all has precedence
    if args.all and args.tags:
@@ -126,52 +104,6 @@ def external_find(args):
    tty.msg('No new external packages detected')


def external_read_cray_manifest(args):
    _collect_and_consume_cray_manifest_files(
        manifest_file=args.file,
        manifest_directory=args.directory,
        dry_run=args.dry_run
    )


def _collect_and_consume_cray_manifest_files(
        manifest_file=None, manifest_directory=None, dry_run=False):

    manifest_files = []
    if manifest_file:
        manifest_files.append(manifest_file)

    manifest_dirs = []
    if manifest_directory:
        manifest_dirs.append(manifest_directory)

    if os.path.isdir(cray_manifest.default_path):
        tty.debug(
            "Cray manifest path {0} exists: collecting all files to read."
            .format(cray_manifest.default_path))
        manifest_dirs.append(cray_manifest.default_path)
    else:
        tty.debug("Default Cray manifest directory {0} does not exist."
                  .format(cray_manifest.default_path))

    for directory in manifest_dirs:
        for fname in os.listdir(directory):
            manifest_files.append(os.path.join(directory, fname))

    if not manifest_files:
        raise ValueError(
            "--file/--directory not specified, and no manifest found at {0}"
            .format(cray_manifest.default_path))

    for path in manifest_files:
        try:
            cray_manifest.read(path, not dry_run)
        except (AssertionError, spack.error.SpackError):
            # TODO: the AssertionError comes from class_for_compiler_name
            # and should be transformed into a SpackError
            tty.warn("Failure reading manifest file: {0}".format(path))


def external_list(args):
    # Trigger a read of all packages, might take a long time.
    list(spack.repo.path.all_packages())
@@ -183,6 +115,5 @@ def external_list(args):


def external(parser, args):
    action = {'find': external_find, 'list': external_list,
              'read-cray-manifest': external_read_cray_manifest}
    action = {'find': external_find, 'list': external_list}
    action[args.external_command](args)
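The reverted subcommand gathered manifests from an explicit `--file`/`--directory` plus everything under the default Cray path. A self-contained sketch of that collection logic, for reference; `DEFAULT_PATH` mirrors `cray_manifest.default_path`, and the function name is illustrative rather than Spack API:

```python
import os

DEFAULT_PATH = '/opt/cray/pe/cpe-descriptive-manifest/'


def collect_manifest_files(manifest_file=None, manifest_directory=None):
    """Gather the explicit file plus every file in the given/default dirs."""
    files = [manifest_file] if manifest_file else []
    dirs = [manifest_directory] if manifest_directory else []
    # The default directory is only scanned if it actually exists.
    if os.path.isdir(DEFAULT_PATH):
        dirs.append(DEFAULT_PATH)
    for directory in dirs:
        files.extend(os.path.join(directory, f) for f in os.listdir(directory))
    return files
```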
@@ -62,14 +62,9 @@ def setup_parser(subparser):
        '-a', '--all', action='store_true', dest='all',
        help="remove ALL installed packages that match each supplied spec"
    )
    subparser.add_argument(
        '--origin', dest='origin',
        help="only remove DB records with the specified origin"
    )


def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
                        origin=None):
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
    """Returns a list of specs matching the not necessarily
       concretized specs given from cli

@@ -90,8 +85,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
        matching = spack.store.db.query_local(
            spec, hashes=hashes, installed=install_query, origin=origin)
        matching = spack.store.db.query_local(spec, hashes=hashes,
                                              installed=install_query)
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't
        if not allow_multiple_matches and len(matching) > 1:
@@ -245,8 +240,7 @@ def do_uninstall(env, specs, force):
def get_uninstall_list(args, specs, env):
    # Gets the list of installed specs that match the ones given via cli
    # args.all takes care of the case where '-a' is given in the cli
    uninstall_list = find_matching_specs(env, specs, args.all, args.force,
                                         args.origin)
    uninstall_list = find_matching_specs(env, specs, args.all, args.force)

    # Takes care of '-R'
    active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
@@ -1,162 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import json

import jsonschema
import six

import llnl.util.tty as tty

import spack.cmd
import spack.hash_types as hash_types
from spack.schema.cray_manifest import schema as manifest_schema

#: Cray systems can store a Spack-compatible description of system
#: packages here.
default_path = '/opt/cray/pe/cpe-descriptive-manifest/'


def compiler_from_entry(entry):
    compiler_name = entry['name']
    paths = entry['executables']
    version = entry['version']
    arch = entry['arch']
    operating_system = arch['os']
    target = arch['target']

    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    spec = spack.spec.CompilerSpec(compiler_cls.name, version)
    paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
    return compiler_cls(
        spec, operating_system, target, paths
    )


def spec_from_entry(entry):
    arch_str = ""
    if 'arch' in entry:
        arch_format = "arch={platform}-{os}-{target}"
        arch_str = arch_format.format(
            platform=entry['arch']['platform'],
            os=entry['arch']['platform_os'],
            target=entry['arch']['target']['name']
        )

    compiler_str = ""
    if 'compiler' in entry:
        compiler_format = "%{name}@{version}"
        compiler_str = compiler_format.format(
            name=entry['compiler']['name'],
            version=entry['compiler']['version']
        )

    spec_format = "{name}@{version} {compiler} {arch}"
    spec_str = spec_format.format(
        name=entry['name'],
        version=entry['version'],
        compiler=compiler_str,
        arch=arch_str
    )

    package = spack.repo.get(entry['name'])

    if 'parameters' in entry:
        variant_strs = list()
        for name, value in entry['parameters'].items():
            # TODO: also ensure that the variant value is valid?
            if not (name in package.variants):
                tty.debug("Omitting variant {0} for entry {1}/{2}"
                          .format(name, entry['name'], entry['hash'][:7]))
                continue

            # Value could be a list (of strings), boolean, or string
            if isinstance(value, six.string_types):
                variant_strs.append('{0}={1}'.format(name, value))
            else:
                try:
                    iter(value)
                    variant_strs.append(
                        '{0}={1}'.format(name, ','.join(value)))
                    continue
                except TypeError:
                    # Not an iterable
                    pass
                # At this point not a string or collection, check for boolean
                if value in [True, False]:
                    bool_symbol = '+' if value else '~'
                    variant_strs.append('{0}{1}'.format(bool_symbol, name))
                else:
                    raise ValueError(
                        "Unexpected value for {0} ({1}): {2}".format(
                            name, str(type(value)), str(value)
                        )
                    )
        spec_str += ' ' + ' '.join(variant_strs)

    spec, = spack.cmd.parse_specs(spec_str.split())

    for ht in [hash_types.dag_hash, hash_types.build_hash,
               hash_types.full_hash]:
        setattr(spec, ht.attr, entry['hash'])

    spec._concrete = True
    spec._hashes_final = True
    spec.external_path = entry['prefix']
    spec.origin = 'external-db'
    spack.spec.Spec.ensure_valid_variants(spec)

    return spec


def entries_to_specs(entries):
    spec_dict = {}
    for entry in entries:
        try:
            spec = spec_from_entry(entry)
            spec_dict[spec._hash] = spec
        except spack.repo.UnknownPackageError:
            tty.debug("Omitting package {0}: no corresponding repo package"
                      .format(entry['name']))
        except spack.error.SpackError:
            raise
        except Exception:
            tty.warn("Could not parse entry: " + str(entry))

    for entry in filter(lambda x: 'dependencies' in x, entries):
        dependencies = entry['dependencies']
        for name, properties in dependencies.items():
            dep_hash = properties['hash']
            deptypes = properties['type']
            if dep_hash in spec_dict:
                if entry['hash'] not in spec_dict:
                    continue
                parent_spec = spec_dict[entry['hash']]
                dep_spec = spec_dict[dep_hash]
                parent_spec._add_dependency(dep_spec, deptypes)

    return spec_dict


def read(path, apply_updates):
    with open(path, 'r') as json_file:
        json_data = json.load(json_file)

    jsonschema.validate(json_data, manifest_schema)

    specs = entries_to_specs(json_data['specs'])
    tty.debug("{0}: {1} specs read from manifest".format(
        path,
        str(len(specs))))
    compilers = list(compiler_from_entry(x)
                     for x in json_data['compilers'])
    tty.debug("{0}: {1} compilers read from manifest".format(
        path,
        str(len(compilers))))
    if apply_updates:
        spack.compilers.add_compilers_to_config(
            compilers, init_config=False)
        for spec in specs.values():
            spack.store.db.add(spec, directory_layout=None)
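For reference, a hedged usage sketch of the deleted module's entry point, called the same way `_collect_and_consume_cray_manifest_files` does above. It assumes a pre-revert checkout where the module still exists; the manifest path is hypothetical, and `apply_updates=False` parses and validates without touching the database:

```python
import spack.cray_manifest as cray_manifest

# Dry run: schema validation plus spec/compiler parsing, no DB writes.
cray_manifest.read('/opt/cray/pe/cpe-descriptive-manifest/manifest.json',
                   apply_updates=False)
```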
@@ -187,7 +187,6 @@ def __init__(
            installation_time=None,
            deprecated_for=None,
            in_buildcache=False,
            origin=None
    ):
        self.spec = spec
        self.path = str(path) if path else None
@@ -197,7 +196,6 @@ def __init__(
        self.installation_time = installation_time or _now()
        self.deprecated_for = deprecated_for
        self.in_buildcache = in_buildcache
        self.origin = origin

    def install_type_matches(self, installed):
        installed = InstallStatuses.canonicalize(installed)
@@ -219,9 +217,6 @@ def to_dict(self, include_fields=default_install_record_fields):
        else:
            rec_dict.update({field_name: getattr(self, field_name)})

        if self.origin:
            rec_dict['origin'] = self.origin

        return rec_dict

    @classmethod
@@ -1136,10 +1131,6 @@ def _add(
            'explicit': explicit,
            'installation_time': installation_time
        }
        # Commands other than 'spack install' may add specs to the DB,
        # we can record the source of an installed Spec with 'origin'
        if hasattr(spec, 'origin'):
            extra_args['origin'] = spec.origin
        self._data[key] = InstallRecord(
            new_spec, path, installed, ref_count=0, **extra_args
        )
@@ -1471,7 +1462,6 @@ def _query(
            end_date=None,
            hashes=None,
            in_buildcache=any,
            origin=None
    ):
        """Run a query on the database."""

@@ -1500,9 +1490,6 @@ def _query(
            if hashes is not None and rec.spec.dag_hash() not in hashes:
                continue

            if origin and not (origin == rec.origin):
                continue

            if not rec.install_type_matches(installed):
                continue

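Before this revert, the `origin` field recorded where a DB entry came from and let callers filter on it. An illustrative sketch against a pre-revert checkout; `'external-db'` is the origin value `cray_manifest` assigned to specs it added, and `origin=` is the reverted keyword shown in the `uninstall.py` hunk above:

```python
import spack.store

# List records that were imported from a manifest rather than installed.
manifest_records = spack.store.db.query_local(origin='external-db')
for spec in manifest_records:
    print(spec.name, spec.external_path)
```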
@@ -1,130 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for Cray descriptive manifest: this describes a set of
   installed packages on the system and also specifies dependency
   relationships between them (so this provides more information than
   external entries in packages configuration).

   This does not specify a configuration - it is an input format
   that is consumed and transformed into Spack DB records.
"""

schema = {
    "$schema": "http://json-schema.org/schema#",
    "title": "CPE manifest schema",
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "_meta": {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "file-type": {"type": "string", "minLength": 1},
                "version": {"type": "string", "minLength": 1},
                "system-type": {"type": "string", "minLength": 1}
            }
        },
        "compilers": {
            "type": "array",
            "items": {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "prefix": {"type": "string", "minLength": 1},
                    "executables": {
                        "type": "object",
                        "additionalProperties": False,
                        "properties": {
                            "cc": {"type": "string", "minLength": 1},
                            "cxx": {"type": "string", "minLength": 1},
                            "fc": {"type": "string", "minLength": 1}
                        }
                    },
                    "arch": {
                        "type": "object",
                        "required": ["os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "os": {"type": "string", "minLength": 1},
                            "target": {"type": "string", "minLength": 1}
                        }
                    }
                }
            }
        },
        "specs": {
            "type": "array",
            "items": {
                "type": "object",
                "required": [
                    "name",
                    "version",
                    "arch",
                    "compiler",
                    "prefix",
                    "hash"],
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "arch": {
                        "type": "object",
                        "required": ["platform", "platform_os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "platform": {"type": "string", "minLength": 1},
                            "platform_os": {"type": "string", "minLength": 1},
                            "target": {
                                "type": "object",
                                "additionalProperties": False,
                                "required": ["name"],
                                "properties": {
                                    "name": {"type": "string", "minLength": 1}
                                }
                            }
                        }
                    },
                    "compiler": {
                        "type": "object",
                        "required": ["name", "version"],
                        "additionalProperties": False,
                        "properties": {
                            "name": {"type": "string", "minLength": 1},
                            "version": {"type": "string", "minLength": 1}
                        }
                    },
                    "dependencies": {
                        "type": "object",
                        "patternProperties": {
                            "\\w[\\w-]*": {
                                "type": "object",
                                "required": ["hash"],
                                "additionalProperties": False,
                                "properties": {
                                    "hash": {"type": "string", "minLength": 1},
                                    "type": {
                                        "type": "array",
                                        "items": {
                                            "type": "string", "minLength": 1}
                                    }
                                }
                            }
                        }
                    },
                    "prefix": {
                        "type": "string", "minLength": 1},
                    "rpm": {"type": "string", "minLength": 1},
                    "hash": {"type": "string", "minLength": 1},
                    "parameters": {
                        "type": "object",
                    }
                }
            }
        }
    }
}
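A minimal document that the schema above accepts, shown with the same `jsonschema.validate` call the deleted `cray_manifest.read` used. Illustrative only: it assumes a pre-revert checkout where this schema module still exists, and the `_meta` values are made up:

```python
import jsonschema

from spack.schema.cray_manifest import schema as manifest_schema

minimal_manifest = {
    "_meta": {"file-type": "cray-pe-json", "version": "1.0"},  # illustrative
    "compilers": [],
    "specs": []
}
# Raises jsonschema.ValidationError if the document does not conform.
jsonschema.validate(minimal_manifest, manifest_schema)
```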
@@ -1505,12 +1505,9 @@ def generate_possible_compilers(self, specs):
                continue

            if strict and s.compiler not in cspecs:
                if not s.concrete:
                    raise spack.concretize.UnavailableCompilerVersionError(
                        s.compiler
                    )
                # Allow unknown compilers to exist if the associated spec
                # is already built
                raise spack.concretize.UnavailableCompilerVersionError(
                    s.compiler
                )
            else:
                cspecs.add(s.compiler)
                self.gen.fact(fn.allow_compiler(
@@ -2911,7 +2911,7 @@ def concretized(self, tests=False):
            if a list of names activate them for the packages in the list,
            if True activate 'test' dependencies for all packages.
        """
        clone = self.copy()
        clone = self.copy(caches=True)
        clone.concretize(tests=tests)
        return clone

@@ -3210,8 +3210,8 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
                "Attempting to normalize anonymous spec")

        # Set _normal and _concrete to False when forced
        if force and not self._concrete:
            self._normal = False
        if force:
            self._mark_concrete(False)

        if self._normal:
            return False
@@ -3680,7 +3680,7 @@ def patches(self):

        return patches

    def _dup(self, other, deps=True, cleardeps=True):
    def _dup(self, other, deps=True, cleardeps=True, caches=None):
        """Copy the spec other into self. This is an overwriting
        copy. It does not copy any dependents (parents), but by default
        copies dependencies.
@@ -3695,6 +3695,10 @@ def _dup(self, other, deps=True, cleardeps=True):
            cleardeps (bool): if True clears the dependencies of ``self``,
                before possibly copying the dependencies of ``other`` onto
                ``self``
            caches (bool or None): preserve cached fields such as
                ``_normal``, ``_hash``, and ``_dunder_hash``. By
                default this is ``False`` if DAG structure would be
                changed by the copy, ``True`` if it's an exact copy.

        Returns:
            True if ``self`` changed because of the copy operation,
@@ -3745,6 +3749,12 @@ def _dup(self, other, deps=True, cleardeps=True):
            self.extra_attributes = other.extra_attributes
        self.namespace = other.namespace

        # Cached fields are results of expensive operations.
        # If we preserved the original structure, we can copy them
        # safely. If not, they need to be recomputed.
        if caches is None:
            caches = (deps is True or deps == dp.all_deptypes)

        # If we copy dependencies, preserve DAG structure in the new spec
        if deps:
            # If caller restricted deptypes to be copied, adjust that here.
@@ -3752,31 +3762,29 @@ def _dup(self, other, deps=True, cleardeps=True):
            deptypes = dp.all_deptypes
            if isinstance(deps, (tuple, list)):
                deptypes = deps
            self._dup_deps(other, deptypes)
            self._dup_deps(other, deptypes, caches)

        self._concrete = other._concrete
        self._hashes_final = other._hashes_final

        if self._concrete:
        if caches:
            self._hash = other._hash
            self._build_hash = other._build_hash
            self._dunder_hash = other._dunder_hash
            self._normal = True
            self._normal = other._normal
            self._full_hash = other._full_hash
            self._package_hash = other._package_hash
        else:
            self._hash = None
            self._build_hash = None
            self._dunder_hash = None
            # Note, we could use other._normal if we are copying all deps, but
            # always set it False here to avoid the complexity of checking
            self._normal = False
            self._full_hash = None
            self._package_hash = None

        return changed

    def _dup_deps(self, other, deptypes):
    def _dup_deps(self, other, deptypes, caches):
        def spid(spec):
            return id(spec)

@@ -3787,11 +3795,11 @@ def spid(spec):

            if spid(edge.parent) not in new_specs:
                new_specs[spid(edge.parent)] = edge.parent.copy(
                    deps=False
                    deps=False, caches=caches
                )

            if spid(edge.spec) not in new_specs:
                new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
                new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)

            new_specs[spid(edge.parent)].add_dependency_edge(
                new_specs[spid(edge.spec)], edge.deptypes
@@ -4627,19 +4635,22 @@ def multiple_specs(root):
        # _dependents of these specs should not be trusted.
        # Variants may also be ignored here for now...

        # Keep all cached hashes because we will invalidate the ones that need
        # invalidating later, and we don't want to invalidate unnecessarily

        if transitive:
            self_nodes = dict((s.name, s.copy(deps=False))
            self_nodes = dict((s.name, s.copy(deps=False, caches=True))
                              for s in self.traverse(root=True)
                              if s.name not in other)
            other_nodes = dict((s.name, s.copy(deps=False))
            other_nodes = dict((s.name, s.copy(deps=False, caches=True))
                               for s in other.traverse(root=True))
        else:
            # If we're not doing a transitive splice, then we only want the
            # root of other.
            self_nodes = dict((s.name, s.copy(deps=False))
            self_nodes = dict((s.name, s.copy(deps=False, caches=True))
                              for s in self.traverse(root=True)
                              if s.name != other.name)
            other_nodes = {other.name: other.copy(deps=False)}
            other_nodes = {other.name: other.copy(deps=False, caches=True)}

        nodes = other_nodes.copy()
        nodes.update(self_nodes)
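The restored default follows from the `caches = (deps is True or deps == dp.all_deptypes)` line above: an exact copy may keep cached hashes, while a structure-changing copy must recompute them. An illustrative check, where `s` stands for any concrete Spec whose hashes have already been computed (private attributes shown only to demonstrate the semantics):

```python
exact = s.copy()             # full-DAG copy: caches defaults to True
pruned = s.copy(deps=False)  # DAG changes: caches defaults to False

assert exact._hash == s._hash   # cached hash carried over
assert pruned._hash is None     # cleared, recomputed on next use
```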
@@ -788,16 +788,7 @@ def database(mock_store, mock_packages, config):


@pytest.fixture(scope='function')
def database_mutable_config(mock_store, mock_packages, mutable_config,
                            monkeypatch):
    """This activates the mock store, packages, AND config."""
    with spack.store.use_store(str(mock_store)) as store:
        yield store.db
        store.db.last_seen_verifier = ''


@pytest.fixture(scope='function')
def mutable_database(database_mutable_config, _store_dir_and_cache):
def mutable_database(database, _store_dir_and_cache):
    """Writeable version of the fixture, restored to its initial state
    after each test.
    """
@@ -805,7 +796,7 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
    store_path, store_cache = _store_dir_and_cache
    store_path.join('.spack-db').chmod(mode=0o755, rec=1)

    yield database_mutable_config
    yield database

    # Restore the initial state by copying the content of the cache back into
    # the store and making the database read-only
@@ -1,247 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import json

import pytest

import spack
import spack.cray_manifest as cray_manifest
from spack.cray_manifest import compiler_from_entry, entries_to_specs

example_x_json_str = """\
{
  "name": "packagex",
  "hash": "hash-of-x",
  "prefix": "/path/to/packagex-install/",
  "version": "1.0",
  "arch": {
    "platform": "linux",
    "platform_os": "centos8",
    "target": {
      "name": "haswell"
    }
  },
  "compiler": {
    "name": "gcc",
    "version": "10.2.0"
  },
  "dependencies": {
    "packagey": {
      "hash": "hash-of-y",
      "type": ["link"]
    }
  },
  "parameters": {
    "precision": ["double", "float"]
  }
}
"""


example_compiler_entry = """\
{
  "name": "gcc",
  "prefix": "/path/to/compiler/",
  "version": "7.5.0",
  "arch": {
    "os": "centos8",
    "target": "x86_64"
  },
  "executables": {
    "cc": "/path/to/compiler/cc",
    "cxx": "/path/to/compiler/cxx",
    "fc": "/path/to/compiler/fc"
  }
}
"""


class JsonSpecEntry(object):
    def __init__(self, name, hash, prefix, version, arch, compiler,
                 dependencies, parameters):
        self.name = name
        self.hash = hash
        self.prefix = prefix
        self.version = version
        self.arch = arch
        self.compiler = compiler
        self.dependencies = dependencies
        self.parameters = parameters

    def to_dict(self):
        return {
            'name': self.name,
            'hash': self.hash,
            'prefix': self.prefix,
            'version': self.version,
            'arch': self.arch,
            'compiler': self.compiler,
            'dependencies': self.dependencies,
            'parameters': self.parameters
        }

    def as_dependency(self, deptypes):
        return (self.name,
                {'hash': self.hash,
                 'type': list(deptypes)})


class JsonArchEntry(object):
    def __init__(self, platform, os, target):
        self.platform = platform
        self.os = os
        self.target = target

    def to_dict(self):
        return {
            'platform': self.platform,
            'platform_os': self.os,
            'target': {
                'name': self.target
            }
        }


class JsonCompilerEntry(object):
    def __init__(self, name, version):
        self.name = name
        self.version = version

    def to_dict(self):
        return {
            'name': self.name,
            'version': self.version
        }


_common_arch = JsonArchEntry(
    platform='linux',
    os='centos8',
    target='haswell'
).to_dict()


_common_compiler = JsonCompilerEntry(
    name='gcc',
    version='10.2.0'
).to_dict()


def test_compatibility():
    """Make sure that JsonSpecEntry outputs the expected JSON structure
    by comparing it with JSON parsed from an example string. This
    ensures that the testing objects like JsonSpecEntry produce the
    same JSON structure as the expected file format.
    """
    y = JsonSpecEntry(
        name='packagey',
        hash='hash-of-y',
        prefix='/path/to/packagey-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies={},
        parameters={}
    )

    x = JsonSpecEntry(
        name='packagex',
        hash='hash-of-x',
        prefix='/path/to/packagex-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies=dict([y.as_dependency(deptypes=['link'])]),
        parameters={'precision': ['double', 'float']}
    )

    x_from_entry = x.to_dict()
    x_from_str = json.loads(example_x_json_str)
    assert x_from_entry == x_from_str


def test_compiler_from_entry():
    compiler_data = json.loads(example_compiler_entry)
    compiler_from_entry(compiler_data)


def generate_openmpi_entries():
    """Generate two example JSON entries that refer to an OpenMPI
    installation and a hwloc dependency.
    """
    # The hashes need to be padded with 'a' at the end to align with 8-byte
    # boundaries (for base-32 decoding)
    hwloc = JsonSpecEntry(
        name='hwloc',
        hash='hwlocfakehashaaa',
        prefix='/path/to/hwloc-install/',
        version='2.0.3',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies={},
        parameters={}
    )

    # This includes a variant which is guaranteed not to appear in the
    # OpenMPI package: we need to make sure we can use such package
    # descriptions.
    openmpi = JsonSpecEntry(
        name='openmpi',
        hash='openmpifakehasha',
        prefix='/path/to/openmpi-install/',
        version='4.1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies=dict([hwloc.as_dependency(deptypes=['link'])]),
        parameters={
            'internal-hwloc': False,
            'fabrics': ['psm'],
            'missing_variant': True
        }
    )

    return [openmpi, hwloc]


def test_spec_conversion():
    """Given JSON entries, check that we can form a set of Specs
    including dependency references.
    """
    entries = list(x.to_dict() for x in generate_openmpi_entries())
    specs = entries_to_specs(entries)
    openmpi_spec, = list(x for x in specs.values() if x.name == 'openmpi')
    assert openmpi_spec['hwloc']


def _example_db():
    return {
        'specs': list(x.to_dict() for x in generate_openmpi_entries()),
        'compilers': []
    }


def test_read_cray_manifest(
        tmpdir, mutable_config, mock_packages, mutable_database):
    """Check that (a) we can read the cray manifest and add it to the Spack
    Database and (b) we can concretize specs based on that.
    """
    if spack.config.get('config:concretizer') == 'clingo':
        pytest.skip("The ASP-based concretizer currently doesn't support"
                    " dependency hash references - see #22613")

    with tmpdir.as_cwd():
        test_db_fname = 'external-db.json'
        with open(test_db_fname, 'w') as db_file:
            json.dump(_example_db(), db_file)
        cray_manifest.read(test_db_fname, True)
        query_specs = spack.store.db.query('openmpi')
        assert any(x.dag_hash() == 'openmpifakehasha' for x in query_specs)

        concretized_specs = spack.cmd.parse_specs(
            'depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64'
            ' ^/openmpifakehasha'.split(),
            concretize=True)
        assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'
@@ -599,6 +599,8 @@ def test_copy_normalized(self):

        assert orig == copy
        assert orig.eq_dag(copy)
        assert orig._normal == copy._normal
        assert orig._concrete == copy._concrete

        # ensure no shared nodes bt/w orig and copy.
        orig_ids = set(id(s) for s in orig.traverse())
@@ -1017,7 +1017,7 @@ _spack_external() {
    then
        SPACK_COMPREPLY="-h --help"
    else
        SPACK_COMPREPLY="find list read-cray-manifest"
        SPACK_COMPREPLY="find list"
    fi
}

@@ -1034,10 +1034,6 @@ _spack_external_list() {
    SPACK_COMPREPLY="-h --help"
}

_spack_external_read_cray_manifest() {
    SPACK_COMPREPLY="-h --help --file --directory --dry-run"
}

_spack_fetch() {
    if $list_options
    then
@@ -1795,7 +1791,7 @@ _spack_undevelop() {
_spack_uninstall() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all --origin"
        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all"
    else
        _installed_packages
    fi
@@ -1,16 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class DependsOnOpenmpi(Package):
    """For testing concretization of packages that use
    `spack external read-cray-manifest`"""

    depends_on('openmpi')

    version('1.0')
    version('0.9')
@@ -1,10 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Hwloc(Package):
    version('2.0.3')
@@ -1,15 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Openmpi(Package):
    version('4.1.1')

    variant('internal-hwloc', default=False)
    variant('fabrics', values=any_combination_of('psm', 'mxm'))

    depends_on('hwloc', when="~internal-hwloc")