More spec improvements
- Spec.copy() does not create superfluous nodes and preserves DAG connections. - Spec.normalize() doesn't create extra dependency nodes or throw out old ones like before. - Added better test cases for the above changes. Minor things: - Fixed a bug waiting to happen in PackageDB.get(): instances was keyed by name, not by spec, so caching wasn't really working at all. - Removed the unused PackageDB.compute_dependents function. - Fixed PackageDB.graph_dependencies() so that spack graph works again.
This commit is contained in:
parent
63f8af8078
commit
5f073ae220
@ -29,7 +29,7 @@
|
|||||||
import spack
|
import spack
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
|
|
||||||
description = "Show dependent packages."
|
description = "Show installed packages that depend on another."
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
@ -42,5 +42,5 @@ def dependents(parser, args):
|
|||||||
tty.die("spack dependents takes only one spec.")
|
tty.die("spack dependents takes only one spec.")
|
||||||
|
|
||||||
fmt = '$_$@$%@$+$=$#'
|
fmt = '$_$@$%@$+$=$#'
|
||||||
deps = [d.format(fmt) for d in specs[0].package.installed_dependents]
|
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
|
||||||
tty.msg("Dependents of %s" % specs[0].format(fmt), *deps)
|
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
|
||||||
|
@ -69,9 +69,9 @@ def get(self, spec):
|
|||||||
|
|
||||||
if not spec in self.instances:
|
if not spec in self.instances:
|
||||||
package_class = self.get_class_for_package_name(spec.name)
|
package_class = self.get_class_for_package_name(spec.name)
|
||||||
self.instances[spec.name] = package_class(spec)
|
self.instances[spec.copy()] = package_class(spec)
|
||||||
|
|
||||||
return self.instances[spec.name]
|
return self.instances[spec]
|
||||||
|
|
||||||
|
|
||||||
@_autospec
|
@_autospec
|
||||||
@ -179,24 +179,6 @@ def get_class_for_package_name(self, pkg_name):
|
|||||||
return cls
|
return cls
|
||||||
|
|
||||||
|
|
||||||
def compute_dependents(self):
|
|
||||||
"""Reads in all package files and sets dependence information on
|
|
||||||
Package objects in memory.
|
|
||||||
"""
|
|
||||||
if not hasattr(compute_dependents, index):
|
|
||||||
compute_dependents.index = {}
|
|
||||||
|
|
||||||
for pkg in all_packages():
|
|
||||||
if pkg._dependents is None:
|
|
||||||
pkg._dependents = []
|
|
||||||
|
|
||||||
for name, dep in pkg.dependencies.iteritems():
|
|
||||||
dpkg = self.get(name)
|
|
||||||
if dpkg._dependents is None:
|
|
||||||
dpkg._dependents = []
|
|
||||||
dpkg._dependents.append(pkg.name)
|
|
||||||
|
|
||||||
|
|
||||||
def graph_dependencies(self, out=sys.stdout):
|
def graph_dependencies(self, out=sys.stdout):
|
||||||
"""Print out a graph of all the dependencies between package.
|
"""Print out a graph of all the dependencies between package.
|
||||||
Graph is in dot format."""
|
Graph is in dot format."""
|
||||||
@ -211,10 +193,17 @@ def quote(string):
|
|||||||
return '"%s"' % string
|
return '"%s"' % string
|
||||||
|
|
||||||
deps = []
|
deps = []
|
||||||
for pkg in all_packages():
|
for pkg in self.all_packages():
|
||||||
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
|
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
|
||||||
|
|
||||||
|
# Add edges for each depends_on in the package.
|
||||||
for dep_name, dep in pkg.dependencies.iteritems():
|
for dep_name, dep in pkg.dependencies.iteritems():
|
||||||
deps.append((pkg.name, dep_name))
|
deps.append((pkg.name, dep_name))
|
||||||
|
|
||||||
|
# If the package provides something, add an edge for that.
|
||||||
|
for provider in set(p.name for p in pkg.provided):
|
||||||
|
deps.append((provider, pkg.name))
|
||||||
|
|
||||||
out.write('\n')
|
out.write('\n')
|
||||||
|
|
||||||
for pair in deps:
|
for pair in deps:
|
||||||
|
@ -310,9 +310,8 @@ def concrete(self):
|
|||||||
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
sorted_dep_names = sorted(self.keys())
|
|
||||||
return ''.join(
|
return ''.join(
|
||||||
["^" + str(self[name]) for name in sorted_dep_names])
|
["^" + str(self[name]) for name in sorted(self.keys())])
|
||||||
|
|
||||||
|
|
||||||
@key_ordering
|
@key_ordering
|
||||||
@ -562,10 +561,9 @@ def dep_hash(self, length=None):
|
|||||||
"""Return a hash representing all dependencies of this spec
|
"""Return a hash representing all dependencies of this spec
|
||||||
(direct and indirect).
|
(direct and indirect).
|
||||||
|
|
||||||
This always normalizes first so that hash is consistent.
|
If you want this hash to be consistent, you should
|
||||||
|
concretize the spec first so that it is not ambiguous.
|
||||||
"""
|
"""
|
||||||
self.normalize()
|
|
||||||
|
|
||||||
sha = hashlib.sha1()
|
sha = hashlib.sha1()
|
||||||
sha.update(self.dep_string())
|
sha.update(self.dep_string())
|
||||||
full_hash = sha.hexdigest()
|
full_hash = sha.hexdigest()
|
||||||
@ -641,7 +639,7 @@ def _expand_virtual_packages(self):
|
|||||||
spec._replace_with(concrete)
|
spec._replace_with(concrete)
|
||||||
|
|
||||||
# If there are duplicate providers or duplicate provider deps, this
|
# If there are duplicate providers or duplicate provider deps, this
|
||||||
# consolidates them and merges constraints.
|
# consolidates them and merge constraints.
|
||||||
self.normalize(force=True)
|
self.normalize(force=True)
|
||||||
|
|
||||||
|
|
||||||
@ -675,43 +673,51 @@ def concretized(self):
|
|||||||
return clone
|
return clone
|
||||||
|
|
||||||
|
|
||||||
def flat_dependencies(self):
|
def flat_dependencies(self, **kwargs):
|
||||||
"""Return a DependencyMap containing all of this spec's dependencies
|
"""Return a DependencyMap containing all of this spec's
|
||||||
with their constraints merged. If there are any conflicts, throw
|
dependencies with their constraints merged.
|
||||||
an exception.
|
|
||||||
|
|
||||||
This will work even on specs that are not normalized; i.e. specs
|
If copy is True, returns merged copies of its dependencies
|
||||||
that have two instances of the same dependency in the DAG.
|
without modifying the spec it's called on.
|
||||||
This is the first step of normalization.
|
|
||||||
|
If copy is False, clears this spec's dependencies and
|
||||||
|
returns them.
|
||||||
"""
|
"""
|
||||||
# Once that is guaranteed, we know any constraint violations are due
|
copy = kwargs.get('copy', True)
|
||||||
# to the spec -- so they're the user's fault, not Spack's.
|
|
||||||
flat_deps = DependencyMap()
|
flat_deps = DependencyMap()
|
||||||
try:
|
try:
|
||||||
for spec in self.traverse(root=False):
|
for spec in self.traverse(root=False):
|
||||||
if spec.name not in flat_deps:
|
if spec.name not in flat_deps:
|
||||||
new_spec = spec.copy(deps=False)
|
if copy:
|
||||||
flat_deps[spec.name] = new_spec
|
flat_deps[spec.name] = spec.copy(deps=False)
|
||||||
|
else:
|
||||||
|
flat_deps[spec.name] = spec
|
||||||
else:
|
else:
|
||||||
flat_deps[spec.name].constrain(spec)
|
flat_deps[spec.name].constrain(spec)
|
||||||
|
|
||||||
except UnsatisfiableSpecError, e:
|
if not copy:
|
||||||
# This REALLY shouldn't happen unless something is wrong in spack.
|
for dep in flat_deps.values():
|
||||||
# It means we got a spec DAG with two instances of the same package
|
dep.dependencies.clear()
|
||||||
# that had inconsistent constraints. There's no way for a user to
|
dep.dependents.clear()
|
||||||
# produce a spec like this (the parser adds all deps to the root),
|
self.dependencies.clear()
|
||||||
# so this means OUR code is not sane!
|
|
||||||
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
|
|
||||||
|
|
||||||
return flat_deps
|
return flat_deps
|
||||||
|
|
||||||
|
except UnsatisfiableSpecError, e:
|
||||||
|
# Here, the DAG contains two instances of the same package
|
||||||
|
# with inconsistent constraints. Users cannot produce
|
||||||
|
# inconsistent specs like this on the command line: the
|
||||||
|
# parser doesn't allow it. Spack must be broken!
|
||||||
|
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
|
||||||
|
|
||||||
|
|
||||||
def flatten(self):
|
def flatten(self):
|
||||||
"""Pull all dependencies up to the root (this spec).
|
"""Pull all dependencies up to the root (this spec).
|
||||||
Merge constraints for dependencies with the same name, and if they
|
Merge constraints for dependencies with the same name, and if they
|
||||||
conflict, throw an exception. """
|
conflict, throw an exception. """
|
||||||
self.dependencies = self.flat_dependencies()
|
for dep in self.flat_dependencies(copy=False):
|
||||||
|
self._add_dependency(dep)
|
||||||
|
|
||||||
|
|
||||||
def _normalize_helper(self, visited, spec_deps, provider_index):
|
def _normalize_helper(self, visited, spec_deps, provider_index):
|
||||||
@ -819,8 +825,7 @@ def normalize(self, **kwargs):
|
|||||||
self.package.validate_dependencies()
|
self.package.validate_dependencies()
|
||||||
|
|
||||||
# Get all the dependencies into one DependencyMap
|
# Get all the dependencies into one DependencyMap
|
||||||
spec_deps = self.flat_dependencies()
|
spec_deps = self.flat_dependencies(copy=False)
|
||||||
self.dependencies.clear()
|
|
||||||
|
|
||||||
# Figure out which of the user-provided deps provide virtual deps.
|
# Figure out which of the user-provided deps provide virtual deps.
|
||||||
# Remove virtual deps that are already provided by something in the spec
|
# Remove virtual deps that are already provided by something in the spec
|
||||||
@ -1037,22 +1042,29 @@ def _dup(self, other, **kwargs):
|
|||||||
Whether deps should be copied too. Set to false to copy a
|
Whether deps should be copied too. Set to false to copy a
|
||||||
spec but not its dependencies.
|
spec but not its dependencies.
|
||||||
"""
|
"""
|
||||||
# TODO: this needs to handle DAGs.
|
# Local node attributes get copied first.
|
||||||
self.name = other.name
|
self.name = other.name
|
||||||
self.versions = other.versions.copy()
|
self.versions = other.versions.copy()
|
||||||
self.variants = other.variants.copy()
|
self.variants = other.variants.copy()
|
||||||
self.architecture = other.architecture
|
self.architecture = other.architecture
|
||||||
self.compiler = None
|
self.compiler = other.compiler.copy() if other.compiler else None
|
||||||
if other.compiler:
|
|
||||||
self.compiler = other.compiler.copy()
|
|
||||||
|
|
||||||
self.dependents = DependencyMap()
|
self.dependents = DependencyMap()
|
||||||
copy_deps = kwargs.get('deps', True)
|
self.dependencies = DependencyMap()
|
||||||
if copy_deps:
|
|
||||||
self.dependencies = other.dependencies.copy()
|
|
||||||
else:
|
|
||||||
self.dependencies = DependencyMap()
|
|
||||||
|
|
||||||
|
# If we copy dependencies, preserve DAG structure in the new spec
|
||||||
|
if kwargs.get('deps', True):
|
||||||
|
# This copies the deps from other using _dup(deps=False)
|
||||||
|
new_nodes = other.flat_dependencies()
|
||||||
|
new_nodes[self.name] = self
|
||||||
|
|
||||||
|
# Hook everything up properly here by traversing.
|
||||||
|
for spec in other.traverse(cover='nodes'):
|
||||||
|
parent = new_nodes[spec.name]
|
||||||
|
for child in spec.dependencies:
|
||||||
|
if child not in parent.dependencies:
|
||||||
|
parent._add_dependency(new_nodes[child])
|
||||||
|
|
||||||
|
# Since we preserved structure, we can copy _normal safely.
|
||||||
self._normal = other._normal
|
self._normal = other._normal
|
||||||
self._concrete = other._concrete
|
self._concrete = other._concrete
|
||||||
|
|
||||||
|
@ -29,10 +29,12 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
import shutil
|
import shutil
|
||||||
import os
|
import os
|
||||||
|
from contextlib import closing
|
||||||
|
|
||||||
from llnl.util.filesystem import *
|
from llnl.util.filesystem import *
|
||||||
|
|
||||||
import spack
|
import spack
|
||||||
|
from spack.spec import Spec
|
||||||
from spack.packages import PackageDB
|
from spack.packages import PackageDB
|
||||||
from spack.directory_layout import SpecHashDirectoryLayout
|
from spack.directory_layout import SpecHashDirectoryLayout
|
||||||
|
|
||||||
@ -83,6 +85,18 @@ def test_read_and_write_spec(self):
|
|||||||
# Make sure spec file can be read back in to get the original spec
|
# Make sure spec file can be read back in to get the original spec
|
||||||
spec_from_file = self.layout.read_spec(spec_path)
|
spec_from_file = self.layout.read_spec(spec_path)
|
||||||
self.assertEqual(spec, spec_from_file)
|
self.assertEqual(spec, spec_from_file)
|
||||||
|
self.assertTrue(spec.eq_dag, spec_from_file)
|
||||||
|
self.assertTrue(spec_from_file.concrete)
|
||||||
|
|
||||||
|
# Ensure that specs that come out "normal" are really normal.
|
||||||
|
with closing(open(spec_path)) as spec_file:
|
||||||
|
read_separately = Spec(spec_file.read())
|
||||||
|
|
||||||
|
read_separately.normalize()
|
||||||
|
self.assertEqual(read_separately, spec_from_file)
|
||||||
|
|
||||||
|
read_separately.concretize()
|
||||||
|
self.assertEqual(read_separately, spec_from_file)
|
||||||
|
|
||||||
# Make sure the dep hash of the read-in spec is the same
|
# Make sure the dep hash of the read-in spec is the same
|
||||||
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
|
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
|
||||||
|
@ -392,3 +392,56 @@ def test_contains(self):
|
|||||||
self.assertIn(Spec('libdwarf'), spec)
|
self.assertIn(Spec('libdwarf'), spec)
|
||||||
self.assertNotIn(Spec('libgoblin'), spec)
|
self.assertNotIn(Spec('libgoblin'), spec)
|
||||||
self.assertIn(Spec('mpileaks'), spec)
|
self.assertIn(Spec('mpileaks'), spec)
|
||||||
|
|
||||||
|
|
||||||
|
def test_copy_simple(self):
|
||||||
|
orig = Spec('mpileaks')
|
||||||
|
copy = orig.copy()
|
||||||
|
|
||||||
|
self.check_links(copy)
|
||||||
|
|
||||||
|
self.assertEqual(orig, copy)
|
||||||
|
self.assertTrue(orig.eq_dag(copy))
|
||||||
|
self.assertEqual(orig._normal, copy._normal)
|
||||||
|
self.assertEqual(orig._concrete, copy._concrete)
|
||||||
|
|
||||||
|
# ensure no shared nodes bt/w orig and copy.
|
||||||
|
orig_ids = set(id(s) for s in orig.traverse())
|
||||||
|
copy_ids = set(id(s) for s in copy.traverse())
|
||||||
|
self.assertFalse(orig_ids.intersection(copy_ids))
|
||||||
|
|
||||||
|
|
||||||
|
def test_copy_normalized(self):
|
||||||
|
orig = Spec('mpileaks')
|
||||||
|
orig.normalize()
|
||||||
|
copy = orig.copy()
|
||||||
|
|
||||||
|
self.check_links(copy)
|
||||||
|
|
||||||
|
self.assertEqual(orig, copy)
|
||||||
|
self.assertTrue(orig.eq_dag(copy))
|
||||||
|
self.assertEqual(orig._normal, copy._normal)
|
||||||
|
self.assertEqual(orig._concrete, copy._concrete)
|
||||||
|
|
||||||
|
# ensure no shared nodes bt/w orig and copy.
|
||||||
|
orig_ids = set(id(s) for s in orig.traverse())
|
||||||
|
copy_ids = set(id(s) for s in copy.traverse())
|
||||||
|
self.assertFalse(orig_ids.intersection(copy_ids))
|
||||||
|
|
||||||
|
|
||||||
|
def test_copy_concretized(self):
|
||||||
|
orig = Spec('mpileaks')
|
||||||
|
orig.concretize()
|
||||||
|
copy = orig.copy()
|
||||||
|
|
||||||
|
self.check_links(copy)
|
||||||
|
|
||||||
|
self.assertEqual(orig, copy)
|
||||||
|
self.assertTrue(orig.eq_dag(copy))
|
||||||
|
self.assertEqual(orig._normal, copy._normal)
|
||||||
|
self.assertEqual(orig._concrete, copy._concrete)
|
||||||
|
|
||||||
|
# ensure no shared nodes bt/w orig and copy.
|
||||||
|
orig_ids = set(id(s) for s in orig.traverse())
|
||||||
|
copy_ids = set(id(s) for s in copy.traverse())
|
||||||
|
self.assertFalse(orig_ids.intersection(copy_ids))
|
||||||
|
Loading…
Reference in New Issue
Block a user