Merge pull request #1862 from LLNL/features/graph-improvements

spack graph improvements
Todd Gamblin 2016-09-28 00:10:14 -04:00 committed by GitHub
commit 78f4081bc9
9 changed files with 259 additions and 164 deletions

.gitignore vendored
View File

@ -1,3 +1,4 @@
+/db
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml

View File

@ -207,7 +207,7 @@ supply ``-p`` to Spack on the command line, before any subcommands.
``spack --profile`` output looks like this:

-.. command-output:: spack --profile graph dyninst
+.. command-output:: spack --profile graph --deptype=nobuild dyninst
   :ellipsis: 25

The bottom of the output shows the top most time consuming functions,

View File

@ -2888,9 +2888,22 @@ dependency graph. For example:
.. command-output:: spack graph mpileaks

At the top is the root package in the DAG, with dependency edges emerging
from it.  On a color terminal, the edges are colored by which dependency
they lead to.

+.. command-output:: spack graph --deptype=all mpileaks
+
+The ``deptype`` argument tells Spack what types of dependencies to graph.
+By default it includes link and run dependencies but not build
+dependencies.  Supplying ``--deptype=all`` will show the build
+dependencies as well.  This is equivalent to ``--deptype=build,link,run``.
+Options for ``deptype`` include:
+
+* Any combination of ``build``, ``link``, and ``run`` separated by
+  commas.
+* ``nobuild``, ``nolink``, ``norun`` to omit one type.
+* ``all`` or ``alldeps`` for all types of dependencies.
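For reference, these shorthands map onto the deptype tuples added to the spack.spec module later in this diff. A minimal sketch (not part of the commit), assuming a Spack checkout with its library directory on sys.path:

    # Illustration only, not part of this commit.
    from spack.spec import canonical_deptype, alldeps, nobuild

    assert canonical_deptype('all') == ('build', 'link', 'run') == alldeps
    assert canonical_deptype('nobuild') == ('link', 'run') == nobuild
    assert canonical_deptype('build,link,run'.split(',')) == alldeps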
You can also use ``spack graph`` to generate graphs in the widely used You can also use ``spack graph`` to generate graphs in the widely used
`Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For `Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For

View File

@ -24,8 +24,11 @@
##############################################################################
import argparse

+import llnl.util.tty as tty

import spack
import spack.cmd
+from spack.spec import *
from spack.graph import *

description = "Generate graphs of package dependency relationships."
@ -43,8 +46,21 @@ def setup_parser(subparser):
help="Generate graph in dot format and print to stdout.") help="Generate graph in dot format and print to stdout.")
subparser.add_argument( subparser.add_argument(
'--concretize', action='store_true', '--normalize', action='store_true',
help="Concretize specs before graphing.") help="Skip concretization; only print normalized spec.")
subparser.add_argument(
'-s', '--static', action='store_true',
help="Use static information from packages, not dynamic spec info.")
subparser.add_argument(
'-i', '--installed', action='store_true',
help="Graph all installed specs in dot format (implies --dot).")
subparser.add_argument(
'-t', '--deptype', action='store',
help="Comma-separated list of deptypes to traverse. default=%s."
% ','.join(alldeps))
subparser.add_argument( subparser.add_argument(
'specs', nargs=argparse.REMAINDER, 'specs', nargs=argparse.REMAINDER,
@ -52,18 +68,32 @@ def setup_parser(subparser):
def graph(parser, args):
-    specs = spack.cmd.parse_specs(
-        args.specs, normalize=True, concretize=args.concretize)
+    concretize = not args.normalize
+    if args.installed:
+        if args.specs:
+            tty.die("Can't specify specs with --installed")
+        args.dot = True
+        specs = spack.installed_db.query()
+    else:
+        specs = spack.cmd.parse_specs(
+            args.specs, normalize=True, concretize=concretize)

    if not specs:
        setup_parser.parser.print_help()
        return 1

+    deptype = nobuild
+    if args.deptype:
+        deptype = tuple(args.deptype.split(','))
+        validate_deptype(deptype)
+        deptype = canonical_deptype(deptype)

    if args.dot:  # Dot graph only if asked for.
-        graph_dot(*specs)
+        graph_dot(specs, static=args.static, deptype=deptype)

    elif specs:  # ascii is default: user doesn't need to provide it explicitly
-        graph_ascii(specs[0], debug=spack.debug)
+        graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
        for spec in specs[1:]:
            print  # extra line bt/w independent graphs
            graph_ascii(spec, debug=spack.debug)
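A rough sketch of exercising the same plumbing from a Python prompt rather than through the command; the ``mpileaks`` spec and all import paths come from this diff, the rest is assumed (illustration only, not part of the commit):

    import spack
    import spack.cmd
    from spack.graph import graph_ascii, graph_dot
    from spack.spec import nobuild

    specs = spack.cmd.parse_specs(['mpileaks'], normalize=True, concretize=True)
    graph_ascii(specs[0], debug=spack.debug, deptype=nobuild)  # ASCII DAG to stdout
    graph_dot(specs, static=False, deptype=nobuild)            # dot format to stdout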

View File

@ -67,22 +67,20 @@
from llnl.util.lang import *
from llnl.util.tty.color import *

-import spack
-from spack.spec import Spec
+from spack.spec import *

__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']


-def topological_sort(spec, **kwargs):
+def topological_sort(spec, reverse=False, deptype=None):
    """Topological sort for specs.

    Return a list of dependency specs sorted topologically.  The spec
    argument is not modified in the process.

    """
-    reverse = kwargs.get('reverse', False)
-    # XXX(deptype): iterate over a certain kind of dependency. Maybe color
-    # edges based on the type of dependency?
+    deptype = canonical_deptype(deptype)

    if not reverse:
        parents = lambda s: s.dependents()
        children = lambda s: s.dependencies()
@ -91,7 +89,7 @@ def topological_sort(spec, **kwargs):
        children = lambda s: s.dependents()

    # Work on a copy so this is nondestructive.
-    spec = spec.copy()
+    spec = spec.copy(deps=deptype)
    nodes = spec.index()

    topo_order = []
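A quick illustration of the new keyword arguments (a sketch, not part of the commit), assuming an importable Spack with a concretizable ``mpileaks`` package:

    from spack.graph import topological_sort
    from spack.spec import Spec, nolink

    spec = Spec('mpileaks')
    spec.concretize()
    order = topological_sort(spec, reverse=True, deptype=nolink)
    print([s.name for s in order])  # dependency specs in topological order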
@ -129,7 +127,7 @@ def find(seq, predicate):
    return -1


-# Names of different graph line states.  We Record previous line
+# Names of different graph line states.  We record previous line
# states so that we can easily determine what to do when connecting.
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
@ -143,6 +141,7 @@ def __init__(self):
        self.node_character = '*'
        self.debug = False
        self.indent = 0
+        self.deptype = alldeps

        # These are colors in the order they'll be used for edges.
        # See llnl.util.tty.color for details on color characters.
@ -162,6 +161,9 @@ def _indent(self):
    def _write_edge(self, string, index, sub=0):
        """Write a colored edge to the output stream."""
+        # Ignore empty frontier entries (they're just collapsed)
+        if not self._frontier[index]:
+            return
+
        name = self._frontier[index][sub]
        edge = "@%s{%s}" % (self._name_to_color[name], string)
        self._out.write(edge)
@ -386,7 +388,7 @@ def write(self, spec, **kwargs):
        self._out = ColorStream(sys.stdout, color=color)

        # We'll traverse the spec in topo order as we graph it.
-        topo_order = topological_sort(spec, reverse=True)
+        topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)

        # Work on a copy to be nondestructive
        spec = spec.copy()
@ -420,20 +422,26 @@ def write(self, spec, **kwargs):
            if back:
                back.sort()
                prev_ends = []
+                collapse_l1 = False
                for j, (b, d) in enumerate(back):
                    self._frontier[i].remove(d)
                    if i - b > 1:
-                        self._back_edge_line(prev_ends, b, i, False,
-                                             'left-1')
+                        collapse_l1 = any(not e for e in self._frontier)
+                        self._back_edge_line(
+                            prev_ends, b, i, collapse_l1, 'left-1')
                        del prev_ends[:]
                    prev_ends.append(b)

                # Check whether we did ALL the deps as back edges,
                # in which case we're done.
-                collapse = not self._frontier[i]
-                if collapse:
+                pop = not self._frontier[i]
+                collapse_l2 = pop
+                if collapse_l1:
+                    collapse_l2 = False
+                if pop:
                    self._frontier.pop(i)
-                self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
+                self._back_edge_line(
+                    prev_ends, -1, -1, collapse_l2, 'left-2')

            elif len(self._frontier[i]) > 1:
                # Expand forward after doing all back connections
@ -476,32 +484,28 @@ def write(self, spec, **kwargs):
                # Replace node with its dependencies
                self._frontier.pop(i)
-                if node.dependencies():
-                    deps = sorted((d.name for d in node.dependencies()),
-                                  reverse=True)
+                deps = node.dependencies(self.deptype)
+                if deps:
+                    deps = sorted((d.name for d in deps), reverse=True)
                    self._connect_deps(i, deps, "new-deps")  # anywhere.

                elif self._frontier:
                    self._collapse_line(i)


-def graph_ascii(spec, **kwargs):
-    node_character = kwargs.get('node', 'o')
-    out = kwargs.pop('out', None)
-    debug = kwargs.pop('debug', False)
-    indent = kwargs.pop('indent', 0)
-    color = kwargs.pop('color', None)
-    check_kwargs(kwargs, graph_ascii)
+def graph_ascii(spec, node='o', out=None, debug=False,
+                indent=0, color=None, deptype=None):
    graph = AsciiGraph()
    graph.debug = debug
    graph.indent = indent
-    graph.node_character = node_character
+    graph.node_character = node
+    if deptype:
+        graph.deptype = canonical_deptype(deptype)

    graph.write(spec, color=color, out=out)


-def graph_dot(*specs, **kwargs):
+def graph_dot(specs, deptype=None, static=False, out=None):
    """Generate a graph in dot format of all provided specs.

    Print out a dot formatted graph of all the dependencies between
@ -510,42 +514,73 @@ def graph_dot(*specs, **kwargs):
        spack graph --dot qt | dot -Tpdf > spack-graph.pdf

    """
-    out = kwargs.pop('out', sys.stdout)
-    check_kwargs(kwargs, graph_dot)
+    if out is None:
+        out = sys.stdout
+
+    if deptype is None:
+        deptype = alldeps

    out.write('digraph G {\n')
-    out.write('  label = "Spack Dependencies"\n')
    out.write('  labelloc = "b"\n')
    out.write('  rankdir = "LR"\n')
    out.write('  ranksep = "5"\n')
+    out.write('node[\n')
+    out.write('  fontname=Monaco,\n')
+    out.write('  penwidth=2,\n')
+    out.write('  fontsize=12,\n')
+    out.write('  margin=.1,\n')
+    out.write('  shape=box,\n')
+    out.write('  fillcolor=lightblue,\n')
+    out.write('  style="rounded,filled"]\n')
    out.write('\n')

-    def quote(string):
+    def q(string):
        return '"%s"' % string

    if not specs:
-        specs = [p.name for p in spack.repo.all_packages()]
-    else:
-        roots = specs
-        specs = set()
-        for spec in roots:
-            specs.update(Spec(s.name) for s in spec.normalized().traverse())
+        raise ValueError("Must provide specs to graph_dot")

-    deps = []
+    # Static graph includes anything a package COULD depend on.
+    if static:
+        names = set.union(*[s.package.possible_dependencies() for s in specs])
+        specs = [Spec(name) for name in names]
+
+    labeled = set()
+
+    def label(key, label):
+        if key not in labeled:
+            out.write('  "%s" [label="%s"]\n' % (key, label))
+            labeled.add(key)
+
+    deps = set()
    for spec in specs:
-        out.write('  %-30s [label="%s"]\n' % (quote(spec.name), spec.name))
+        if static:
+            out.write('  "%s" [label="%s"]\n' % (spec.name, spec.name))

            # Skip virtual specs (we'll find out about them from concrete ones).
            if spec.virtual:
                continue

            # Add edges for each depends_on in the package.
            for dep_name, dep in spec.package.dependencies.iteritems():
-                deps.append((spec.name, dep_name))
+                deps.add((spec.name, dep_name))

            # If the package provides something, add an edge for that.
            for provider in set(s.name for s in spec.package.provided):
-                deps.append((provider, spec.name))
+                deps.add((provider, spec.name))

+        else:
+            def key_label(s):
+                return s.dag_hash(), "%s-%s" % (s.name, s.dag_hash(7))
+
+            for s in spec.traverse(deptype=deptype):
+                skey, slabel = key_label(s)
+                out.write('  "%s" [label="%s"]\n' % (skey, slabel))
+
+                for d in s.dependencies(deptype=deptype):
+                    dkey, _ = key_label(d)
+                    deps.add((skey, dkey))

    out.write('\n')
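To make the new signature concrete, a hedged usage sketch (not from the commit) that writes the dot output to a file instead of stdout; the ``qt`` example and the ``dot -Tpdf`` pipeline come from the docstring above, the filename is made up:

    import spack.cmd
    from spack.graph import graph_dot

    specs = spack.cmd.parse_specs(['qt'], normalize=True, concretize=True)
    with open('spack-graph.dot', 'w') as f:
        graph_dot(specs, static=False, deptype=('link', 'run'), out=f)
    # then, e.g.:  dot -Tpdf spack-graph.dot > spack-graph.pdf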

View File

@ -411,6 +411,20 @@ def __init__(self, spec):
        if self.is_extension:
            spack.repo.get(self.extendee_spec)._check_extendable()

+    def possible_dependencies(self, visited=None):
+        """Return set of possible transitive dependencies of this package."""
+        if visited is None:
+            visited = set()
+
+        visited.add(self.name)
+        for name in self.dependencies:
+            if name not in visited and not spack.spec.Spec(name).virtual:
+                pkg = spack.repo.get(name)
+                for name in pkg.possible_dependencies(visited):
+                    visited.add(name)
+
+        return visited
+
    @property
    def package_dir(self):
        """Return the directory where the package.py file lives."""

View File

@ -123,6 +123,39 @@
from spack.version import *
from spack.provider_index import ProviderIndex

+__all__ = [
+    'Spec',
+    'alldeps',
+    'nolink',
+    'nobuild',
+    'canonical_deptype',
+    'validate_deptype',
+    'parse',
+    'parse_anonymous_spec',
+    'SpecError',
+    'SpecParseError',
+    'DuplicateDependencyError',
+    'DuplicateVariantError',
+    'DuplicateCompilerSpecError',
+    'UnsupportedCompilerError',
+    'UnknownVariantError',
+    'DuplicateArchitectureError',
+    'InconsistentSpecError',
+    'InvalidDependencyError',
+    'InvalidDependencyTypeError',
+    'NoProviderError',
+    'MultipleProviderError',
+    'UnsatisfiableSpecError',
+    'UnsatisfiableSpecNameError',
+    'UnsatisfiableVersionSpecError',
+    'UnsatisfiableCompilerSpecError',
+    'UnsatisfiableVariantSpecError',
+    'UnsatisfiableCompilerFlagSpecError',
+    'UnsatisfiableArchitectureSpecError',
+    'UnsatisfiableProviderSpecError',
+    'UnsatisfiableDependencySpecError',
+    'SpackYAMLError',
+    'AmbiguousHashError']

# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@ -156,12 +189,46 @@
# Special types of dependencies.
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
+nobuild = ('link', 'run')
+norun = ('link', 'build')
special_types = {
    'alldeps': alldeps,
+    'all': alldeps,  # allow "all" as string but not symbol.
    'nolink': nolink,
+    'nobuild': nobuild,
+    'norun': norun,
}

+legal_deps = tuple(special_types) + alldeps
+
+
+def validate_deptype(deptype):
+    if isinstance(deptype, str):
+        if deptype not in legal_deps:
+            raise InvalidDependencyTypeError(
+                "Invalid dependency type: %s" % deptype)
+
+    elif isinstance(deptype, (list, tuple)):
+        for t in deptype:
+            validate_deptype(t)
+
+    elif deptype is None:
+        raise InvalidDependencyTypeError("deptype cannot be None!")
+
+
+def canonical_deptype(deptype):
+    if deptype is None:
+        return alldeps
+
+    elif isinstance(deptype, str):
+        return special_types.get(deptype, (deptype,))
+
+    elif isinstance(deptype, (tuple, list)):
+        return (sum((canonical_deptype(d) for d in deptype), ()))
+
+    return deptype


def colorize_spec(spec):
    """Returns a spec colorized according to the colors specified in
@ -542,17 +609,8 @@ def get_dependency(self, name):
            raise InvalidDependencyException(
                self.name + " does not depend on " + comma_or(name))

-    def _deptype_norm(self, deptype):
-        if deptype is None:
-            return alldeps
-        # Force deptype to be a set object so that we can do set intersections.
-        if isinstance(deptype, str):
-            # Support special deptypes.
-            return special_types.get(deptype, (deptype,))
-        return deptype
-
    def _find_deps(self, where, deptype):
-        deptype = self._deptype_norm(deptype)
+        deptype = canonical_deptype(deptype)

        return [dep.spec
                for dep in where.values()
@ -565,7 +623,7 @@ def dependents(self, deptype=None):
        return self._find_deps(self._dependents, deptype)

    def _find_deps_dict(self, where, deptype):
-        deptype = self._deptype_norm(deptype)
+        deptype = canonical_deptype(deptype)

        return dict((dep.spec.name, dep)
                    for dep in where.values()
@ -1361,12 +1419,11 @@ def flat_dependencies_with_deptype(self, **kwargs):
            # parser doesn't allow it. Spack must be broken!
            raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)

-    def index(self):
+    def index(self, deptype=None):
        """Return DependencyMap that points to all the dependencies in this
        spec."""
        dm = DependencyMap()
-        # XXX(deptype): use a deptype kwarg.
-        for spec in self.traverse():
+        for spec in self.traverse(deptype=deptype):
            dm[spec.name] = spec
        return dm
@ -1569,7 +1626,7 @@ def normalize(self, force=False):
        # actually deps of this package.  Raise an error.
        extra = set(spec_deps.keys()).difference(visited)
        if extra:
-            raise InvalidDependencyException(
+            raise InvalidDependencyError(
                self.name + " does not depend on " + comma_or(extra))

        # Mark the spec as normal once done.
@ -2667,17 +2724,11 @@ def parse_anonymous_spec(spec_like, pkg_name):
class SpecError(spack.error.SpackError):
    """Superclass for all errors that occur while constructing specs."""
-    def __init__(self, message):
-        super(SpecError, self).__init__(message)


class SpecParseError(SpecError):
    """Wrapper for ParseError for when we're parsing specs."""
    def __init__(self, parse_error):
        super(SpecParseError, self).__init__(parse_error.message)
        self.string = parse_error.string
@ -2685,79 +2736,53 @@ def __init__(self, parse_error):
class DuplicateDependencyError(SpecError):
    """Raised when the same dependency occurs in a spec twice."""
-    def __init__(self, message):
-        super(DuplicateDependencyError, self).__init__(message)


class DuplicateVariantError(SpecError):
    """Raised when the same variant occurs in a spec twice."""
-    def __init__(self, message):
-        super(DuplicateVariantError, self).__init__(message)


class DuplicateCompilerSpecError(SpecError):
    """Raised when the same compiler occurs in a spec twice."""
-    def __init__(self, message):
-        super(DuplicateCompilerSpecError, self).__init__(message)


class UnsupportedCompilerError(SpecError):
    """Raised when the user asks for a compiler spack doesn't know about."""
    def __init__(self, compiler_name):
        super(UnsupportedCompilerError, self).__init__(
            "The '%s' compiler is not yet supported." % compiler_name)


class UnknownVariantError(SpecError):
    """Raised when the same variant occurs in a spec twice."""
    def __init__(self, pkg, variant):
        super(UnknownVariantError, self).__init__(
            "Package %s has no variant %s!" % (pkg, variant))


class DuplicateArchitectureError(SpecError):
    """Raised when the same architecture occurs in a spec twice."""
-    def __init__(self, message):
-        super(DuplicateArchitectureError, self).__init__(message)


class InconsistentSpecError(SpecError):
    """Raised when two nodes in the same spec DAG have inconsistent
    constraints."""
-    def __init__(self, message):
-        super(InconsistentSpecError, self).__init__(message)


-class InvalidDependencyException(SpecError):
+class InvalidDependencyError(SpecError):
    """Raised when a dependency in a spec is not actually a dependency
    of the package."""
-    def __init__(self, message):
-        super(InvalidDependencyException, self).__init__(message)


+class InvalidDependencyTypeError(SpecError):
+    """Raised when a dependency type is not a legal Spack dep type."""


class NoProviderError(SpecError):
    """Raised when there is no package that provides a particular
    virtual dependency.
    """
    def __init__(self, vpkg):
        super(NoProviderError, self).__init__(
            "No providers found for virtual package: '%s'" % vpkg)
@ -2765,11 +2790,9 @@ def __init__(self, vpkg):
class MultipleProviderError(SpecError):
    """Raised when there is no package that provides a particular
    virtual dependency.
    """
    def __init__(self, vpkg, providers):
        """Takes the name of the vpkg"""
        super(MultipleProviderError, self).__init__(
@ -2780,10 +2803,8 @@ def __init__(self, vpkg, providers):
class UnsatisfiableSpecError(SpecError):
    """Raised when a spec conflicts with package constraints.
    Provide the requirement that was violated when raising."""
    def __init__(self, provided, required, constraint_type):
        super(UnsatisfiableSpecError, self).__init__(
            "%s does not satisfy %s" % (provided, required))
@ -2793,89 +2814,70 @@ def __init__(self, provided, required, constraint_type):
class UnsatisfiableSpecNameError(UnsatisfiableSpecError):
    """Raised when two specs aren't even for the same package."""
    def __init__(self, provided, required):
        super(UnsatisfiableSpecNameError, self).__init__(
            provided, required, "name")


class UnsatisfiableVersionSpecError(UnsatisfiableSpecError):
    """Raised when a spec version conflicts with package constraints."""
    def __init__(self, provided, required):
        super(UnsatisfiableVersionSpecError, self).__init__(
            provided, required, "version")


class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError):
    """Raised when a spec compiler conflicts with package constraints."""
    def __init__(self, provided, required):
        super(UnsatisfiableCompilerSpecError, self).__init__(
            provided, required, "compiler")


class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
    """Raised when a spec variant conflicts with package constraints."""
    def __init__(self, provided, required):
        super(UnsatisfiableVariantSpecError, self).__init__(
            provided, required, "variant")


class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
    """Raised when a spec variant conflicts with package constraints."""
    def __init__(self, provided, required):
        super(UnsatisfiableCompilerFlagSpecError, self).__init__(
            provided, required, "compiler_flags")


class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
    """Raised when a spec architecture conflicts with package constraints."""
    def __init__(self, provided, required):
        super(UnsatisfiableArchitectureSpecError, self).__init__(
            provided, required, "architecture")


class UnsatisfiableProviderSpecError(UnsatisfiableSpecError):
    """Raised when a provider is supplied but constraints don't match
    a vpkg requirement"""
    def __init__(self, provided, required):
        super(UnsatisfiableProviderSpecError, self).__init__(
            provided, required, "provider")


# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
    """Raised when some dependency of constrained specs is incompatible"""
    def __init__(self, provided, required):
        super(UnsatisfiableDependencySpecError, self).__init__(
            provided, required, "dependency")


class SpackYAMLError(spack.error.SpackError):
    def __init__(self, msg, yaml_error):
        super(SpackYAMLError, self).__init__(msg, str(yaml_error))


class AmbiguousHashError(SpecError):
    def __init__(self, msg, *specs):
        super(AmbiguousHashError, self).__init__(msg)
        for spec in specs:

View File

@ -241,15 +241,15 @@ def test_unsatisfiable_architecture(self):
    def test_invalid_dep(self):
        spec = Spec('libelf ^mpich')
-        self.assertRaises(spack.spec.InvalidDependencyException,
+        self.assertRaises(spack.spec.InvalidDependencyError,
                          spec.normalize)

        spec = Spec('libelf ^libdwarf')
-        self.assertRaises(spack.spec.InvalidDependencyException,
+        self.assertRaises(spack.spec.InvalidDependencyError,
                          spec.normalize)

        spec = Spec('mpich ^dyninst ^libelf')
-        self.assertRaises(spack.spec.InvalidDependencyException,
+        self.assertRaises(spack.spec.InvalidDependencyError,
                          spec.normalize)

    def test_equal(self):

View File

@ -24,34 +24,34 @@
##############################################################################
import unittest

-import spack.spec
+import spack.spec as sp
from spack.parse import Token
from spack.spec import *

# Sample output for a complex lexing.
-complex_lex = [Token(ID, 'mvapich_foo'),
-               Token(DEP),
-               Token(ID, '_openmpi'),
-               Token(AT),
-               Token(ID, '1.2'),
-               Token(COLON),
-               Token(ID, '1.4'),
-               Token(COMMA),
-               Token(ID, '1.6'),
-               Token(PCT),
-               Token(ID, 'intel'),
-               Token(AT),
-               Token(ID, '12.1'),
-               Token(COLON),
-               Token(ID, '12.6'),
-               Token(ON),
-               Token(ID, 'debug'),
-               Token(OFF),
-               Token(ID, 'qt_4'),
-               Token(DEP),
-               Token(ID, 'stackwalker'),
-               Token(AT),
-               Token(ID, '8.1_1e')]
+complex_lex = [Token(sp.ID, 'mvapich_foo'),
+               Token(sp.DEP),
+               Token(sp.ID, '_openmpi'),
+               Token(sp.AT),
+               Token(sp.ID, '1.2'),
+               Token(sp.COLON),
+               Token(sp.ID, '1.4'),
+               Token(sp.COMMA),
+               Token(sp.ID, '1.6'),
+               Token(sp.PCT),
+               Token(sp.ID, 'intel'),
+               Token(sp.AT),
+               Token(sp.ID, '12.1'),
+               Token(sp.COLON),
+               Token(sp.ID, '12.6'),
+               Token(sp.ON),
+               Token(sp.ID, 'debug'),
+               Token(sp.OFF),
+               Token(sp.ID, 'qt_4'),
+               Token(sp.DEP),
+               Token(sp.ID, 'stackwalker'),
+               Token(sp.AT),
+               Token(sp.ID, '8.1_1e')]


class SpecSyntaxTest(unittest.TestCase):
@ -74,16 +74,16 @@ def check_parse(self, expected, spec=None, remove_arch=True):
""" """
if spec is None: if spec is None:
spec = expected spec = expected
output = spack.spec.parse(spec) output = sp.parse(spec)
parsed = (" ".join(str(spec) for spec in output)) parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed) self.assertEqual(expected, parsed)
def check_lex(self, tokens, spec): def check_lex(self, tokens, spec):
"""Check that the provided spec parses to the provided token list.""" """Check that the provided spec parses to the provided token list."""
lex_output = SpecLexer().lex(spec) lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output): for tok, spec_tok in zip(tokens, lex_output):
if tok.type == ID: if tok.type == sp.ID:
self.assertEqual(tok, spec_tok) self.assertEqual(tok, spec_tok)
else: else:
# Only check the type for non-identifiers. # Only check the type for non-identifiers.