Fixes for virtual packages.
- Added more tests
- cleaned up spec preorder traversal
- fixed concretization
This commit is contained in:
parent
90f2154a32
commit
e0c029c347
@@ -5,6 +5,9 @@
import spack


def setup_parser(subparser):
    subparser.add_argument('file', nargs='?', help="file to run")


description = "Launch an interpreter as spack would launch a command"


def python(parser, args):
@@ -16,6 +19,10 @@ def python(parser, args):
        with closing(open(startup_file)) as startup:
            console.runsource(startup.read(), startup_file, 'exec')

    console.interact("Spack version %s\nPython %s, %s %s"""
                     % (spack.spack_version, platform.python_version(),
                        platform.system(), platform.machine()))
    if args.file:
        with closing(open(args.file)) as file:
            console.runsource(file.read(), args.file, 'exec')
    else:
        console.interact("Spack version %s\nPython %s, %s %s"""
                         % (spack.spack_version, platform.python_version(),
                            platform.system(), platform.machine()))
@@ -12,8 +12,9 @@
import spack.arch
import spack.compilers
import spack.packages
import spack.spec
from spack.version import *
from spack.spec import *


class DefaultConcretizer(object):
@@ -31,12 +32,12 @@ def concretize_version(self, spec):
        if spec.versions.concrete:
            return

        pkg = spec.package

        # If there are known avaialble versions, return the most recent
        available_versions = pkg.available_versions
        if available_versions:
            spec.versions = ver([available_versions[-1]])
        # version that satisfies the spec
        pkg = spec.package
        valid_versions = pkg.available_versions.intersection(spec.versions)
        if valid_versions:
            spec.versions = ver([valid_versions[-1]])
        else:
            spec.versions = ver([pkg.version])
@@ -91,9 +92,11 @@ def choose_provider(self, spec, providers):
        """This is invoked for virtual specs. Given a spec with a virtual name,
        say "mpi", and a list of specs of possible providers of that spec,
        select a provider and return it.

        Default implementation just chooses the last provider in sorted order.
        """
        assert(spec.virtual)
        assert(providers)
        return sorted(providers)[-1]

        index = spack.spec.index_specs(providers)
        first_key = sorted(index.keys())[0]
        latest_version = sorted(index[first_key])[-1]
        return latest_version
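A sketch of what the new default choose_provider does (not part of the diff; the spec strings borrow the mock provider names added later in this commit and are only illustrative): it groups the candidate provider specs by package name with spack.spec.index_specs and returns the highest version of the alphabetically first provider.

    import spack.spec

    providers = [spack.spec.Spec('mpich@3.0.4'),
                 spack.spec.Spec('mpich2@1.5'),
                 spack.spec.Spec('zmpi@1.0')]

    index = spack.spec.index_specs(providers)   # {'mpich': [...], 'mpich2': [...], 'zmpi': [...]}
    first_key = sorted(index.keys())[0]         # 'mpich'
    chosen = sorted(index[first_key])[-1]       # highest mpich version in the list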
@@ -39,41 +39,53 @@ class ProviderIndex(object):
    Calling find_provider(spec) will find a package that provides a
    matching implementation of MPI.
    """
    def __init__(self, providers):
        """Takes a list of provider packagse and build an index of the virtual
        packages they provide."""
    def __init__(self, specs, **kwargs):
        restrict = kwargs.setdefault('restrict', False)
        self.providers = {}
        self.add(*providers)

        for spec in specs:
            if type(spec) != spack.spec.Spec:
                spec = spack.spec.Spec(spec)

    def add(self, *providers):
        """Look at the provided map on the provider packages, invert it and
        add it to this provider index."""
        for pkg in providers:
            if spec.virtual:
                continue

            pkg = spec.package
            for provided_spec, provider_spec in pkg.provided.iteritems():
                provided_name = provided_spec.name
                if provided_name not in self.providers:
                    self.providers[provided_name] = {}
                self.providers[provided_name][provided_spec] = provider_spec
                if provider_spec.satisfies(spec):
                    provided_name = provided_spec.name
                    if provided_name not in self.providers:
                        self.providers[provided_name] = {}

                    if restrict:
                        self.providers[provided_name][provided_spec] = spec

                    else:
                        # Before putting the spec in the map, constrain it so that
                        # it provides what was asked for.
                        constrained = spec.copy()
                        constrained.constrain(provider_spec)
                        self.providers[provided_name][provided_spec] = constrained


    def providers_for(self, *vpkg_specs):
        """Gives names of all packages that provide virtual packages
        with the supplied names."""
        packages = set()
        providers = set()
        for vspec in vpkg_specs:
            # Allow string names to be passed as input, as well as specs
            if type(vspec) == str:
                vspec = spack.spec.Spec(vspec)

            # Add all the packages that satisfy the vpkg spec.
            # Add all the providers that satisfy the vpkg spec.
            if vspec.name in self.providers:
                for provider_spec, pkg in self.providers[vspec.name].items():
                for provider_spec, spec in self.providers[vspec.name].items():
                    if provider_spec.satisfies(vspec):
                        packages.add(pkg)
                        providers.add(spec)

        # Return packages in order
        return sorted(packages)
        # Return providers in order
        return sorted(providers)
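A rough usage sketch of the reworked ProviderIndex (not part of the diff): it is now built from specs or spec strings rather than package objects, and restrict=True stores the matching spec as-is instead of constraining it by the 'provides' spec. Package names come from this commit's mock packages; exact results depend on the package database.

    import spack.packages as packages

    # Index every known package by the virtual specs it provides.
    index = packages.ProviderIndex(packages.all_package_names())

    # Specs of packages that provide a sufficiently new MPI.
    mpi_providers = index.providers_for('mpi@2.1')

    # Spec.normalize() instead builds a restricted index over the spec's own
    # dependencies:  packages.ProviderIndex(spec_deps.values(), restrict=True)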
@@ -86,7 +98,7 @@ def get(pkg_name):

def providers_for(vpkg_spec):
    if providers_for.index is None:
        providers_for.index = ProviderIndex(all_packages())
        providers_for.index = ProviderIndex(all_package_names())

    providers = providers_for.index.providers_for(vpkg_spec)
    if not providers:
@@ -5,6 +5,9 @@ class Mpich(Package):
    url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
    md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"

    list_url = "http://www.mpich.org/static/downloads/"
    list_depth = 2

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
        make()
@@ -43,9 +43,9 @@ def lex(self, text):

class Parser(object):
    """Base class for simple recursive descent parsers."""
    def __init__(self, lexer):
        self.tokens = iter([])    # iterators over tokens, handled in order. Starts empty.
        self.token = None         # last accepted token
        self.next = None          # next token
        self.tokens = iter([])    # iterators over tokens, handled in order. Starts empty.
        self.token = Token(None)  # last accepted token starts at beginning of file
        self.next = None          # next token
        self.lexer = lexer
        self.text = None
@@ -94,8 +94,8 @@ def _ensure_caller_is_spack_package():

def _parse_local_spec(spec_like, pkg_name):
    """Allow the user to omit the package name part of a spec in relations.
    e.g., provides('mpi@2.1', when='@1.9:') says that this package provides
    MPI 2.1 when its version is higher than 1.9.
    e.g., provides('mpi@2', when='@1.9:') says that this package provides
    MPI-3 when its version is higher than 1.9.
    """
    if type(spec_like) not in (str, Spec):
        raise TypeError('spec must be Spec or spec string. Found %s'
@@ -104,7 +104,7 @@ def _parse_local_spec(spec_like, pkg_name):
    if type(spec_like) == str:
        try:
            local_spec = Spec(spec_like)
        except ParseError:
        except spack.parse.ParseError:
            local_spec = Spec(pkg_name + spec_like)
            if local_spec.name != pkg_name: raise ValueError(
                "Invalid spec for package %s: %s" % (pkg_name, spec_like))
@@ -118,21 +118,17 @@ def _parse_local_spec(spec_like, pkg_name):
    return local_spec


def _make_relation(map_name):
    def relation_fun(*specs):
        _ensure_caller_is_spack_package()
        package_map = _caller_locals().setdefault(map_name, {})
        for string in specs:
            for spec in spack.spec.parse(string):
                package_map[spec.name] = spec
    return relation_fun


"""Adds a dependencies local variable in the locals of
   the calling class, based on args. """
depends_on = _make_relation("dependencies")
def depends_on(*specs):
    pkg = _ensure_caller_is_spack_package()

    dependencies = _caller_locals().setdefault('dependencies', {})
    for string in specs:
        for spec in spack.spec.parse(string):
            if pkg == spec.name:
                raise CircularDependencyError('depends_on', pkg)
            dependencies[spec.name] = spec


def provides(*specs, **kwargs):
@@ -153,13 +149,30 @@ def provides(*specs, **kwargs):
"""Packages can declare conflicts with other packages.
   This can be as specific as you like: use regular spec syntax.
   """
conflicts = _make_relation("conflicted")
def conflicts(*specs):
    # TODO: implement conflicts
    pass


class ScopeError(spack.error.SpackError):

class RelationError(spack.error.SpackError):
    """This is raised when something is wrong with a package relation."""
    def __init__(self, relation, message):
        super(RelationError, self).__init__(message)
        self.relation = relation


class ScopeError(RelationError):
    """This is raised when a relation is called from outside a spack package."""
    def __init__(self, relation):
        super(ScopeError, self).__init__(
            relation,
            "Cannot inovke '%s' from outside of a Spack package!" % relation)
        self.relation = relation


class CircularDependencyError(RelationError):
    """This is raised when something depends on itself."""
    def __init__(self, relation, package):
        super(CircularDependencyError, self).__init__(
            relation, "Package %s cannot depend on itself." % package)
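For context, the relation functions above run at class-definition time inside a package file; a minimal hedged sketch of how they are typically called (the package name here is hypothetical, not from the diff):

    from spack import *

    class Mylib(Package):
        homepage = "http://www.example.com"
        url      = "http://www.example.com/mylib-1.0.tar.gz"

        # Adds an entry to the class-level 'dependencies' map.
        depends_on('libelf')

        # depends_on('mylib') would raise CircularDependencyError,
        # since a package may not depend on itself.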
@@ -93,6 +93,19 @@
separators = '[%s]' % ''.join(color_formats.keys())


def index_specs(specs):
    """Take a list of specs and return a dict of lists. Dict is
    keyed by spec name and lists include all specs with the
    same name.
    """
    spec_dict = {}
    for spec in specs:
        if not spec.name in spec_dict:
            spec_dict[spec.name] = []
        spec_dict[spec.name].append(spec)
    return spec_dict
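index_specs is small, but choose_provider in the concretizer relies on it, so a quick sketch of its shape (not part of the diff; spec strings are illustrative):

    import spack.spec

    specs = [spack.spec.Spec('libelf@0.8.13'),
             spack.spec.Spec('libelf@0.8.12'),
             spack.spec.Spec('mpich@3.0.4')]

    by_name = spack.spec.index_specs(specs)
    # by_name == {'libelf': [<libelf@0.8.13>, <libelf@0.8.12>],
    #             'mpich':  [<mpich@3.0.4>]}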
def colorize_spec(spec):
    """Returns a spec colorized according to the colors specified in
    color_formats."""
@@ -360,11 +373,17 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
        """Generic preorder traversal of the DAG represented by this spec.
        This will yield each node in the spec. Options:

        unique [=True]
            When True (default), every node in the DAG is yielded only once.
            When False, the traversal will yield already visited
            nodes but not their children. This lets you see that a node
            points to an already-visited subgraph without descending into it.
        cover [=nodes|edges|paths]
            Determines how extensively to cover the dag. Possible vlaues:

            'nodes': Visit each node in the dag only once. Every node
                     yielded by this function will be unique.
            'edges': If a node has been visited once but is reached along a
                     new path from the root, yield it but do not descend
                     into it. This traverses each 'edge' in the DAG once.
            'paths': Explore every unique path reachable from the root.
                     This descends into visited subtrees and will yield
                     nodes twice if they're reachable by multiple paths.

        depth [=False]
            Defaults to False. When True, yields not just nodes in the
@@ -375,30 +394,37 @@ def preorder_traversal(self, visited=None, d=0, **kwargs):
            Allow a custom key function to track the identity of nodes
            in the traversal.

        noroot [=False]
            If true, this won't yield the root node, just its descendents.
        root [=True]
            If false, this won't yield the root node, just its descendents.
        """
        unique = kwargs.setdefault('unique', True)
        depth = kwargs.setdefault('depth', False)
        keyfun = kwargs.setdefault('key', id)
        noroot = kwargs.setdefault('noroot', False)
        depth = kwargs.setdefault('depth', False)
        key_fun = kwargs.setdefault('key', id)
        yield_root = kwargs.setdefault('root', True)
        cover = kwargs.setdefault('cover', 'nodes')

        cover_values = ('nodes', 'edges', 'paths')
        if cover not in cover_values:
            raise ValueError("Invalid value for cover: %s. Choices are %s"
                             % (cover, ",".join(cover_values)))

        if visited is None:
            visited = set()

        if keyfun(self) in visited:
            if not unique:
                yield (d, self) if depth else self
            return
        visited.add(keyfun(self))
        result = (d, self) if depth else self
        key = key_fun(self)

        if d > 0 or not noroot:
            yield (d, self) if depth else self
        if key in visited:
            if cover == 'nodes': return
            if yield_root or d > 0: yield result
            if cover == 'edges': return
        else:
            if yield_root or d > 0: yield result

        for key in sorted(self.dependencies.keys()):
            for result in self.dependencies[key].preorder_traversal(
                visited, d+1, **kwargs):
                yield result
        visited.add(key)
        for name in sorted(self.dependencies):
            child = self.dependencies[name]
            for elt in child.preorder_traversal(visited, d+1, **kwargs):
                yield elt


    def _concretize_helper(self, presets=None, visited=None):
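The spec_dag tests added later in this commit exercise the new traversal options; a condensed sketch of the call patterns they rely on (not part of the diff):

    from spack.spec import Spec

    spec = Spec('mpileaks ^zmpi')
    spec.normalize()

    # Default: each node once ('nodes' cover), root included.
    names = [s.name for s in spec.preorder_traversal()]

    # Visit each edge once; already-seen nodes are yielded but not descended into.
    edges = [(d, s.name)
             for d, s in spec.preorder_traversal(cover='edges', depth=True)]

    # Skip the root, as __str__ does when building the dependency string.
    deps = list(spec.preorder_traversal(key=lambda s: s.name, root=False))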
@@ -436,6 +462,14 @@ def _expand_virtual_packages(self):
        """Find virtual packages in this spec, replace them with providers,
        and normalize again to include the provider's (potentially virtual)
        dependencies. Repeat until there are no virtual deps.

        TODO: If a provider depends on something that conflicts with
        other dependencies in the spec being expanded, this can
        produce a conflicting spec. For example, if mpich depends
        on hwloc@:1.3 but something in the spec needs hwloc1.4:,
        then we should choose an MPI other than mpich. Cases like
        this are infrequent, but should implement this before it is
        a problem.
        """
        while True:
            virtuals =[v for v in self.preorder_traversal() if v.virtual]
@@ -545,8 +579,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
            providers = provider_index.providers_for(pkg_dep)

            # If there is a provider for the vpkg, then use that instead of
            # the virtual package. If there isn't a provider, just merge
            # constraints on the virtual package.
            # the virtual package.
            if providers:
                # Can't have multiple providers for the same thing in one spec.
                if len(providers) > 1:
@@ -613,7 +646,7 @@ def normalize(self):
        # Remove virtual deps that are already provided by something in the spec
        spec_packages = [d.package for d in spec_deps.values() if not d.virtual]

        index = packages.ProviderIndex(spec_packages)
        index = packages.ProviderIndex(spec_deps.values(), restrict=True)
        visited = set()
        self._normalize_helper(visited, spec_deps, index)

@@ -666,6 +699,9 @@ def constrain(self, other):


    def satisfies(self, other):
        if type(other) != Spec:
            other = Spec(other)

        def sat(attribute):
            s = getattr(self, attribute)
            o = getattr(other, attribute)
@@ -716,6 +752,7 @@ def copy(self, **kwargs):
        clone._dup(self, **kwargs)
        return clone


    @property
    def version(self):
        if not self.concrete:
@@ -723,6 +760,27 @@ def version(self):
        return self.versions[0]

    def __getitem__(self, name):
        """TODO: does the way this is written make sense?"""
        for spec in self.preorder_traversal():
            if spec.name == name:
                return spec

        raise KeyError("No spec with name %s in %s" % (name, self))


    def __contains__(self, spec):
        """True if this spec has any dependency that satisfies the supplied
        spec."""
        if type(spec) != Spec:
            spec = Spec(spec)

        for s in self.preorder_traversal():
            if s.satisfies(spec):
                return True
        return False
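The new __getitem__ and __contains__ make a spec queryable by name or by sub-spec; test_contains below uses them roughly like this (sketch, not part of the diff):

    from spack.spec import Spec

    spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')

    Spec('libelf@1.8.11') in spec    # True: some node satisfies the sub-spec
    Spec('libelf@1.8.12') in spec    # False
    spec['libelf']                   # the libelf node; raises KeyError if absent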
    def _cmp_key(self):
        return (self.name, self.versions, self.variants,
                self.architecture, self.compiler, self.dependencies)
@@ -757,16 +815,20 @@ def tree(self, **kwargs):
        """Prints out this spec and its dependencies, tree-formatted
        with indentation."""
        color = kwargs.get('color', False)
        depth = kwargs.get('depth', False)
        cover = kwargs.get('cover', 'paths')

        out = ""
        cur_id = 0
        ids = {}
        for d, node in self.preorder_traversal(unique=False, depth=True):
        for d, node in self.preorder_traversal(cover=cover, depth=True):
            if depth:
                out += "%-4d" % d
            if not id(node) in ids:
                cur_id += 1
                ids[id(node)] = cur_id
            out += str(ids[id(node)])
            out += " "+ (" " * d)
            out += "%-4d" % ids[id(node)]
            out += (" " * d)
            out += node.str_no_deps(color=color) + "\n"
        return out

@@ -777,7 +839,7 @@ def __repr__(self):

    def __str__(self):
        byname = lambda d: d.name
        deps = self.preorder_traversal(key=byname, noroot=True)
        deps = self.preorder_traversal(key=byname, root=False)
        sorted_deps = sorted(deps, key=byname)
        dep_string = ''.join("^" + dep.str_no_deps() for dep in sorted_deps)
        return self.str_no_deps() + dep_string
@@ -1011,8 +1073,8 @@ class MultipleProviderError(SpecError):
    """
    def __init__(self, vpkg, providers):
        """Takes the name of the vpkg"""
        super(NoProviderError, self).__init__(
            "Multiple providers found for vpkg '%s': %s"
        super(MultipleProviderError, self).__init__(
            "Multiple providers found for '%s': %s"
            % (vpkg, [str(s) for s in providers]))
        self.vpkg = vpkg
        self.providers = providers
@@ -1,4 +1,6 @@
import unittest

import spack.packages as packages
from spack.spec import Spec
from spack.test.mock_packages_test import *

@@ -35,8 +37,99 @@ def test_concretize_no_deps(self):


    def test_concretize_dag(self):
        spec = Spec('mpileaks')
        spec.normalize()

        self.check_concretize('callpath')
        self.check_concretize('mpileaks')
        self.check_concretize('libelf')


    def test_concretize_with_virtual(self):
        self.check_concretize('mpileaks ^mpi')
        self.check_concretize('mpileaks ^mpi@:1.1')
        self.check_concretize('mpileaks ^mpi@2:')
        self.check_concretize('mpileaks ^mpi@2.1')
        self.check_concretize('mpileaks ^mpi@2.2')
        self.check_concretize('mpileaks ^mpi@2.2')
        self.check_concretize('mpileaks ^mpi@:1')
        self.check_concretize('mpileaks ^mpi@1.2:2')


    def test_concretize_with_restricted_virtual(self):
        self.check_concretize('mpileaks ^mpich2')

        concrete = self.check_concretize('mpileaks ^mpich2@1.1')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.1'))

        concrete = self.check_concretize('mpileaks ^mpich2@1.2')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.2'))

        concrete = self.check_concretize('mpileaks ^mpich2@:1.5')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.5'))

        concrete = self.check_concretize('mpileaks ^mpich2@:1.3')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.3'))

        concrete = self.check_concretize('mpileaks ^mpich2@:1.2')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.2'))

        concrete = self.check_concretize('mpileaks ^mpich2@:1.1')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.1'))

        concrete = self.check_concretize('mpileaks ^mpich2@1.1:')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.1:'))

        concrete = self.check_concretize('mpileaks ^mpich2@1.5:')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.5:'))

        concrete = self.check_concretize('mpileaks ^mpich2@1.3.1:1.4')
        self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.3.1:1.4'))


    def test_concretize_with_provides_when(self):
        """Make sure insufficient versions of MPI are not in providers list when
        we ask for some advanced version.
        """
        self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
                                for spec in packages.providers_for('mpi@2.1')))

        self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
                                for spec in packages.providers_for('mpi@2.2')))

        self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
                                for spec in packages.providers_for('mpi@2.2')))

        self.assertTrue(not any(spec.satisfies('mpich@:1')
                                for spec in packages.providers_for('mpi@2')))

        self.assertTrue(not any(spec.satisfies('mpich@:1')
                                for spec in packages.providers_for('mpi@3')))

        self.assertTrue(not any(spec.satisfies('mpich2')
                                for spec in packages.providers_for('mpi@3')))


    def test_virtual_is_fully_expanded_for_callpath(self):
        # force dependence on fake "zmpi" by asking for MPI 10.0
        spec = Spec('callpath ^mpi@10.0')
        self.assertIn('mpi', spec.dependencies)
        self.assertNotIn('fake', spec)

        spec.concretize()

        self.assertIn('zmpi', spec.dependencies)
        self.assertNotIn('mpi', spec)
        self.assertIn('fake', spec.dependencies['zmpi'])


    def test_virtual_is_fully_expanded_for_mpileaks(self):
        spec = Spec('mpileaks ^mpi@10.0')
        self.assertIn('mpi', spec.dependencies)
        self.assertNotIn('fake', spec)

        spec.concretize()

        self.assertIn('zmpi', spec.dependencies)
        self.assertIn('callpath', spec.dependencies)
        self.assertIn('zmpi', spec.dependencies['callpath'].dependencies)
        self.assertIn('fake', spec.dependencies['callpath'].dependencies['zmpi'].dependencies)

        self.assertNotIn('mpi', spec)
lib/spack/spack/test/mock_packages/fake.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from spack import *

class Fake(Package):
    homepage = "http://www.fake-spack-example.org"
    url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz"
    md5 = "foobarbaz"

    versions = '1.0'

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")
@@ -8,9 +8,10 @@ class Mpich(Package):
    list_url = "http://www.mpich.org/static/downloads/"
    list_depth = 2

    versions = '1.0.3, 1.3.2p1, 1.4.1p1, 3.0.4, 3.1b1'
    versions = '3.0.4, 3.0.3, 3.0.2, 3.0.1, 3.0'

    provides('mpi')
    provides('mpi@:3', when='@3:')
    provides('mpi@:1', when='@1:')

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
lib/spack/spack/test/mock_packages/mpich2.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from spack import *

class Mpich2(Package):
    homepage = "http://www.mpich.org"
    url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
    md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"

    list_url = "http://www.mpich.org/static/downloads/"
    list_depth = 2

    versions = '1.5, 1.4, 1.3, 1.2, 1.1, 1.0'

    provides('mpi@:2.0')
    provides('mpi@:2.1', when='@1.1:')
    provides('mpi@:2.2', when='@1.2:')

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")
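Together, the mpich and mpich2 mock packages encode which MPI versions each release provides, and test_concretize_with_provides_when below checks that providers_for honors the 'when' conditions. A hedged sketch of the expected behavior (not part of the diff):

    import spack.packages as packages

    # mpich2 provides mpi@:2.2 only for mpich2@1.2:, so no mpich2@:1.1
    # spec should appear among the providers of mpi@2.2.
    providers = packages.providers_for('mpi@2.2')
    assert not any(p.satisfies('mpich2@:1.1') for p in providers)

    # No version of mpich2 provides mpi@3 at all.
    assert not any(p.satisfies('mpich2') for p in packages.providers_for('mpi@3'))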
lib/spack/spack/test/mock_packages/zmpi.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from spack import *

class Zmpi(Package):
    """This is a fake MPI package used to demonstrate virtual package providers
    with dependencies."""
    homepage = "http://www.spack-fake-zmpi.org"
    url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz"
    md5 = "foobarbaz"

    versions = '1.0'

    provides('mpi@10.0:')
    depends_on('fake')

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")
@@ -28,42 +28,49 @@ def test_conflicting_package_constraints(self):
                          spec.package.validate_dependencies)


    def test_preorder_traversal(self):
        dag = Spec('mpileaks',
                   Spec('callpath',
                        Spec('dyninst',
                             Spec('libdwarf',
                                  Spec('libelf')),
                             Spec('libelf')),
                        Spec('mpich')),
                   Spec('mpich'))
    def test_unique_node_traversal(self):
        dag = Spec('mpileaks ^zmpi')
        dag.normalize()

        unique_names = [
            'mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'mpich']
        unique_depths = [0,1,2,3,4,2]
        names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
                 'zmpi', 'fake']
        pairs = zip([0,1,2,3,4,2,3], names)

        non_unique_names = [
            'mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'libelf',
            'mpich', 'mpich']
        non_unique_depths = [0,1,2,3,4,3,2,1]
        traversal = dag.preorder_traversal()
        self.assertListEqual([x.name for x in traversal], names)

        self.assertListEqual(
            [x.name for x in dag.preorder_traversal()],
            unique_names)
        traversal = dag.preorder_traversal(depth=True)
        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)

        self.assertListEqual(
            [(x, y.name) for x,y in dag.preorder_traversal(depth=True)],
            zip(unique_depths, unique_names))

        self.assertListEqual(
            [x.name for x in dag.preorder_traversal(unique=False)],
            non_unique_names)
    def test_unique_edge_traversal(self):
        dag = Spec('mpileaks ^zmpi')
        dag.normalize()

        self.assertListEqual(
            [(x, y.name) for x,y in dag.preorder_traversal(unique=False, depth=True)],
            zip(non_unique_depths, non_unique_names))
        names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
                 'libelf', 'zmpi', 'fake', 'zmpi']
        pairs = zip([0,1,2,3,4,3,2,3,1], names)

        traversal = dag.preorder_traversal(cover='edges')
        self.assertListEqual([x.name for x in traversal], names)

        traversal = dag.preorder_traversal(cover='edges', depth=True)
        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)


    def test_unique_path_traversal(self):
        dag = Spec('mpileaks ^zmpi')
        dag.normalize()

        names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
                 'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
        pairs = zip([0,1,2,3,4,3,2,3,1,2], names)

        traversal = dag.preorder_traversal(cover='paths')
        self.assertListEqual([x.name for x in traversal], names)

        traversal = dag.preorder_traversal(cover='paths', depth=True)
        self.assertListEqual([(x, y.name) for x,y in traversal], pairs)


    def test_conflicting_spec_constraints(self):
@@ -270,3 +277,14 @@ def test_normalize_with_virtual_package(self):
                Spec('mpi')), Spec('mpi'))

        self.assertEqual(str(spec), str(expected_normalized))


    def test_contains(self):
        spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
        self.assertIn(Spec('mpi'), spec)
        self.assertIn(Spec('libelf'), spec)
        self.assertIn(Spec('libelf@1.8.11'), spec)
        self.assertNotIn(Spec('libelf@1.8.12'), spec)
        self.assertIn(Spec('libdwarf'), spec)
        self.assertNotIn(Spec('libgoblin'), spec)
        self.assertIn(Spec('mpileaks'), spec)
@@ -113,3 +113,16 @@ def copy(self):
        for key in self:
            clone[key] = self[key].copy()
        return clone


def in_function(function_name):
    """True if the caller was called from some function with
       the supplied Name, False otherwise."""
    stack = inspect.stack()
    try:
        for elt in stack[2:]:
            if elt[3] == function_name:
                return True
        return False
    finally:
        del stack
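in_function walks the interpreter call stack, skipping its own frame and the immediate caller; a tiny hedged example of the intended use (the function names are illustrative, and in_function is assumed to be imported from the utility module this hunk modifies):

    def outer():
        return inner()

    def inner():
        # True: 'outer' appears on the call stack above the immediate caller.
        return in_function('outer')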