Directive inheritance: laziness for the win (#2623)

* inheritance of directives: use a metaclass to inject the attributes created by directives into packages, and make directives lazy

* _deptypes -> dependency_types
* use a metaclass to inject directives into packages
* directives are lazy

fixes #2466

* directives.py: allow for multiple inheritance of directives; added blank lines as suggested by @tgamblin

* added a test for simple inheritance of directives

* Minor improvements requested by @tgamblin:

  CMakePackage: import names from spack.directives
  directives: wrap __new__ to respect PEP 8

* Refactoring requested by @tgamblin:

  directives: removed global variables in favor of class variables; simplified the directive interface (a directive now returns a callable on a package, or a list of them).
Massimiliano Culpo 2016-12-28 21:37:02 +01:00 committed by Todd Gamblin
parent 857dac88c8
commit 17b13b161b
11 changed files with 282 additions and 203 deletions
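
In short, a directive such as depends_on no longer pokes attributes into the calling module's namespace at import time; it returns a callable that is parked on a staging list, and a metaclass replays those callables on the package class once it (and every base class) actually exists, which is what makes directives inheritable. The following is a minimal, self-contained sketch of that pattern, not Spack's implementation: the names LazyDirectiveMeta, lazy_directive, _staging and _pending are invented for illustration; the real code in directives.py (shown in the diff below) uses DirectiveMetaMixin and _directives_to_be_executed.

# Minimal sketch of the lazy-directive pattern described above.
# All names here are simplified stand-ins, not Spack's actual API.
import functools


class LazyDirectiveMeta(type):
    """Collect directive callables at module scope and run them lazily,
    once the class that used them (and all of its bases) exists."""

    _staging = []             # directives waiting at module scope
    _directive_names = set()  # per-class dicts every directive may fill

    def __new__(mcs, name, bases, attr_dict):
        # Inherit pending directives from base classes, oldest base first.
        pending = []
        for base in reversed(bases):
            pending.extend(getattr(base, '_pending', []))
        # Pick up the directives staged while this class body executed.
        pending.extend(LazyDirectiveMeta._staging)
        LazyDirectiveMeta._staging = []
        attr_dict['_pending'] = pending
        cls = super(LazyDirectiveMeta, mcs).__new__(mcs, name, bases, attr_dict)
        # Give the class fresh dictionaries, then execute the directives lazily.
        for d in LazyDirectiveMeta._directive_names:
            setattr(cls, d, {})
        for command in cls._pending:
            command(cls)
        return cls


def lazy_directive(dict_name):
    """A directive builds and returns a callable acting on a class; the
    decorator only stages that callable instead of running it."""
    LazyDirectiveMeta._directive_names.add(dict_name)

    def _decorator(fn):
        @functools.wraps(fn)
        def _wrapper(*args, **kwargs):
            LazyDirectiveMeta._staging.append(fn(*args, **kwargs))
        return _wrapper
    return _decorator


@lazy_directive('dependencies')
def depends_on(spec):
    def _execute(cls):
        cls.dependencies[spec] = spec
    return _execute


# Real packages get the metaclass through the class statement; calling it
# explicitly keeps this sketch runnable on both Python 2 and Python 3.
depends_on('cmake')
Base = LazyDirectiveMeta('Base', (object,), {})

depends_on('mpi')
Child = LazyDirectiveMeta('Child', (Base,), {})

assert sorted(Base.dependencies) == ['cmake']
assert sorted(Child.dependencies) == ['cmake', 'mpi']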

View File

@@ -342,38 +342,6 @@ def match(string):
     return match
 
 
-def DictWrapper(dictionary):
-    """Returns a class that wraps a dictionary and enables it to be used
-       like an object."""
-    class wrapper(object):
-
-        def __getattr__(self, name):
-            return dictionary[name]
-
-        def __setattr__(self, name, value):
-            dictionary[name] = value
-
-        def setdefault(self, *args):
-            return dictionary.setdefault(*args)
-
-        def get(self, *args):
-            return dictionary.get(*args)
-
-        def keys(self):
-            return dictionary.keys()
-
-        def values(self):
-            return dictionary.values()
-
-        def items(self):
-            return dictionary.items()
-
-        def __iter__(self):
-            return iter(dictionary)
-
-    return wrapper()
-
-
 def dedupe(sequence):
     """Yields a stable de-duplication of an hashable sequence

View File

@@ -30,6 +30,7 @@
 import llnl.util.tty as tty
 import spack.build_environment
 from llnl.util.filesystem import working_dir, join_path
+from spack.directives import depends_on
 from spack.package import PackageBase
@@ -49,6 +50,8 @@ class CMakePackage(PackageBase):
     # build-system class we are using
     build_system_class = 'CMakePackage'
 
+    depends_on('cmake', type='build')
+
     def build_type(self):
         """Override to provide the correct build_type in case a complex
         logic is needed
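
Because directives are now inherited, the cmake build dependency declared here on CMakePackage reaches every CMake-based package automatically, which is why the dealii, openjpeg and yaml-cpp diffs further down simply delete their own depends_on('cmake', type='build') lines. A hypothetical package written against this change might look roughly like the following (the Foo name and URL are made up for illustration):

from spack import *


class Foo(CMakePackage):
    """Hypothetical CMake-based package: the 'cmake' build dependency is
    inherited from CMakePackage and no longer declared here."""

    homepage = "http://www.example.com"
    url = "http://www.example.com/foo-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def cmake_args(self):
        # Only package-specific configuration remains.
        return []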

View File

@@ -46,41 +46,94 @@ class OpenMpi(Package):
 """
-import re
-import os.path
+import collections
 import functools
+import inspect
+import os.path
+import re
 
-from llnl.util.lang import *
-from llnl.util.filesystem import join_path
+import llnl.util.lang
 
 import spack
-import spack.spec
 import spack.error
+import spack.spec
 import spack.url
-from spack.version import Version
-from spack.patch import Patch
-from spack.variant import Variant
-from spack.spec import Spec, parse_anonymous_spec
-from spack.resource import Resource
+from llnl.util.filesystem import join_path
 from spack.fetch_strategy import from_kwargs
+from spack.patch import Patch
+from spack.resource import Resource
+from spack.spec import Spec, parse_anonymous_spec
+from spack.variant import Variant
+from spack.version import Version
 
-__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version', 'variant',
-           'resource']
+__all__ = []
 
-#
-# This is a list of all directives, built up as they are defined in
-# this file.
-#
-directives = {}
-
-
-def ensure_dicts(pkg):
-    """Ensure that a package has all the dicts required by directives."""
-    for name, d in directives.items():
-        d.ensure_dicts(pkg)
-
-
-class directive(object):
+class DirectiveMetaMixin(type):
+    """Flushes the directives that were temporarily stored in the staging
+    area into the package.
+    """
+
+    # Set of all known directives
+    _directive_names = set()
+    _directives_to_be_executed = []
+
+    def __new__(mcs, name, bases, attr_dict):
+        # Initialize the attribute containing the list of directives
+        # to be executed. Here we go reversed because we want to execute
+        # commands:
+        # 1. in the order they were defined
+        # 2. following the MRO
+        attr_dict['_directives_to_be_executed'] = []
+        for base in reversed(bases):
+            try:
+                directive_from_base = base._directives_to_be_executed
+                attr_dict['_directives_to_be_executed'].extend(
+                    directive_from_base
+                )
+            except AttributeError:
+                # The base class didn't have the required attribute.
+                # Continue searching
+                pass
+        # De-duplicates directives from base classes
+        attr_dict['_directives_to_be_executed'] = [
+            x for x in llnl.util.lang.dedupe(
+                attr_dict['_directives_to_be_executed']
+            )
+        ]
+        # Move things to be executed from module scope (where they
+        # are collected first) to class scope
+        if DirectiveMetaMixin._directives_to_be_executed:
+            attr_dict['_directives_to_be_executed'].extend(
+                DirectiveMetaMixin._directives_to_be_executed
+            )
+            DirectiveMetaMixin._directives_to_be_executed = []
+        return super(DirectiveMetaMixin, mcs).__new__(
+            mcs, name, bases, attr_dict
+        )
+
+    def __init__(cls, name, bases, attr_dict):
+        # The class is being created: if it is a package we must ensure
+        # that the directives are called on the class to set it up
+        module = inspect.getmodule(cls)
+        if 'spack.pkg' in module.__name__:
+            # Package name as taken
+            # from llnl.util.lang.get_calling_module_name
+            pkg_name = module.__name__.split('.')[-1]
+            setattr(cls, 'name', pkg_name)
+            # Ensure the presence of the dictionaries associated
+            # with the directives
+            for d in DirectiveMetaMixin._directive_names:
+                setattr(cls, d, {})
+            # Lazy execution of directives
+            for command in cls._directives_to_be_executed:
+                command(cls)
+        super(DirectiveMetaMixin, cls).__init__(name, bases, attr_dict)
+
+    @staticmethod
+    def directive(dicts=None):
         """Decorator for Spack directives.
 
         Spack directives allow you to modify a package while it is being
@@ -119,49 +172,47 @@ class Foo(Package):
         packages to the core.
         """
+        global __all__
 
-    def __init__(self, dicts=None):
         if isinstance(dicts, basestring):
             dicts = (dicts, )
-        elif type(dicts) not in (list, tuple):
-            raise TypeError(
-                "dicts arg must be list, tuple, or string. Found %s" %
-                type(dicts))
+        if not isinstance(dicts, collections.Sequence):
+            message = "dicts arg must be list, tuple, or string. Found {0}"
+            raise TypeError(message.format(type(dicts)))
+        # Add the dictionary names if not already there
+        DirectiveMetaMixin._directive_names |= set(dicts)
 
-        self.dicts = dicts
-
-    def ensure_dicts(self, pkg):
-        """Ensure that a package has the dicts required by this directive."""
-        for d in self.dicts:
-            if not hasattr(pkg, d):
-                setattr(pkg, d, {})
-
-            attr = getattr(pkg, d)
-            if not isinstance(attr, dict):
-                raise spack.error.SpackError(
-                    "Package %s has non-dict %s attribute!" % (pkg, d))
-
-    def __call__(self, directive_function):
-        directives[directive_function.__name__] = self
-
-        @functools.wraps(directive_function)
-        def wrapped(*args, **kwargs):
-            pkg = DictWrapper(caller_locals())
-            self.ensure_dicts(pkg)
-
-            pkg.name = get_calling_module_name()
-            return directive_function(pkg, *args, **kwargs)
-
-        return wrapped
+        # This decorator just returns the directive functions
+        def _decorator(decorated_function):
+            __all__.append(decorated_function.__name__)
+
+            @functools.wraps(decorated_function)
+            def _wrapper(*args, **kwargs):
+                # A directive returns either something that is callable on a
+                # package or a sequence of them
+                values = decorated_function(*args, **kwargs)
+                # ...so if it is not a sequence make it so
+                if not isinstance(values, collections.Sequence):
+                    values = (values, )
+                DirectiveMetaMixin._directives_to_be_executed.extend(values)
+            return _wrapper
+        return _decorator
+
+
+directive = DirectiveMetaMixin.directive
 
 
 @directive('versions')
-def version(pkg, ver, checksum=None, **kwargs):
+def version(ver, checksum=None, **kwargs):
     """Adds a version and metadata describing how to fetch it.
 
     Metadata is just stored as a dict in the package's versions
    dictionary.  Package must turn it into a valid fetch strategy
    later.
    """
+    def _execute(pkg):
        # TODO: checksum vs md5 distinction is confusing -- fix this.
        # special case checksum for backward compatibility
        if checksum:
@@ -169,6 +220,7 @@ def version(pkg, ver, checksum=None, **kwargs):
        # Store kwargs for the package to later with a fetch_strategy.
        pkg.versions[Version(ver)] = kwargs
+    return _execute
 
 
def _depends_on(pkg, spec, when=None, type=None):
@@ -199,7 +251,7 @@ def _depends_on(pkg, spec, when=None, type=None):
    if pkg.name == dep_spec.name:
        raise CircularReferenceError('depends_on', pkg.name)
 
-    pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
+    pkg_deptypes = pkg.dependency_types.setdefault(dep_spec.name, set())
    for deptype in type:
        pkg_deptypes.add(deptype)
@@ -210,17 +262,19 @@ def _depends_on(pkg, spec, when=None, type=None):
    conditions[when_spec] = dep_spec
 
 
-@directive(('dependencies', '_deptypes'))
-def depends_on(pkg, spec, when=None, type=None):
+@directive(('dependencies', 'dependency_types'))
+def depends_on(spec, when=None, type=None):
    """Creates a dict of deps with specs defining when they apply.
 
    This directive is to be used inside a Package definition to declare
    that the package requires other packages to be built first.
    @see The section "Dependency specs" in the Spack Packaging Guide."""
+    def _execute(pkg):
        _depends_on(pkg, spec, when=when, type=type)
+    return _execute
 
 
-@directive(('extendees', 'dependencies', '_deptypes'))
-def extends(pkg, spec, **kwargs):
+@directive(('extendees', 'dependencies', 'dependency_types'))
+def extends(spec, **kwargs):
    """Same as depends_on, but dependency is symlinked into parent prefix.
 
    This is for Python and other language modules where the module
@@ -234,20 +288,24 @@ def extends(pkg, spec, **kwargs):
    mechanism.
 
    """
+    def _execute(pkg):
        if pkg.extendees:
-            raise DirectiveError("Packages can extend at most one other package.")
+            msg = 'Packages can extend at most one other package.'
+            raise DirectiveError(msg)
 
        when = kwargs.pop('when', pkg.name)
        _depends_on(pkg, spec, when=when)
        pkg.extendees[spec] = (Spec(spec), kwargs)
+    return _execute
 
 
 @directive('provided')
-def provides(pkg, *specs, **kwargs):
+def provides(*specs, **kwargs):
    """Allows packages to provide a virtual dependency.  If a package provides
    'mpi', other packages can declare that they depend on "mpi", and spack
    can use the providing package to satisfy the dependency.
    """
+    def _execute(pkg):
        spec_string = kwargs.get('when', pkg.name)
        provider_spec = parse_anonymous_spec(spec_string, pkg.name)
@@ -256,41 +314,43 @@ def provides(pkg, *specs, **kwargs):
            if pkg.name == provided_spec.name:
                raise CircularReferenceError('depends_on', pkg.name)
            pkg.provided[provided_spec] = provider_spec
+    return _execute
 
 
 @directive('patches')
-def patch(pkg, url_or_filename, level=1, when=None, **kwargs):
+def patch(url_or_filename, level=1, when=None, **kwargs):
    """Packages can declare patches to apply to source.  You can
    optionally provide a when spec to indicate that a particular
    patch should only be applied when the package's spec meets
    certain conditions (e.g. a particular version).
    """
-    if when is None:
-        when = pkg.name
-    when_spec = parse_anonymous_spec(when, pkg.name)
+    def _execute(pkg):
+        constraint = pkg.name if when is None else when
+        when_spec = parse_anonymous_spec(constraint, pkg.name)
 
        cur_patches = pkg.patches.setdefault(when_spec, [])
        # if this spec is identical to some other, then append this
        # patch to the existing list.
        cur_patches.append(Patch.create(pkg, url_or_filename, level, **kwargs))
+    return _execute
 
 
 @directive('variants')
-def variant(pkg, name, default=False, description=""):
+def variant(name, default=False, description=""):
    """Define a variant for the package.  Packager can specify a default
    value (on or off) as well as a text description."""
-    default = default
    description = str(description).strip()
 
+    def _execute(pkg):
        if not re.match(spack.spec.identifier_re, name):
-            raise DirectiveError("Invalid variant name in %s: '%s'" %
-                                 (pkg.name, name))
+            msg = 'Invalid variant name in {0}: \'{1}\''
+            raise DirectiveError(msg.format(pkg.name, name))
 
        pkg.variants[name] = Variant(default, description)
+    return _execute
 
 
 @directive('resources')
-def resource(pkg, **kwargs):
+def resource(**kwargs):
    """Define an external resource to be fetched and staged when building the
    package. Based on the keywords present in the dictionary the appropriate
    FetchStrategy will be used for the resource. Resources are fetched and
@@ -306,29 +366,36 @@ def resource(pkg, **kwargs):
        * 'placement' : (optional) gives the possibility to fine tune how the
          resource is moved into the main package stage area.
    """
+    def _execute(pkg):
        when = kwargs.get('when', pkg.name)
        destination = kwargs.get('destination', "")
        placement = kwargs.get('placement', None)
 
        # Check if the path is relative
        if os.path.isabs(destination):
-            message = "The destination keyword of a resource directive can't be"
-            " an absolute path.\n"
+            message = 'The destination keyword of a resource directive '
+            'can\'t be an absolute path.\n'
            message += "\tdestination : '{dest}\n'".format(dest=destination)
            raise RuntimeError(message)
 
        # Check if the path falls within the main package stage area
        test_path = 'stage_folder_root'
-        normalized_destination = os.path.normpath(join_path(test_path, destination)
+        normalized_destination = os.path.normpath(
+            join_path(test_path, destination)
        )  # Normalized absolute path
 
        if test_path not in normalized_destination:
-            message = "The destination folder of a resource must fall within the"
-            " main package stage directory.\n"
+            message = "The destination folder of a resource must fall "
+            "within the main package stage directory.\n"
            message += "\tdestination : '{dest}'\n".format(dest=destination)
            raise RuntimeError(message)
 
        when_spec = parse_anonymous_spec(when, pkg.name)
        resources = pkg.resources.setdefault(when_spec, [])
        name = kwargs.get('name')
        fetcher = from_kwargs(**kwargs)
        resources.append(Resource(name, fetcher, destination, placement))
+    return _execute
 
 
 class DirectiveError(spack.error.SpackError):
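
The pattern every directive follows after this refactoring is visible above: the directive body does no work when the package file is executed; it only closes over its arguments and returns an _execute(pkg) callable (or a sequence of them) for DirectiveMetaMixin to apply once the package class exists. A new directive would be written in the same shape; the sketch below is purely illustrative, and the maintainer directive with its maintainers dictionary are invented names, not part of Spack:

@directive('maintainers')
def maintainer(handle):
    """Hypothetical directive recording who maintains a package.

    Nothing runs here at import time; the returned callable is staged and
    later executed on the package class by DirectiveMetaMixin, so a class
    inheriting from a package that used this directive inherits the entry.
    """
    def _execute(pkg):
        # 'maintainers' is the dict guaranteed by the @directive decorator.
        pkg.maintainers[handle] = True
    return _execute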

View File

@@ -134,7 +134,7 @@ def copy(self):
         return other
 
 
-class PackageMeta(type):
+class PackageMeta(spack.directives.DirectiveMetaMixin):
     """Conveniently transforms attributes to permit extensible phases
 
     Iterates over the attribute 'phases' and creates / updates private
@@ -232,10 +232,6 @@ def sanity_check(cls, *args):
             _append_checks('sanity_checks')
         return super(PackageMeta, meta).__new__(meta, name, bases, attr_dict)
 
-    def __init__(cls, name, bases, dict):
-        type.__init__(cls, name, bases, dict)
-        spack.directives.ensure_dicts(cls)
-
 
 class PackageBase(object):
     """This is the superclass for all spack packages.
@@ -781,7 +777,7 @@ def fetcher(self, f):
     def dependencies_of_type(self, *deptypes):
         """Get subset of the dependencies with certain types."""
         return dict((name, conds) for name, conds in self.dependencies.items()
-                    if any(d in self._deptypes[name] for d in deptypes))
+                    if any(d in self.dependency_types[name] for d in deptypes))
 
     @property
     def extendee_spec(self):

View File

@@ -1762,7 +1762,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
         for dep_name in pkg.dependencies:
             # Do we depend on dep_name?  If so pkg_dep is not None.
             pkg_dep = self._evaluate_dependency_conditions(dep_name)
-            deptypes = pkg._deptypes[dep_name]
+            deptypes = pkg.dependency_types[dep_name]
             # If pkg_dep is a dependency, merge it.
             if pkg_dep:
                 changed |= self._merge_dependency(

View File

@@ -255,7 +255,7 @@ def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps):
         # Change dep spec
         # XXX(deptype): handle deptypes.
         pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
-        pkg._deptypes[spec.name] = set(deptypes)
+        pkg.dependency_types[spec.name] = set(deptypes)
 
     def cleanmock(self):
         """Restore the real packages path after any test."""

View File

@@ -27,6 +27,7 @@
 from spack.repository import Repo
 from spack.test.mock_packages_test import *
 from spack.util.naming import mod_to_class
+from spack.spec import *
 
 
 class PackagesTest(MockPackagesTest):
@@ -89,3 +90,28 @@ def test_import_namespace_container_modules(self):
         import spack.pkg.builtin.mock        # noqa
         import spack.pkg.builtin.mock as m   # noqa
         from spack.pkg.builtin import mock   # noqa
+
+    def test_inheritance_of_directives(self):
+        p = spack.repo.get('simple_inheritance')
+
+        # Check dictionaries that should have been filled by directives
+        self.assertEqual(len(p.dependencies), 3)
+        self.assertTrue('cmake' in p.dependencies)
+        self.assertTrue('openblas' in p.dependencies)
+        self.assertTrue('mpi' in p.dependencies)
+        self.assertEqual(len(p.provided), 2)
+
+        # Check that Spec instantiation behaves as we expect
+        s = Spec('simple_inheritance')
+        s.concretize()
+        self.assertTrue('^cmake' in s)
+        self.assertTrue('^openblas' in s)
+        self.assertTrue('+openblas' in s)
+        self.assertTrue('mpi' in s)
+
+        s = Spec('simple_inheritance~openblas')
+        s.concretize()
+        self.assertTrue('^cmake' in s)
+        self.assertTrue('^openblas' not in s)
+        self.assertTrue('~openblas' in s)
+        self.assertTrue('mpi' in s)

View File

@@ -0,0 +1,24 @@
+from spack import *
+
+
+class BaseWithDirectives(Package):
+
+    depends_on('cmake', type='build')
+    depends_on('mpi')
+    variant('openblas', description='Activates openblas', default=True)
+    provides('service1')
+
+
+class SimpleInheritance(BaseWithDirectives):
+    """Simple package which acts as a build dependency"""
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/simple-1.0.tar.gz"
+
+    version('1.0', '0123456789abcdef0123456789abcdef')
+
+    depends_on('openblas', when='+openblas')
+    provides('lapack', when='+openblas')
+
+    def install(self, spec, prefix):
+        pass

View File

@@ -90,7 +90,6 @@ class Dealii(CMakePackage):
                "boost@1.59.0:+thread+system+serialization+iostreams+mpi+python",
                when='@8.5.0:+mpi+python')
     depends_on("bzip2")
-    depends_on("cmake", type='build')
     depends_on("lapack")
     depends_on("muparser")
     depends_on("suite-sparse")

View File

@@ -43,5 +43,3 @@ class Openjpeg(CMakePackage):
     version('2.0', 'cdf266530fee8af87454f15feb619609')
     version('1.5.2', '545f98923430369a6b046ef3632ef95c')
     version('1.5.1', 'd774e4b5a0db5f0f171c4fc0aabfa14e')
-
-    depends_on('cmake', type='build')

View File

@@ -36,8 +36,6 @@ class YamlCpp(CMakePackage):
     variant('fpic', default=False,
             description='Build with position independent code')
 
-    depends_on('cmake', type='build')
-
     depends_on('boost', when='@:0.5.3')
 
     def cmake_args(self):