specs: use lazy lexicographic comparison instead of key_ordering
We have been using the `@llnl.util.lang.key_ordering` decorator for specs
and most of their components. This leverages the fact that in Python,
tuple comparison is lexicographic. It allows you to implement a
`_cmp_key` method on your class, and have `__eq__`, `__lt__`, etc.
implemented automatically using that key. For example, you might use
tuple keys to implement comparison:
```python
class Widget:
    # author implements this
    def _cmp_key(self):
        return (
            self.a,
            self.b,
            (self.c, self.d),
            self.e
        )
    # operators are generated by @key_ordering
    def __eq__(self, other):
        return self._cmp_key() == other._cmp_key()
    def __lt__(self, other):
        return self._cmp_key() < other._cmp_key()
    # etc.
```
The issue there for simple comparators is that we have to build the
tuples *and* generate all the values in them up front. When
implementing comparisons for large data structures, this can be costly.
This PR replaces `@key_ordering` with a new decorator,
`@lazy_lexicographic_ordering`. Lazy lexicographic comparison maps the
tuple comparison shown above to generator functions. Instead of comparing
based on pre-constructed tuple keys, users of this decorator can compare
using elements from a generator. So, you'd write:
```python
@lazy_lexicographic_ordering
class Widget:
    def _cmp_iter(self):
        yield self.a
        yield self.b
        def cd_fun():
            yield self.c
            yield self.d
        yield cd_fun
        yield self.e
    # operators are added by the decorator (but are a bit more complex)
```
There are no tuples that have to be pre-constructed, and the generator
does not have to complete. Instead of tuples, we simply make functions
that lazily yield what would've been in the tuple. If a yielded value is
a `callable`, the comparison functions will call it and recursively
compare it. The comparator just walks the data structure like you'd expect
it to.
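
As a rough sketch of that walk (simplified; the actual `lazy_eq`/`lazy_lt`
helpers added in this PR also use `zip_longest` with a sentinel so they can
tell which side ran out of elements first):

```python
def lazy_eq_sketch(lseq, rseq):
    # lseq and rseq are generator *functions* (like _cmp_iter), not generators
    for left, right in zip(lseq(), rseq()):
        if callable(left):
            # nested generator functions are compared recursively
            if not lazy_eq_sketch(left, right):
                return False
        elif left != right:
            # plain values are compared directly, and we stop at the first
            # difference without generating anything after it
            return False
    return True
```
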
The `@lazy_lexicographic_ordering` decorator handles the details of
implementing comparison operators, and the `Widget` implementor only has
to worry about writing `_cmp_iter` and making sure the elements it yields
are also comparable.
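
For instance, with a small hypothetical class (assuming the decorator is
imported from `llnl.util.lang`, where this PR adds it), comparison and
hashing fall out of `_cmp_iter` and short-circuit at the first differing
element:

```python
from llnl.util.lang import lazy_lexicographic_ordering

@lazy_lexicographic_ordering
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def _cmp_iter(self):
        yield self.x
        yield self.y  # never generated if x alone decides the comparison

assert Point(1, 2) == Point(1, 2)
assert Point(1, 2) < Point(2, 0)               # decided by x alone
assert hash(Point(1, 2)) == hash(Point(1, 2))  # __hash__ hashes tuplify(_cmp_iter)
```
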
Using this PR shaves another 1.5 seconds off the runtime of `spack buildcache
list`, and it also speeds up Spec comparison by about 30%. The runtime
improvement comes mostly from *not* calling `hash()` on the full contents of
`_cmp_iter()`: as the diff below shows, a concrete Spec's `__hash__` now
reuses a 64-bit prefix of its already-computed DAG hash instead of hashing a
fully built comparison key.

Author:    Todd Gamblin
Committer: Greg Becker
Parent:    fd12cba18b
Commit:    01a6adb5f7

Changes to `llnl.util.lang` (the comparison utilities):

```diff
@@ -14,6 +14,11 @@
 from six import string_types
 import sys
 
+if sys.version_info < (3, 0):
+    from itertools import izip_longest  # novm
+    zip_longest = izip_longest
+else:
+    from itertools import zip_longest  # novm
 
 if sys.version_info >= (3, 3):
     from collections.abc import Hashable, MutableMapping  # novm
@@ -227,48 +232,222 @@ def list_modules(directory, **kwargs):
                 yield re.sub('.py$', '', name)
 
 
-def key_ordering(cls):
-    """Decorates a class with extra methods that implement rich comparison
-       operations and ``__hash__``.  The decorator assumes that the class
-       implements a function called ``_cmp_key()``.  The rich comparison
-       operations will compare objects using this key, and the ``__hash__``
-       function will return the hash of this key.
-
-       If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
-       ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.
-
-    Raises:
-           TypeError: If the class does not have a ``_cmp_key`` method
-    """
-    def setter(name, value):
-        value.__name__ = name
-        setattr(cls, name, value)
-
-    if not has_method(cls, '_cmp_key'):
-        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
-
-    setter('__eq__',
-           lambda s, o:
-           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
-    setter('__lt__',
-           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
-    setter('__le__',
-           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
-
-    setter('__ne__',
-           lambda s, o:
-           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
-    setter('__gt__',
-           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
-    setter('__ge__',
-           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
-
-    setter('__hash__', lambda self: hash(self._cmp_key()))
+def decorator_with_or_without_args(decorator):
+    """Allows a decorator to be used with or without arguments, e.g.::
+
+        # Calls the decorator function some args
+        @decorator(with, arguments, and=kwargs)
+
+    or::
+
+        # Calls the decorator function with zero arguments
+        @decorator
+
+    """
+    # See https://stackoverflow.com/questions/653368 for more on this
+    @functools.wraps(decorator)
+    def new_dec(*args, **kwargs):
+        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
+            # actual decorated function
+            return decorator(args[0])
+        else:
+            # decorator arguments
+            return lambda realf: decorator(realf, *args, **kwargs)
+
+    return new_dec
+
+
+#: sentinel for testing that iterators are done in lazy_lexicographic_ordering
+done = object()
+
+
+def tuplify(seq):
+    """Helper for lazy_lexicographic_ordering()."""
+    return tuple((tuplify(x) if callable(x) else x) for x in seq())
+
+
+def lazy_eq(lseq, rseq):
+    """Equality comparison for two lazily generated sequences.
+
+    See ``lazy_lexicographic_ordering``.
+    """
+    liter = lseq()  # call generators
+    riter = rseq()
+
+    # zip_longest is implemented in native code, so use it for speed.
+    # use zip_longest instead of zip because it allows us to tell
+    # which iterator was longer.
+    for left, right in zip_longest(liter, riter, fillvalue=done):
+        if (left is done) or (right is done):
+            return False
+
+        # recursively enumerate any generators, otherwise compare
+        equal = lazy_eq(left, right) if callable(left) else left == right
+        if not equal:
+            return False
+
+    return True
+
+
+def lazy_lt(lseq, rseq):
+    """Less-than comparison for two lazily generated sequences.
+
+    See ``lazy_lexicographic_ordering``.
+    """
+    liter = lseq()
+    riter = rseq()
+
+    for left, right in zip_longest(liter, riter, fillvalue=done):
+        if (left is done) or (right is done):
+            return left is done  # left was shorter than right
+
+        sequence = callable(left)
+        equal = lazy_eq(left, right) if sequence else left == right
+        if equal:
+            continue
+
+        if sequence:
+            return lazy_lt(left, right)
+        if left is None:
+            return True
+        if right is None:
+            return False
+
+        return left < right
+
+    return False  # if equal, return False
+
+
+@decorator_with_or_without_args
+def lazy_lexicographic_ordering(cls, set_hash=True):
+    """Decorates a class with extra methods that implement rich comparison.
+
+    This is a lazy version of the tuple comparison used frequently to
+    implement comparison in Python. Given some objects with fields, you
+    might use tuple keys to implement comparison, e.g.::
+
+        class Widget:
+            def _cmp_key(self):
+                return (
+                    self.a,
+                    self.b,
+                    (self.c, self.d),
+                    self.e
+                )
+
+            def __eq__(self, other):
+                return self._cmp_key() == other._cmp_key()
+
+            def __lt__(self):
+                return self._cmp_key() < other._cmp_key()
+
+            # etc.
+
+    Python would compare ``Widgets`` lexicographically based on their
+    tuples. The issue there for simple comparators is that we have to
+    bulid the tuples *and* we have to generate all the values in them up
+    front. When implementing comparisons for large data structures, this
+    can be costly.
+
+    Lazy lexicographic comparison maps the tuple comparison shown above
+    to generator functions. Instead of comparing based on pre-constructed
+    tuple keys, users of this decorator can compare using elements from a
+    generator. So, you'd write::
+
+        @lazy_lexicographic_ordering
+        class Widget:
+            def _cmp_iter(self):
+                yield a
+                yield b
+                def cd_fun():
+                    yield c
+                    yield d
+                yield cd_fun
+                yield e
+
+            # operators are added by decorator
+
+    There are no tuples preconstructed, and the generator does not have
+    to complete. Instead of tuples, we simply make functions that lazily
+    yield what would've been in the tuple. The
+    ``@lazy_lexicographic_ordering`` decorator handles the details of
+    implementing comparison operators, and the ``Widget`` implementor
+    only has to worry about writing ``_cmp_iter``, and making sure the
+    elements in it are also comparable.
+
+    Some things to note:
+
+      * If a class already has ``__eq__``, ``__ne__``, ``__lt__``,
+        ``__le__``, ``__gt__``, ``__ge__``, or ``__hash__`` defined, this
+        decorator will overwrite them.
+
+      * If ``set_hash`` is ``False``, this will not overwrite
+        ``__hash__``.
+
+      * This class uses Python 2 None-comparison semantics. If you yield
+        None and it is compared to a non-None type, None will always be
+        less than the other object.
+
+    Raises:
+        TypeError: If the class does not have a ``_cmp_iter`` method
+
+    """
+    if not has_method(cls, "_cmp_iter"):
+        raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
+
+    # comparison operators are implemented in terms of lazy_eq and lazy_lt
+    def eq(self, other):
+        if self is other:
+            return True
+        return (other is not None) and lazy_eq(self._cmp_iter, other._cmp_iter)
+
+    def lt(self, other):
+        if self is other:
+            return False
+        return (other is not None) and lazy_lt(self._cmp_iter, other._cmp_iter)
+
+    def ne(self, other):
+        if self is other:
+            return False
+        return (other is None) or not lazy_eq(self._cmp_iter, other._cmp_iter)
+
+    def gt(self, other):
+        if self is other:
+            return False
+        return (other is None) or lazy_lt(other._cmp_iter, self._cmp_iter)
+
+    def le(self, other):
+        if self is other:
+            return True
+        return (other is not None) and not lazy_lt(other._cmp_iter,
+                                                   self._cmp_iter)
+
+    def ge(self, other):
+        if self is other:
+            return True
+        return (other is None) or not lazy_lt(self._cmp_iter, other._cmp_iter)
+
+    def h(self):
+        return hash(tuplify(self._cmp_iter))
+
+    def add_func_to_class(name, func):
+        """Add a function to a class with a particular name."""
+        func.__name__ = name
+        setattr(cls, name, func)
+
+    add_func_to_class("__eq__", eq)
+    add_func_to_class("__ne__", ne)
+    add_func_to_class("__lt__", lt)
+    add_func_to_class("__le__", le)
+    add_func_to_class("__gt__", gt)
+    add_func_to_class("__ge__", ge)
+    if set_hash:
+        add_func_to_class("__hash__", h)
 
     return cls
 
 
-@key_ordering
+@lazy_lexicographic_ordering
 class HashableMap(MutableMapping):
     """This is a hashable, comparable dictionary.  Hash is performed on
        a tuple of the values in the dictionary."""
@@ -291,8 +470,9 @@ def __len__(self):
     def __delitem__(self, key):
         del self.dict[key]
 
-    def _cmp_key(self):
-        return tuple(sorted(self.values()))
+    def _cmp_iter(self):
+        for _, v in sorted(self.items()):
+            yield v
 
     def copy(self):
         """Type-agnostic clone method.  Preserves subclass type."""
```

Changes to the architecture classes and helpers (`Platform`, `OperatingSystem`, `Arch`):

```diff
@@ -65,7 +65,7 @@
 import six
 
 import llnl.util.tty as tty
-from llnl.util.lang import memoized, list_modules, key_ordering
+import llnl.util.lang as lang
 
 import spack.compiler
 import spack.compilers
@@ -232,7 +232,7 @@ def optimization_flags(self, compiler):
         )
 
 
-@key_ordering
+@lang.lazy_lexicographic_ordering
 class Platform(object):
     """ Abstract class that each type of Platform will subclass.
         Will return a instance of it once it is returned.
@@ -329,23 +329,27 @@ def __repr__(self):
     def __str__(self):
         return self.name
 
-    def _cmp_key(self):
-        t_keys = ''.join(str(t._cmp_key()) for t in
-                         sorted(self.targets.values()))
-        o_keys = ''.join(str(o._cmp_key()) for o in
-                         sorted(self.operating_sys.values()))
-        return (self.name,
-                self.default,
-                self.front_end,
-                self.back_end,
-                self.default_os,
-                self.front_os,
-                self.back_os,
-                t_keys,
-                o_keys)
+    def _cmp_iter(self):
+        yield self.name
+        yield self.default
+        yield self.front_end
+        yield self.back_end
+        yield self.default_os
+        yield self.front_os
+        yield self.back_os
+
+        def targets():
+            for t in sorted(self.targets.values()):
+                yield t._cmp_iter
+        yield targets
+
+        def oses():
+            for o in sorted(self.operating_sys.values()):
+                yield o._cmp_iter
+        yield oses
 
 
-@key_ordering
+@lang.lazy_lexicographic_ordering
 class OperatingSystem(object):
     """ Operating System will be like a class similar to platform extended
         by subclasses for the specifics. Operating System will contain the
@@ -363,8 +367,9 @@ def __str__(self):
     def __repr__(self):
         return self.__str__()
 
-    def _cmp_key(self):
-        return (self.name, self.version)
+    def _cmp_iter(self):
+        yield self.name
+        yield self.version
 
     def to_dict(self):
         return syaml_dict([
@@ -373,7 +378,7 @@ def to_dict(self):
         ])
 
 
-@key_ordering
+@lang.lazy_lexicographic_ordering
 class Arch(object):
     """Architecture is now a class to help with setting attributes.
 
@@ -423,20 +428,21 @@ def __nonzero__(self):
                 self.target is not None)
     __bool__ = __nonzero__
 
-    def _cmp_key(self):
+    def _cmp_iter(self):
         if isinstance(self.platform, Platform):
-            platform = self.platform.name
+            yield self.platform.name
         else:
-            platform = self.platform
+            yield self.platform
+
         if isinstance(self.os, OperatingSystem):
-            os = self.os.name
+            yield self.os.name
         else:
-            os = self.os
+            yield self.os
+
         if isinstance(self.target, Target):
-            target = self.target.microarchitecture
+            yield self.target.microarchitecture
         else:
-            target = self.target
-        return (platform, os, target)
+            yield self.target
 
     def to_dict(self):
         str_or_none = lambda v: str(v) if v else None
@@ -458,7 +464,7 @@ def from_dict(d):
         return arch_for_spec(spec)
 
 
-@memoized
+@lang.memoized
 def get_platform(platform_name):
     """Returns a platform object that corresponds to the given name."""
     platform_list = all_platforms()
@@ -494,13 +500,13 @@ def arch_for_spec(arch_spec):
     return Arch(arch_plat, arch_spec.os, arch_spec.target)
 
 
-@memoized
+@lang.memoized
 def _all_platforms():
     classes = []
     mod_path = spack.paths.platform_path
     parent_module = "spack.platforms"
 
-    for name in list_modules(mod_path):
+    for name in lang.list_modules(mod_path):
         mod_name = '%s.%s' % (parent_module, name)
         class_name = mod_to_class(name)
         mod = __import__(mod_name, fromlist=[class_name])
@@ -515,7 +521,7 @@ def _all_platforms():
     return classes
 
 
-@memoized
+@lang.memoized
 def _platform():
     """Detects the platform for this machine.
 
@@ -546,7 +552,7 @@ def _platform():
 all_platforms = _all_platforms
 
 
-@memoized
+@lang.memoized
 def default_arch():
     """Default ``Arch`` object for this machine.
 
@@ -570,7 +576,7 @@ def sys_type():
     return str(default_arch())
 
 
-@memoized
+@lang.memoized
 def compatible_sys_types():
     """Returns a list of all the systypes compatible with the current host."""
     compatible_archs = []
```

Changes to the spec classes (`ArchSpec`, `CompilerSpec`, `DependencySpec`, the compiler-flag map, and `Spec`):

```diff
@@ -203,7 +203,7 @@ def __call__(self, match):
     return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + '@.')
 
 
-@lang.key_ordering
+@lang.lazy_lexicographic_ordering
 class ArchSpec(object):
     def __init__(self, spec_or_platform_tuple=(None, None, None)):
         """ Architecture specification a package should be built with.
@@ -252,8 +252,10 @@ def _autospec(self, spec_like):
             return spec_like
         return ArchSpec(spec_like)
 
-    def _cmp_key(self):
-        return self.platform, self.os, self.target
+    def _cmp_iter(self):
+        yield self.platform
+        yield self.os
+        yield self.target
 
     def _dup(self, other):
         self.platform = other.platform
@@ -534,7 +536,7 @@ def __contains__(self, string):
         return string in str(self) or string in self.target
 
 
-@lang.key_ordering
+@lang.lazy_lexicographic_ordering
 class CompilerSpec(object):
     """The CompilerSpec field represents the compiler or range of compiler
        versions that a package should be built with.  CompilerSpecs have a
@@ -623,8 +625,9 @@ def copy(self):
         clone.versions = self.versions.copy()
         return clone
 
-    def _cmp_key(self):
-        return (self.name, self.versions)
+    def _cmp_iter(self):
+        yield self.name
+        yield self.versions
 
     def to_dict(self):
         d = syaml.syaml_dict([('name', self.name)])
@@ -648,7 +651,7 @@ def __repr__(self):
         return str(self)
 
 
-@lang.key_ordering
+@lang.lazy_lexicographic_ordering
 class DependencySpec(object):
     """DependencySpecs connect two nodes in the DAG, and contain deptypes.
 
@@ -686,10 +689,10 @@ def add_type(self, type):
             self.deptypes + dp.canonical_deptype(type)
         )
 
-    def _cmp_key(self):
-        return (self.parent.name if self.parent else None,
-                self.spec.name if self.spec else None,
-                self.deptypes)
+    def _cmp_iter(self):
+        yield self.parent.name if self.parent else None
+        yield self.spec.name if self.spec else None
+        yield self.deptypes
 
     def __str__(self):
         return "%s %s--> %s" % (self.parent.name if self.parent else None,
@@ -747,8 +750,15 @@ def copy(self):
             clone[name] = value
         return clone
 
-    def _cmp_key(self):
-        return tuple((k, tuple(v)) for k, v in sorted(six.iteritems(self)))
+    def _cmp_iter(self):
+        for k, v in sorted(self.items()):
+            yield k
+
+            def flags():
+                for flag in v:
+                    yield flag
+
+            yield flags
 
     def __str__(self):
         sorted_keys = [k for k in sorted(self.keys()) if self[k] != []]
@@ -1016,7 +1026,7 @@ def __init__(self, spec, name, query_parameters):
         )
 
 
-@lang.key_ordering
+@lang.lazy_lexicographic_ordering(set_hash=False)
 class Spec(object):
 
     #: Cache for spec's prefix, computed lazily in the corresponding property
@@ -1060,7 +1070,7 @@ def __init__(self, spec_like=None,
 
         self._hash = None
         self._build_hash = None
-        self._cmp_key_cache = None
+        self._dunder_hash = None
         self._package = None
 
         # Most of these are internal implementation details that can be
@@ -3358,7 +3368,7 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
                 before possibly copying the dependencies of ``other`` onto
                 ``self``
             caches (bool or None): preserve cached fields such as
-                ``_normal``, ``_hash``, and ``_cmp_key_cache``. By
+                ``_normal``, ``_hash``, and ``_dunder_hash``. By
                 default this is ``False`` if DAG structure would be
                 changed by the copy, ``True`` if it's an exact copy.
@@ -3432,13 +3442,13 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
         if caches:
             self._hash = other._hash
             self._build_hash = other._build_hash
-            self._cmp_key_cache = other._cmp_key_cache
+            self._dunder_hash = other._dunder_hash
             self._normal = other._normal
             self._full_hash = other._full_hash
         else:
             self._hash = None
             self._build_hash = None
-            self._cmp_key_cache = None
+            self._dunder_hash = None
             self._normal = False
             self._full_hash = None
@@ -3562,13 +3572,17 @@ def sorted_deps(self):
         deps = self.flat_dependencies()
         return tuple(deps[name] for name in sorted(deps))
 
-    def _eq_dag(self, other, vs, vo, deptypes):
-        """Recursive helper for eq_dag and ne_dag.  Does the actual DAG
-           traversal."""
+    def eq_dag(self, other, deptypes=True, vs=None, vo=None):
+        """True if the full dependency DAGs of specs are equal."""
+        if vs is None:
+            vs = set()
+        if vo is None:
+            vo = set()
+
         vs.add(id(self))
         vo.add(id(other))
 
-        if self.ne_node(other):
+        if not self.eq_node(other):
             return False
 
         if len(self._dependencies) != len(other._dependencies):
@@ -3596,58 +3610,38 @@ def _eq_dag(self, other, vs, vo, deptypes):
                 continue
 
             # Recursive check for equality
-            if not s._eq_dag(o, vs, vo, deptypes):
+            if not s.eq_dag(o, deptypes, vs, vo):
                 return False
 
         return True
 
-    def eq_dag(self, other, deptypes=True):
-        """True if the full dependency DAGs of specs are equal."""
-        return self._eq_dag(other, set(), set(), deptypes)
-
-    def ne_dag(self, other, deptypes=True):
-        """True if the full dependency DAGs of specs are not equal."""
-        return not self.eq_dag(other, set(), set(), deptypes)
-
     def _cmp_node(self):
-        """Comparison key for just *this node* and not its deps."""
-        # Name or namespace None will lead to invalid comparisons for abstract
-        # specs. Replace them with the empty string, which is not a valid spec
-        # name nor namespace so it will not create spurious equalities.
-        return (self.name or '',
-                self.namespace or '',
-                tuple(self.versions),
-                self.variants,
-                self.architecture,
-                self.compiler,
-                self.compiler_flags)
+        """Yield comparable elements of just *this node* and not its deps."""
+        yield self.name or ''
+        yield self.namespace or ''
+        yield self.versions
+        yield self.variants
+        yield self.compiler
+        yield self.compiler_flags
+        yield self.architecture
 
     def eq_node(self, other):
         """Equality with another spec, not including dependencies."""
-        return self._cmp_node() == other._cmp_node()
+        return (other is not None) and lang.lazy_eq(
+            self._cmp_node, other._cmp_node
+        )
 
-    def ne_node(self, other):
-        """Inequality with another spec, not including dependencies."""
-        return self._cmp_node() != other._cmp_node()
-
-    def _cmp_key(self):
-        """This returns a key for the spec *including* DAG structure.
-
-        The key is the concatenation of:
-          1. A tuple describing this node in the DAG.
-          2. The hash of each of this node's dependencies' cmp_keys.
-        """
-        if self._cmp_key_cache:
-            return self._cmp_key_cache
-
-        dep_tuple = tuple(
-            (d.spec.name, hash(d.spec), tuple(sorted(d.deptypes)))
-            for name, d in sorted(self._dependencies.items()))
-
-        key = (self._cmp_node(), dep_tuple)
-        if self._concrete:
-            self._cmp_key_cache = key
-        return key
+    def _cmp_iter(self):
+        """Lazily yield components of self for comparison."""
+        for item in self._cmp_node():
+            yield item
+
+        def deps():
+            for _, dep in sorted(self._dependencies.items()):
+                yield dep.spec.name
+                yield tuple(sorted(dep.deptypes))
+                yield hash(dep.spec)
+        yield deps
 
     def colorized(self):
         return colorize_spec(self)
@@ -4339,6 +4333,22 @@ def clear_cached_hashes(self):
             if hasattr(self, attr):
                 setattr(self, attr, None)
 
+    def __hash__(self):
+        # If the spec is concrete, we leverage the DAG hash and just use
+        # a 64-bit prefix of it. The DAG hash has the advantage that it's
+        # computed once per concrete spec, and it's saved -- so if we
+        # read concrete specs we don't need to recompute the whole hash.
+        # This is good for large, unchanging specs.
+        if self.concrete:
+            if not self._dunder_hash:
+                self._dunder_hash = self.dag_hash_bit_prefix(64)
+            return self._dunder_hash
+
+        # This is the normal hash for lazy_lexicographic_ordering. It's
+        # slow for large specs because it traverses the whole spec graph,
+        # so we hope it only runs on abstract specs, which are small.
+        return hash(lang.tuplify(self._cmp_iter))
+
 
 class LazySpecCache(collections.defaultdict):
     """Cache for Specs that uses a spec_like as key, and computes lazily
```

Changes to the spec tests:

```diff
@@ -352,10 +352,10 @@ def test_multiple_specs_with_hash(self, database):
     def test_ambiguous_hash(self, mutable_database):
         x1 = Spec('a')
         x1.concretize()
-        x1._hash = 'xy'
+        x1._hash = 'xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'
         x2 = Spec('a')
         x2.concretize()
-        x2._hash = 'xx'
+        x2._hash = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
 
         mutable_database.add(x1, spack.store.layout)
         mutable_database.add(x2, spack.store.layout)
@@ -557,10 +557,6 @@ def test_parse_yaml_dependency(self, mock_packages, tmpdir):
         with specfile.open('w') as f:
             f.write(s['libelf'].to_yaml(hash=ht.build_hash))
 
-        print("")
-        print("")
-        print("PARSING HERE")
-
         # Make sure we can use yaml path as dependency, e.g.:
         #     "spack spec libdwarf ^ /path/to/libelf.yaml"
         specs = sp.parse('libdwarf ^ {0}'.format(specfile.strpath))
```

Changes to `AbstractVariant`:

```diff
@@ -201,7 +201,7 @@ def convert(self, other):
     return convert
 
 
-@lang.key_ordering
+@lang.lazy_lexicographic_ordering
 class AbstractVariant(object):
     """A variant that has not yet decided who it wants to be. It behaves like
     a multi valued variant which **could** do things.
@@ -282,21 +282,14 @@ def _value_setter(self, value):
         # to a set
         self._value = tuple(sorted(set(value)))
 
-    def _cmp_value(self):
-        """Returns a tuple of strings containing the values stored in
-        the variant.
-
-        Returns:
-            tuple of str: values stored in the variant
-        """
+    def _cmp_iter(self):
+        yield self.name
+
         value = self._value
         if not isinstance(value, tuple):
             value = (value,)
-        stringified = tuple(str(x) for x in value)
-        return stringified
-
-    def _cmp_key(self):
-        return self.name, self._cmp_value()
+        value = tuple(str(x) for x in value)
+        yield value
 
     def copy(self):
         """Returns an instance of a variant equivalent to self
```