Add a __reduce__ method to Spec (#25658)

* Add a __reduce__ method to Spec

fixes #23892

The recursion limit seems to be hit because of the
default way in which a Spec is pickled: pickle
follows all of its attributes, and thus the whole
dependency graph, recursively. It's still not clear
to me why this is related to being in an
environment, but in any case we already have
methods to serialize Specs to disk in JSON and
YAML format. Here we reuse them to pickle a Spec
instance too.
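
A minimal, self-contained sketch of the same pattern
(a made-up Node class, not Spack code): __reduce__
hands pickle a module-level reconstruction function
plus a plain-dict snapshot, so pickle never walks
the live object graph attribute by attribute.

import pickle


def _node_from_dict(data):
    # Reconstruction callable returned by __reduce__; it has to live at
    # module level so that pickle can refer to it by name.
    node = Node(data['name'])
    node.children = [_node_from_dict(child) for child in data['children']]
    return node


class Node(object):
    def __init__(self, name):
        self.name = name
        self.children = []

    def to_dict(self):
        # Plain-data snapshot, playing the role of Spec.to_dict()
        return {'name': self.name,
                'children': [child.to_dict() for child in self.children]}

    def __reduce__(self):
        # Tell pickle: "call _node_from_dict(self.to_dict()) to rebuild me"
        return _node_from_dict, (self.to_dict(),)


root = Node('root')
root.children.append(Node('leaf'))
clone = pickle.loads(pickle.dumps(root))
assert clone.children[0].name == 'leaf'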

* Downgrade to build-hash when pickling the spec

Hopefully nothing will change the package in
between serializing the spec and sending it
to the child process.
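
A rough sketch of the round trip this gives us; it
assumes a working Spack installation (spack.spec and
spack.hash_types importable, as in spec.py itself),
and the 'zlib' spec and the concretization step are
illustrative only:

import spack.hash_types as ht
import spack.spec

# Hypothetical spec; in the failing scenario this would be one of the
# concretized specs of an environment handed to a child process.
spec = spack.spec.Spec('zlib').concretized()

# What __reduce__ stores: a plain dict carrying hashes up to the build hash,
data = spec.to_dict(hash=ht.build_hash)
# and what pickle.loads calls to rebuild the Spec on the receiving side.
clone = spack.spec._spec_from_dict(data)

assert clone.dag_hash() == spec.dag_hash()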

* Add support for Python 2
Massimiliano Culpo, 2021-08-28 16:31:16 +02:00, committed by GitHub
parent 025dbb2162
commit 40788cf49a

@@ -2093,28 +2093,7 @@ def from_dict(data):
         Parameters:
         data -- a nested dict/list data structure read from YAML or JSON.
         """
-        nodes = data['spec']
-        # Read nodes out of list. Root spec is the first element;
-        # dependencies are the following elements.
-        dep_list = [Spec.from_node_dict(node) for node in nodes]
-        if not dep_list:
-            raise spack.error.SpecError("YAML spec contains no nodes.")
-        deps = dict((spec.name, spec) for spec in dep_list)
-        spec = dep_list[0]
-        for node in nodes:
-            # get dependency dict from the node.
-            name = next(iter(node))
-            if 'dependencies' not in node[name]:
-                continue
-            yaml_deps = node[name]['dependencies']
-            for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
-                deps[name]._add_dependency(deps[dname], dtypes)
-        return spec
+        return _spec_from_dict(data)
 
     @staticmethod
     def from_yaml(stream):
@@ -4385,6 +4364,43 @@ def __hash__(self):
         # so we hope it only runs on abstract specs, which are small.
         return hash(lang.tuplify(self._cmp_iter))
 
+    def __reduce__(self):
+        return _spec_from_dict, (self.to_dict(hash=ht.build_hash),)
+
+
+# Note: This function has been refactored from being a static method
+# of Spec to be a function at the module level. This was needed to
+# support its use in __reduce__ to pickle a Spec object in Python 2.
+# It can be moved back safely after we drop support for Python 2.7
+def _spec_from_dict(data):
+    """Construct a spec from YAML.
+
+    Parameters:
+    data -- a nested dict/list data structure read from YAML or JSON.
+    """
+    nodes = data['spec']
+    # Read nodes out of list. Root spec is the first element;
+    # dependencies are the following elements.
+    dep_list = [Spec.from_node_dict(node) for node in nodes]
+    if not dep_list:
+        raise spack.error.SpecError("YAML spec contains no nodes.")
+    deps = dict((spec.name, spec) for spec in dep_list)
+    spec = dep_list[0]
+    for node in nodes:
+        # get dependency dict from the node.
+        name = next(iter(node))
+        if 'dependencies' not in node[name]:
+            continue
+        yaml_deps = node[name]['dependencies']
+        for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+            deps[name]._add_dependency(deps[dname], dtypes)
+    return spec
+
+
 class LazySpecCache(collections.defaultdict):
     """Cache for Specs that uses a spec_like as key, and computes lazily