author    Massimiliano Culpo <massimiliano.culpo@gmail.com>    2021-08-28 16:31:16 +0200
committer GitHub <noreply@github.com>    2021-08-28 07:31:16 -0700
commit    40788cf49adf93c2598a4d09a3c4319c75c6f883 (patch)
tree      d0b71d0bc3115662f1092eb693ebb1039c31b9e4 /lib
parent    025dbb216276d36c591358c0d54f69926fb1b799 (diff)
Add a __reduce__ method to Spec (#25658)
* Add a __reduce__ method to Spec

  Fixes #23892. The recursion limit seems to be due to the default way
  in which a Spec is serialized, following all the attributes. It's
  still not clear to me why this is related to being in an environment,
  but in any case we already have methods to serialize Specs to disk in
  JSON and YAML format. Here we use them to pickle a Spec instance too.

* Downgrade to build-hash

  Hopefully nothing will change the package in between serializing the
  spec and sending it to the child process.

* Add support for Python 2
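For background, the following is a minimal, self-contained sketch of both the failure mode and the __reduce__-based fix described above. The Node and MiniSpec classes and the _minispec_from_dict helper are hypothetical stand-ins for illustration only, not Spack's actual Spec API:

import pickle
import sys

# Failure mode: default pickling walks attributes recursively, so a
# deep object graph can exceed the interpreter's recursion limit.
class Node(object):
    def __init__(self, child=None):
        self.child = child

chain = None
for _ in range(sys.getrecursionlimit()):
    chain = Node(chain)
try:
    pickle.dumps(chain)
except RecursionError:
    print("default serialization exceeded the recursion limit")

# Fix: route pickling through an explicit dict representation.
def _minispec_from_dict(data):
    # A plain module-level function: Python 2's pickle cannot resolve
    # a static method as the callable returned by __reduce__.
    spec = MiniSpec(data['name'])
    spec.deps = [MiniSpec(n) for n in data['deps']]
    return spec

class MiniSpec(object):
    def __init__(self, name, deps=None):
        self.name = name
        self.deps = deps or []

    def to_dict(self):
        # Flat, JSON-friendly representation (analogous in spirit to
        # Spec.to_dict in the commit below)
        return {'name': self.name, 'deps': [d.name for d in self.deps]}

    def __reduce__(self):
        # Unpickling calls _minispec_from_dict(self.to_dict()) instead
        # of reconstructing the instance attribute by attribute.
        return _minispec_from_dict, (self.to_dict(),)

clone = pickle.loads(pickle.dumps(MiniSpec('root', [MiniSpec('zlib')])))
assert clone.name == 'root' and clone.deps[0].name == 'zlib'

The key design point, mirrored in the commit, is that the callable returned by __reduce__ must be a module-level function so pickle can locate it by name, which is why _spec_from_dict is moved out of the Spec class for Python 2.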
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/spec.py | 60
1 file changed, 38 insertions(+), 22 deletions(-)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 7ab0ffa5fd..af9efb3baa 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -2093,28 +2093,7 @@ class Spec(object):
         Parameters:
         data -- a nested dict/list data structure read from YAML or JSON.
         """
-        nodes = data['spec']
-
-        # Read nodes out of list. Root spec is the first element;
-        # dependencies are the following elements.
-        dep_list = [Spec.from_node_dict(node) for node in nodes]
-        if not dep_list:
-            raise spack.error.SpecError("YAML spec contains no nodes.")
-        deps = dict((spec.name, spec) for spec in dep_list)
-        spec = dep_list[0]
-
-        for node in nodes:
-            # get dependency dict from the node.
-            name = next(iter(node))
-
-            if 'dependencies' not in node[name]:
-                continue
-
-            yaml_deps = node[name]['dependencies']
-            for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
-                deps[name]._add_dependency(deps[dname], dtypes)
-
-        return spec
+        return _spec_from_dict(data)
 
     @staticmethod
     def from_yaml(stream):
@@ -4385,6 +4364,43 @@ class Spec(object):
         # so we hope it only runs on abstract specs, which are small.
         return hash(lang.tuplify(self._cmp_iter))
 
+    def __reduce__(self):
+        return _spec_from_dict, (self.to_dict(hash=ht.build_hash),)
+
+
+# Note: This function has been refactored from being a static method
+# of Spec to be a function at the module level. This was needed to
+# support its use in __reduce__ to pickle a Spec object in Python 2.
+# It can be moved back safely after we drop support for Python 2.7.
+def _spec_from_dict(data):
+    """Construct a spec from YAML.
+
+    Parameters:
+    data -- a nested dict/list data structure read from YAML or JSON.
+    """
+    nodes = data['spec']
+
+    # Read nodes out of list. Root spec is the first element;
+    # dependencies are the following elements.
+    dep_list = [Spec.from_node_dict(node) for node in nodes]
+    if not dep_list:
+        raise spack.error.SpecError("YAML spec contains no nodes.")
+    deps = dict((spec.name, spec) for spec in dep_list)
+    spec = dep_list[0]
+
+    for node in nodes:
+        # get dependency dict from the node.
+        name = next(iter(node))
+
+        if 'dependencies' not in node[name]:
+            continue
+
+        yaml_deps = node[name]['dependencies']
+        for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+            deps[name]._add_dependency(deps[dname], dtypes)
+
+    return spec
+
 
 class LazySpecCache(collections.defaultdict):
     """Cache for Specs that uses a spec_like as key, and computes lazily