summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  lib/spack/spack/build_systems/python.py                 |   2
-rw-r--r--  lib/spack/spack/cray_manifest.py                        |   2
-rw-r--r--  lib/spack/spack/database.py                             |  25
-rw-r--r--  lib/spack/spack/environment/environment.py              |  31
-rw-r--r--  lib/spack/spack/parser.py                               |   2
-rw-r--r--  lib/spack/spack/provider_index.py                       |   4
-rw-r--r--  lib/spack/spack/solver/asp.py                           |   2
-rw-r--r--  lib/spack/spack/spec.py                                 | 483
-rw-r--r--  lib/spack/spack/test/cmd/test.py                        |   2
-rw-r--r--  lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz   | bin 0 -> 2187 bytes
-rw-r--r--  lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz   | bin 0 -> 2925 bytes
-rw-r--r--  lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz   | bin 0 -> 4119 bytes
-rw-r--r--  lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz   | bin 0 -> 4968 bytes
-rw-r--r--  lib/spack/spack/test/spec_dag.py                        |  30
-rw-r--r--  lib/spack/spack/test/spec_semantics.py                  |  11
-rw-r--r--  lib/spack/spack/test/spec_yaml.py                       |  32
-rw-r--r--  lib/spack/spack/test/traverse.py                        |   4
17 files changed, 374 insertions, 256 deletions
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index d93807ff20..84caebefc6 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -267,7 +267,7 @@ class PythonPackage(PythonExtension):
python.external_path = self.spec.external_path
python._mark_concrete()
- self.spec.add_dependency_edge(python, ("build", "link", "run"))
+ self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
def get_external_python_for_prefix(self):
"""
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
index 1f1f5f0ca3..c2908f61be 100644
--- a/lib/spack/spack/cray_manifest.py
+++ b/lib/spack/spack/cray_manifest.py
@@ -162,7 +162,7 @@ def entries_to_specs(entries):
continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
- parent_spec._add_dependency(dep_spec, deptypes)
+ parent_spec._add_dependency(dep_spec, deptypes=deptypes)
return spec_dict
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 069d95347b..0607c6312a 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -107,6 +107,14 @@ default_install_record_fields = [
]
+def reader(version):
+ reader_cls = {
+ Version("5"): spack.spec.SpecfileV1,
+ Version("6"): spack.spec.SpecfileV3,
+ }
+ return reader_cls[version]
+
+
def _now():
"""Returns the time since the epoch"""
return time.time()
@@ -674,7 +682,7 @@ class Database(object):
except (TypeError, ValueError) as e:
raise sjson.SpackJSONError("error writing JSON database:", str(e))
- def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
+ def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash):
"""Recursively construct a spec from a hash in a YAML database.
Does not do any locking.
@@ -692,7 +700,7 @@ class Database(object):
spec_dict[hash.name] = hash_key
# Build spec from dict first.
- spec = spack.spec.Spec.from_node_dict(spec_dict)
+ spec = spec_reader.from_node_dict(spec_dict)
return spec
def db_for_spec_hash(self, hash_key):
@@ -732,7 +740,7 @@ class Database(object):
with self.read_transaction():
return self._data.get(hash_key, None)
- def _assign_dependencies(self, hash_key, installs, data):
+ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec
@@ -742,7 +750,7 @@ class Database(object):
spec_node_dict = spec_node_dict[spec.name]
if "dependencies" in spec_node_dict:
yaml_deps = spec_node_dict["dependencies"]
- for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
+ for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package
@@ -765,7 +773,7 @@ class Database(object):
tty.warn(msg)
continue
- spec._add_dependency(child, dtypes)
+ spec._add_dependency(child, deptypes=dtypes)
def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.
@@ -797,6 +805,7 @@ class Database(object):
# TODO: better version checking semantics.
version = Version(db["version"])
+ spec_reader = reader(version)
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
@@ -832,7 +841,7 @@ class Database(object):
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
- spec = self._read_spec_from_dict(hash_key, installs)
+ spec = self._read_spec_from_dict(spec_reader, hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
@@ -848,7 +857,7 @@ class Database(object):
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
- self._assign_dependencies(hash_key, installs, data)
+ self._assign_dependencies(spec_reader, hash_key, installs, data)
except MissingDependenciesError:
raise
except Exception as e:
@@ -1167,7 +1176,7 @@ class Database(object):
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
- new_spec._add_dependency(record.spec, dep.deptypes)
+ new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
if not upstream:
record.ref_count += 1
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index f81232feec..c008cc5c7e 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -104,6 +104,15 @@ valid_environment_name_re = r"^\w[\w-]*$"
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4
+
+READER_CLS = {
+ 1: spack.spec.SpecfileV1,
+ 2: spack.spec.SpecfileV1,
+ 3: spack.spec.SpecfileV2,
+ 4: spack.spec.SpecfileV3,
+}
+
+
# Magic names
# The name of the standalone spec list in the manifest yaml
user_speclist_name = "specs"
@@ -1436,7 +1445,7 @@ class Environment(object):
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
- test_dependency.copy(), deptype="test"
+ test_dependency.copy(), deptypes="test"
)
results = [
@@ -1942,7 +1951,7 @@ class Environment(object):
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": lockfile_format_version,
- "specfile-version": spack.spec.specfile_format_version,
+ "specfile-version": spack.spec.SPECFILE_FORMAT_VERSION,
},
# users specs + hashes are the 'roots' of the environment
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
@@ -1975,10 +1984,19 @@ class Environment(object):
# Track specs by their DAG hash, allows handling DAG hash collisions
first_seen = {}
+ current_lockfile_format = d["_meta"]["lockfile-version"]
+ try:
+ reader = READER_CLS[current_lockfile_format]
+ except KeyError:
+ msg = (
+ f"Spack {spack.__version__} cannot read environment lockfiles using the "
+ f"v{current_lockfile_format} format"
+ )
+ raise RuntimeError(msg)
# First pass: Put each spec in the map ignoring dependencies
for lockfile_key, node_dict in json_specs_by_hash.items():
- spec = Spec.from_node_dict(node_dict)
+ spec = reader.from_node_dict(node_dict)
if not spec._hash:
# in v1 lockfiles, the hash only occurs as a key
spec._hash = lockfile_key
@@ -1987,8 +2005,11 @@ class Environment(object):
# Second pass: For each spec, get its dependencies from the node dict
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
- for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
- specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
+ name, data = reader.name_and_data(node_dict)
+ for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
+ specs_by_hash[lockfile_key]._add_dependency(
+ specs_by_hash[dep_hash], deptypes=deptypes
+ )
# Traverse the root specs one at a time in the order they appear.
# The first time we see each DAG hash, that's the one we want to
diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py
index bed05c3316..b4748b259f 100644
--- a/lib/spack/spack/parser.py
+++ b/lib/spack/spack/parser.py
@@ -283,7 +283,7 @@ class SpecParser:
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
- root_spec._add_dependency(dependency, ())
+ root_spec._add_dependency(dependency, deptypes=())
else:
break
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index c572850385..526a7dc762 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -292,8 +292,8 @@ class ProviderIndex(_IndexBase):
index.providers = _transform(
providers,
lambda vpkg, plist: (
- spack.spec.Spec.from_node_dict(vpkg),
- set(spack.spec.Spec.from_node_dict(p) for p in plist),
+ spack.spec.SpecfileV3.from_node_dict(vpkg),
+ set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
),
)
return index
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index c6a8ea0c89..c8f213fcd4 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -2259,7 +2259,7 @@ class SpecBuilder(object):
assert len(dependencies) < 2, msg
if not dependencies:
- self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
+ self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
else:
# TODO: This assumes that each solve unifies dependencies
dependencies[0].add_type(type)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index ebaefbcf27..aaeb15e82e 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -168,7 +168,7 @@ CLEARSIGN_FILE_REGEX = re.compile(
)
#: specfile format version. Must increase monotonically
-specfile_format_version = 3
+SPECFILE_FORMAT_VERSION = 3
def colorize_spec(spec):
@@ -1529,14 +1529,14 @@ class Spec(object):
)
self.compiler = compiler
- def _add_dependency(self, spec, deptypes):
+ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies:
- self.add_dependency_edge(spec, deptypes)
+ self.add_dependency_edge(spec, deptypes=deptypes)
return
# Keep the intersection of constraints when a dependency is added
- # multiple times. Currently we only allow identical edge types.
+ # multiple times. Currently, we only allow identical edge types.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
@@ -1550,34 +1550,39 @@ class Spec(object):
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
)
- def add_dependency_edge(self, dependency_spec, deptype):
+ def add_dependency_edge(
+ self,
+ dependency_spec: "Spec",
+ *,
+ deptypes: dp.DependencyArgument,
+ ):
"""Add a dependency edge to this spec.
Args:
- dependency_spec (Spec): spec of the dependency
- deptype (str or tuple): dependency types
+ dependency_spec: spec of the dependency
+ deptypes: dependency types for this edge
"""
- deptype = dp.canonical_deptype(deptype)
+ deptypes = dp.canonical_deptype(deptypes)
# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
- if any(d in edge.deptypes for d in deptype):
+ if any(d in edge.deptypes for d in deptypes):
msg = (
'cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already'
)
- raise spack.error.SpecError(msg.format(edge, deptype))
+ raise spack.error.SpecError(msg.format(edge, deptypes))
for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
- edge.add_type(deptype)
+ edge.add_type(deptypes)
return
- edge = DependencySpec(self, dependency_spec, deptypes=deptype)
+ edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
@@ -2027,7 +2032,7 @@ class Spec(object):
node_list.append(node)
hash_set.add(node_hash)
- meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
+ meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
spec_dict = syaml.syaml_dict([("spec", inner_dict)])
return spec_dict
@@ -2063,7 +2068,7 @@ class Spec(object):
@staticmethod
def from_specfile(path):
- """Construct a spec from aJSON or YAML spec file path"""
+ """Construct a spec from a JSON or YAML spec file path"""
with open(path, "r") as fd:
file_content = fd.read()
if path.endswith(".json"):
@@ -2071,130 +2076,6 @@ class Spec(object):
return Spec.from_yaml(file_content)
@staticmethod
- def from_node_dict(node):
- spec = Spec()
- if "name" in node.keys():
- # New format
- name = node["name"]
- else:
- # Old format
- name = next(iter(node))
- node = node[name]
- for h in ht.hashes:
- setattr(spec, h.attr, node.get(h.name, None))
-
- spec.name = name
- spec.namespace = node.get("namespace", None)
-
- if "version" in node or "versions" in node:
- spec.versions = vn.VersionList.from_dict(node)
-
- if "arch" in node:
- spec.architecture = ArchSpec.from_dict(node)
-
- if "compiler" in node:
- spec.compiler = CompilerSpec.from_dict(node)
- else:
- spec.compiler = None
-
- if "parameters" in node:
- for name, values in node["parameters"].items():
- if name in _valid_compiler_flags:
- spec.compiler_flags[name] = []
- for val in values:
- spec.compiler_flags.add_flag(name, val, False)
- else:
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
- elif "variants" in node:
- for name, value in node["variants"].items():
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
- for name in FlagMap.valid_compiler_flags():
- spec.compiler_flags[name] = []
-
- spec.external_path = None
- spec.external_modules = None
- if "external" in node:
- # This conditional is needed because sometimes this function is
- # called with a node already constructed that contains a 'versions'
- # and 'external' field. Related to virtual packages provider
- # indexes.
- if node["external"]:
- spec.external_path = node["external"]["path"]
- spec.external_modules = node["external"]["module"]
- if spec.external_modules is False:
- spec.external_modules = None
- spec.extra_attributes = node["external"].get(
- "extra_attributes", syaml.syaml_dict()
- )
-
- # specs read in are concrete unless marked abstract
- spec._concrete = node.get("concrete", True)
-
- if "patches" in node:
- patches = node["patches"]
- if len(patches) > 0:
- mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
- mvar.value = patches
- # FIXME: Monkey patches mvar to store patches order
- mvar._patches_in_order_of_appearance = patches
-
- # Don't read dependencies here; from_dict() is used by
- # from_yaml() and from_json() to read the root *and* each dependency
- # spec.
-
- return spec
-
- @staticmethod
- def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
- build_spec_dict = node["build_spec"]
- return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
-
- @staticmethod
- def dependencies_from_node_dict(node):
- if "name" in node.keys():
- # New format
- name = node["name"]
- else:
- name = next(iter(node))
- node = node[name]
- if "dependencies" not in node:
- return
- for t in Spec.read_yaml_dep_specs(node["dependencies"]):
- yield t
-
- @staticmethod
- def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
- """Read the DependencySpec portion of a YAML-formatted Spec.
- This needs to be backward-compatible with older spack spec
- formats so that reindex will work on old specs/databases.
- """
- dep_iter = deps.items() if isinstance(deps, dict) else deps
- for dep in dep_iter:
- if isinstance(dep, tuple):
- dep_name, elt = dep
- else:
- elt = dep
- dep_name = dep["name"]
- if isinstance(elt, str):
- # original format, elt is just the dependency hash.
- dep_hash, deptypes = elt, ["build", "link"]
- elif isinstance(elt, tuple):
- # original deptypes format: (used tuples, not future-proof)
- dep_hash, deptypes = elt
- elif isinstance(elt, dict):
- # new format: elements of dependency spec are keyed.
- for h in ht.hashes:
- if h.name in elt:
- dep_hash, deptypes = elt[h.name], elt["type"]
- hash_type = h.name
- break
- else: # We never determined a hash type...
- raise spack.error.SpecError("Couldn't parse dependency spec.")
- else:
- raise spack.error.SpecError("Couldn't parse dependency types in spec.")
- yield dep_name, dep_hash, list(deptypes), hash_type
-
- @staticmethod
def override(init_spec, change_spec):
# TODO: this doesn't account for the case where the changed spec
# (and the user spec) have dependencies
@@ -2367,7 +2248,7 @@ class Spec(object):
dag_node, dependency_types = spec_and_dependency_types(s)
dependency_spec = spec_builder({dag_node: s_dependencies})
- spec._add_dependency(dependency_spec, dependency_types)
+ spec._add_dependency(dependency_spec, deptypes=dependency_types)
return spec
@@ -2380,54 +2261,14 @@ class Spec(object):
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
- if isinstance(data["spec"], list): # Legacy specfile format
- return _spec_from_old_dict(data)
+ # Legacy specfile format
+ if isinstance(data["spec"], list):
+ return SpecfileV1.load(data)
- # Current specfile format
- nodes = data["spec"]["nodes"]
- hash_type = None
- any_deps = False
-
- # Pass 0: Determine hash type
- for node in nodes:
- if "dependencies" in node.keys():
- any_deps = True
- for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
- if dhash_type:
- hash_type = dhash_type
- break
-
- if not any_deps: # If we never see a dependency...
- hash_type = ht.dag_hash.name
- elif not hash_type: # Seen a dependency, still don't know hash_type
- raise spack.error.SpecError(
- "Spec dictionary contains malformed " "dependencies. Old format?"
- )
-
- hash_dict = {}
- root_spec_hash = None
-
- # Pass 1: Create a single lookup dictionary by hash
- for i, node in enumerate(nodes):
- node_hash = node[hash_type]
- node_spec = Spec.from_node_dict(node)
- hash_dict[node_hash] = node
- hash_dict[node_hash]["node_spec"] = node_spec
- if i == 0:
- root_spec_hash = node_hash
- if not root_spec_hash:
- raise spack.error.SpecError("Spec dictionary contains no nodes.")
-
- # Pass 2: Finish construction of all DAG edges (including build specs)
- for node_hash, node in hash_dict.items():
- node_spec = node["node_spec"]
- for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
- node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
- if "build_spec" in node.keys():
- _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
- node_spec._build_spec = hash_dict[bhash]["node_spec"]
-
- return hash_dict[root_spec_hash]["node_spec"]
+ specfile_version = int(data["spec"]["_meta"]["version"])
+ if specfile_version == 2:
+ return SpecfileV2.load(data)
+ return SpecfileV3.load(data)
@staticmethod
def from_yaml(stream):
@@ -2583,7 +2424,7 @@ class Spec(object):
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
- dependent._add_dependency(concrete, deptypes)
+ dependent._add_dependency(concrete, deptypes=deptypes)
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
@@ -3254,7 +3095,7 @@ class Spec(object):
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
- self._add_dependency(spec_dependency, dependency.type)
+ self._add_dependency(spec_dependency, deptypes=dependency.type)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
@@ -3585,7 +3426,7 @@ class Spec(object):
dep_spec_copy = other._get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
- self._add_dependency(dep_copy.copy(), deptypes)
+ self._add_dependency(dep_copy.copy(), deptypes=deptypes)
changed = True
return changed
@@ -3898,7 +3739,7 @@ class Spec(object):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge(
- new_specs[spid(edge.spec)], edge.deptypes
+ new_specs[spid(edge.spec)], deptypes=edge.deptypes
)
def copy(self, deps=True, **kwargs):
@@ -4813,12 +4654,12 @@ class Spec(object):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
- nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
+ nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
- nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
+ nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
@@ -4891,40 +4732,252 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
- merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
+ merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
return merged_spec
-def _spec_from_old_dict(data):
- """Construct a spec from JSON/YAML using the format version 1.
- Note: Version 1 format has no notion of a build_spec, and names are
- guaranteed to be unique.
+class SpecfileReaderBase:
+ @classmethod
+ def from_node_dict(cls, node):
+ spec = Spec()
- Parameters:
- data -- a nested dict/list data structure read from YAML or JSON.
- """
- nodes = data["spec"]
+ name, node = cls.name_and_data(node)
+ for h in ht.hashes:
+ setattr(spec, h.attr, node.get(h.name, None))
- # Read nodes out of list. Root spec is the first element;
- # dependencies are the following elements.
- dep_list = [Spec.from_node_dict(node) for node in nodes]
- if not dep_list:
- raise spack.error.SpecError("YAML spec contains no nodes.")
- deps = dict((spec.name, spec) for spec in dep_list)
- spec = dep_list[0]
+ spec.name = name
+ spec.namespace = node.get("namespace", None)
+
+ if "version" in node or "versions" in node:
+ spec.versions = vn.VersionList.from_dict(node)
- for node in nodes:
- # get dependency dict from the node.
+ if "arch" in node:
+ spec.architecture = ArchSpec.from_dict(node)
+
+ if "compiler" in node:
+ spec.compiler = CompilerSpec.from_dict(node)
+ else:
+ spec.compiler = None
+
+ for name, values in node.get("parameters", {}).items():
+ if name in _valid_compiler_flags:
+ spec.compiler_flags[name] = []
+ for val in values:
+ spec.compiler_flags.add_flag(name, val, False)
+ else:
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
+
+ spec.external_path = None
+ spec.external_modules = None
+ if "external" in node:
+ # This conditional is needed because sometimes this function is
+ # called with a node already constructed that contains a 'versions'
+ # and 'external' field. Related to virtual packages provider
+ # indexes.
+ if node["external"]:
+ spec.external_path = node["external"]["path"]
+ spec.external_modules = node["external"]["module"]
+ if spec.external_modules is False:
+ spec.external_modules = None
+ spec.extra_attributes = node["external"].get(
+ "extra_attributes", syaml.syaml_dict()
+ )
+
+ # specs read in are concrete unless marked abstract
+ spec._concrete = node.get("concrete", True)
+
+ if "patches" in node:
+ patches = node["patches"]
+ if len(patches) > 0:
+ mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
+ mvar.value = patches
+ # FIXME: Monkey patches mvar to store patches order
+ mvar._patches_in_order_of_appearance = patches
+
+ # Don't read dependencies here; from_dict() is used by
+ # from_yaml() and from_json() to read the root *and* each dependency
+ # spec.
+
+ return spec
+
+ @classmethod
+ def _load(cls, data):
+ """Construct a spec from JSON/YAML using the format version 2.
+
+ This format is used in Spack v0.17, was introduced in
+ https://github.com/spack/spack/pull/22845
+
+ Args:
+ data: a nested dict/list data structure read from YAML or JSON.
+ """
+ # Current specfile format
+ nodes = data["spec"]["nodes"]
+ hash_type = None
+ any_deps = False
+
+ # Pass 0: Determine hash type
+ for node in nodes:
+ for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
+ any_deps = True
+ if dhash_type:
+ hash_type = dhash_type
+ break
+
+ if not any_deps: # If we never see a dependency...
+ hash_type = ht.dag_hash.name
+ elif not hash_type: # Seen a dependency, still don't know hash_type
+ raise spack.error.SpecError(
+ "Spec dictionary contains malformed dependencies. Old format?"
+ )
+
+ hash_dict = {}
+ root_spec_hash = None
+
+ # Pass 1: Create a single lookup dictionary by hash
+ for i, node in enumerate(nodes):
+ node_hash = node[hash_type]
+ node_spec = cls.from_node_dict(node)
+ hash_dict[node_hash] = node
+ hash_dict[node_hash]["node_spec"] = node_spec
+ if i == 0:
+ root_spec_hash = node_hash
+
+ if not root_spec_hash:
+ raise spack.error.SpecError("Spec dictionary contains no nodes.")
+
+ # Pass 2: Finish construction of all DAG edges (including build specs)
+ for node_hash, node in hash_dict.items():
+ node_spec = node["node_spec"]
+ for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
+ node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
+ if "build_spec" in node.keys():
+ _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
+ node_spec._build_spec = hash_dict[bhash]["node_spec"]
+
+ return hash_dict[root_spec_hash]["node_spec"]
+
+
+class SpecfileV1(SpecfileReaderBase):
+ @classmethod
+ def load(cls, data):
+ """Construct a spec from JSON/YAML using the format version 1.
+
+ Note: Version 1 format has no notion of a build_spec, and names are
+ guaranteed to be unique. This function is guaranteed to read specs as
+ old as v0.10 - while it was not checked for older formats.
+
+ Args:
+ data: a nested dict/list data structure read from YAML or JSON.
+ """
+ nodes = data["spec"]
+
+ # Read nodes out of list. Root spec is the first element;
+ # dependencies are the following elements.
+ dep_list = [cls.from_node_dict(node) for node in nodes]
+ if not dep_list:
+ raise spack.error.SpecError("specfile contains no nodes.")
+
+ deps = {spec.name: spec for spec in dep_list}
+ result = dep_list[0]
+
+ for node in nodes:
+ # get dependency dict from the node.
+ name, data = cls.name_and_data(node)
+ for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
+ deps[name]._add_dependency(deps[dname], deptypes=dtypes)
+
+ return result
+
+ @classmethod
+ def name_and_data(cls, node):
name = next(iter(node))
+ node = node[name]
+ return name, node
+
+ @classmethod
+ def dependencies_from_node_dict(cls, node):
+ if "dependencies" not in node:
+ return []
+
+ for t in cls.read_specfile_dep_specs(node["dependencies"]):
+ yield t
+
+ @classmethod
+ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
+ """Read the DependencySpec portion of a YAML-formatted Spec.
+ This needs to be backward-compatible with older spack spec
+ formats so that reindex will work on old specs/databases.
+ """
+ for dep_name, elt in deps.items():
+ if isinstance(elt, dict):
+ for h in ht.hashes:
+ if h.name in elt:
+ dep_hash, deptypes = elt[h.name], elt["type"]
+ hash_type = h.name
+ break
+ else: # We never determined a hash type...
+ raise spack.error.SpecError("Couldn't parse dependency spec.")
+ else:
+ raise spack.error.SpecError("Couldn't parse dependency types in spec.")
+ yield dep_name, dep_hash, list(deptypes), hash_type
+
+
+class SpecfileV2(SpecfileReaderBase):
+ @classmethod
+ def load(cls, data):
+ result = cls._load(data)
+ return result
+
+ @classmethod
+ def name_and_data(cls, node):
+ return node["name"], node
+
+ @classmethod
+ def dependencies_from_node_dict(cls, node):
+ return cls.read_specfile_dep_specs(node.get("dependencies", []))
- if "dependencies" not in node[name]:
- continue
+ @classmethod
+ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
+ """Read the DependencySpec portion of a YAML-formatted Spec.
+ This needs to be backward-compatible with older spack spec
+ formats so that reindex will work on old specs/databases.
+ """
+ if not isinstance(deps, list):
+ raise spack.error.SpecError("Spec dictionary contains malformed dependencies")
+
+ result = []
+ for dep in deps:
+ elt = dep
+ dep_name = dep["name"]
+ if isinstance(elt, dict):
+ # new format: elements of dependency spec are keyed.
+ for h in ht.hashes:
+ if h.name in elt:
+ dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
+ break
+ else: # We never determined a hash type...
+ raise spack.error.SpecError("Couldn't parse dependency spec.")
+ else:
+ raise spack.error.SpecError("Couldn't parse dependency types in spec.")
+ result.append((dep_name, dep_hash, list(deptypes), hash_type))
+ return result
+
+ @classmethod
+ def extract_info_from_dep(cls, elt, hash):
+ dep_hash, deptypes = elt[hash.name], elt["type"]
+ hash_type = hash.name
+ virtuals = []
+ return dep_hash, deptypes, hash_type, virtuals
+
+ @classmethod
+ def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
+ build_spec_dict = node["build_spec"]
+ return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
- for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
- deps[name]._add_dependency(deps[dname], dtypes)
- return spec
+class SpecfileV3(SpecfileV2):
+ pass
class LazySpecCache(collections.defaultdict):
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index 19e021bf84..14cf5c3c6b 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -258,7 +258,7 @@ def test_has_test_method_fails(capsys):
assert "is not a class" in captured
-def test_read_old_results(mock_test_stage):
+def test_read_old_results(mock_packages, mock_test_stage):
"""Take test data generated before the switch to full hash everywhere
and make sure we can still read it in"""
# Test data was generated with:
diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz
new file mode 100644
index 0000000000..3f10fbacd9
--- /dev/null
+++ b/lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz
Binary files differ
diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz
new file mode 100644
index 0000000000..928fe4a3d7
--- /dev/null
+++ b/lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz
Binary files differ
diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz
new file mode 100644
index 0000000000..31585ded3e
--- /dev/null
+++ b/lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz
Binary files differ
diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz
new file mode 100644
index 0000000000..eb3aa197a4
--- /dev/null
+++ b/lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz
Binary files differ
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 87cd11cefb..5afc831096 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -125,7 +125,7 @@ def test_installed_deps(monkeypatch, mock_packages):
# use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation.
a_spec = Spec(a)
- a_spec._add_dependency(c_spec, ("build", "link"))
+ a_spec._add_dependency(c_spec, deptypes=("build", "link"))
a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))
a_spec = Spec("a")
- a_spec._add_dependency(b_spec, ("build", "link"))
+ a_spec._add_dependency(b_spec, deptypes=("build", "link"))
a_spec.concretize()
assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -992,9 +992,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@1.0").concretized()
build_spec = Spec("c@2.0").concretized()
- root.add_dependency_edge(link_run_spec, deptype="link")
- root.add_dependency_edge(link_run_spec, deptype="run")
- root.add_dependency_edge(build_spec, deptype="build")
+ root.add_dependency_edge(link_run_spec, deptypes="link")
+ root.add_dependency_edge(link_run_spec, deptypes="run")
+ root.add_dependency_edge(build_spec, deptypes="build")
# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
@@ -1020,7 +1020,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@2.0").concretized()
bootstrap = Spec("b@1.0").concretized()
- root.add_dependency_edge(bootstrap, deptype="build")
+ root.add_dependency_edge(bootstrap, deptypes="build")
assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b"
@@ -1039,7 +1039,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
bootstrap = Spec("b@1.0").concretized()
for current_deptype in ("build", "link", "run"):
- root.add_dependency_edge(bootstrap, deptype=current_deptype)
+ root.add_dependency_edge(bootstrap, deptypes=current_deptype)
# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
@@ -1066,9 +1066,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
c1 = Spec("b@1.0").concretized()
c2 = Spec("b@2.0").concretized()
- p.add_dependency_edge(c1, deptype=c1_deptypes)
+ p.add_dependency_edge(c1, deptypes=c1_deptypes)
with pytest.raises(spack.error.SpackError):
- p.add_dependency_edge(c2, deptype=c2_deptypes)
+ p.add_dependency_edge(c2, deptypes=c2_deptypes)
@pytest.mark.regression("33499")
@@ -1087,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")
- root.add_dependency_edge(a1, deptype=("build", "run", "test"))
+ root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
# unique package as a dep of a build/run/test type dep.
- a1.add_dependency_edge(a2, deptype="all")
- a1.add_dependency_edge(z3_flavor_1, deptype="all")
+ a1.add_dependency_edge(a2, deptypes="all")
+ a1.add_dependency_edge(z3_flavor_1, deptypes="all")
# chain of link type deps root -> z1 -> z2 -> z3
- root.add_dependency_edge(z1, deptype="link")
- z1.add_dependency_edge(z2, deptype="link")
- z2.add_dependency_edge(z3_flavor_2, deptype="link")
+ root.add_dependency_edge(z1, deptypes="link")
+ z1.add_dependency_edge(z2, deptypes="link")
+ z2.add_dependency_edge(z3_flavor_2, deptypes="link")
# Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 0075bfc384..c09ddeb920 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -1108,7 +1108,7 @@ class TestSpecSematics(object):
def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake")
s = Spec("mpileaks")
- s._add_dependency(d, ())
+ s._add_dependency(d, deptypes=())
assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)
@pytest.mark.parametrize("transitive", [True, False])
@@ -1156,7 +1156,9 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):
def test_malformed_spec_dict():
with pytest.raises(SpecError, match="malformed"):
- Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
+ Spec.from_dict(
+ {"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
+ )
def test_spec_dict_hashless_dep():
@@ -1164,9 +1166,10 @@ def test_spec_dict_hashless_dep():
Spec.from_dict(
{
"spec": {
+ "_meta": {"version": 2},
"nodes": [
{"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
- ]
+ ],
}
}
)
@@ -1252,7 +1255,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
# add it to an abstract spec as a dependency
top = Spec("dt-diamond")
- top.add_dependency_edge(bottom, ())
+ top.add_dependency_edge(bottom, deptypes=())
# concretize with the already-concrete dependency
top.concretize()
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index 5a88167572..48826dc493 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -13,7 +13,9 @@ from __future__ import print_function
import ast
import collections
import collections.abc
+import gzip
import inspect
+import json
import os
import pytest
@@ -507,3 +509,33 @@ ordered_spec = collections.OrderedDict(
("version", "1.2.11"),
]
)
+
+
+@pytest.mark.parametrize(
+ "specfile,expected_hash,reader_cls",
+ [
+ # First version supporting JSON format for specs
+ ("specfiles/hdf5.v013.json.gz", "vglgw4reavn65vx5d4dlqn6rjywnq76d", spack.spec.SpecfileV1),
+ # Introduces full hash in the format, still has 3 hashes
+ ("specfiles/hdf5.v016.json.gz", "stp45yvzte43xdauknaj3auxlxb4xvzs", spack.spec.SpecfileV1),
+ # Introduces "build_specs", see https://github.com/spack/spack/pull/22845
+ ("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
+ # Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
+ ("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
+ ],
+)
+def test_load_json_specfiles(specfile, expected_hash, reader_cls):
+ fullpath = os.path.join(spack.paths.test_path, "data", specfile)
+ with gzip.open(fullpath, "rt", encoding="utf-8") as f:
+ data = json.load(f)
+
+ s1 = Spec.from_dict(data)
+ s2 = reader_cls.load(data)
+
+ assert s2.dag_hash() == expected_hash
+ assert s1.dag_hash() == s2.dag_hash()
+ assert s1 == s2
+ assert Spec.from_json(s2.to_json()).dag_hash() == s2.dag_hash()
+
+ openmpi_edges = s2.edges_to_dependencies(name="openmpi")
+ assert len(openmpi_edges) == 1
diff --git a/lib/spack/spack/test/traverse.py b/lib/spack/spack/test/traverse.py
index 1bc3d69cfe..1334f33883 100644
--- a/lib/spack/spack/test/traverse.py
+++ b/lib/spack/spack/test/traverse.py
@@ -18,8 +18,8 @@ def create_dag(nodes, edges):
dict: mapping from package name to abstract Spec with proper deps.
"""
specs = {name: Spec(name) for name in nodes}
- for parent, child, deptype in edges:
- specs[parent].add_dependency_edge(specs[child], deptype)
+ for parent, child, deptypes in edges:
+ specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
return specs