Diffstat (limited to 'lib/spack/spack/spec.py')
-rw-r--r--  lib/spack/spack/spec.py  2559
1 file changed, 1182 insertions(+), 1377 deletions(-)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index f37d993993..1c5f96ee72 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -26,7 +26,7 @@ line is a spec for a particular installation of the mpileaks package.
version, like "1.2", or it can be a range of versions, e.g. "1.2:1.4".
If multiple specific versions or multiple ranges are acceptable, they
can be separated by commas, e.g. if a package will only build with
- versions 1.0, 1.2-1.4, and 1.6-1.8 of mavpich, you could say:
+ versions 1.0, 1.2-1.4, and 1.6-1.8 of mvapich, you could say:
depends_on("mvapich@1.0,1.2:1.4,1.6:1.8")
@@ -59,7 +59,9 @@ import platform
import re
import socket
import warnings
-from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
+
+import archspec.cpu
import llnl.path
import llnl.string
@@ -68,24 +70,21 @@ import llnl.util.lang as lang
import llnl.util.tty as tty
import llnl.util.tty.color as clr
+import spack
import spack.compiler
import spack.compilers
import spack.config
-import spack.dependency as dp
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
import spack.parser
-import spack.patch
import spack.paths
import spack.platforms
import spack.provider_index
import spack.repo
import spack.solver
import spack.store
-import spack.target
import spack.traverse as traverse
-import spack.util.crypto
import spack.util.executable
import spack.util.hash
import spack.util.module_cmd as md
@@ -96,11 +95,13 @@ import spack.variant as vt
import spack.version as vn
import spack.version.git_ref_lookup
+from .enums import InstallRecordStatus
+
__all__ = [
"CompilerSpec",
"Spec",
"SpecParseError",
- "ArchitecturePropagationError",
+ "UnsupportedPropagationError",
"DuplicateDependencyError",
"DuplicateCompilerSpecError",
"UnsupportedCompilerError",
@@ -121,49 +122,57 @@ __all__ = [
"SpecDeprecatedError",
]
+
+SPEC_FORMAT_RE = re.compile(
+ r"(?:" # this is one big or, with matches ordered by priority
+ # OPTION 1: escaped character (needs to be first to catch opening \{)
+ # Note that an unterminated \ at the end of a string is left untouched
+ r"(?:\\(.))"
+ r"|" # or
+ # OPTION 2: an actual format string
+ r"{" # non-escaped open brace {
+ r"([%@/]|[\w ][\w -]*=)?" # optional sigil (or identifier or space) to print sigil in color
+ r"(?:\^([^}\.]+)\.)?" # optional ^depname. (to get attr from dependency)
+ # after the sigil or depname, we can have a hash expression or another attribute
+ r"(?:" # one of
+ r"(hash\b)(?:\:(\d+))?" # hash followed by :<optional length>
+ r"|" # or
+ r"([^}]*)" # another attribute to format
+ r")" # end one of
+ r"(})?" # finish format string with non-escaped close brace }, or missing if not present
+ r"|"
+ # OPTION 3: mismatched close brace (option 2 would consume a matched open brace)
+ r"(})" # brace
+ r")",
+ re.IGNORECASE,
+)
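As an editorial aside, here is a minimal sketch of how this regex tokenizes a format string (the spec format string is illustrative; group numbers follow the comments above):

    # Groups: 1=escaped char, 2=sigil, 3=^depname, 4="hash", 5=hash length,
    # 6=attribute, 7=closing brace, 8=mismatched closing brace.
    for m in SPEC_FORMAT_RE.finditer(r"{name}{@version}{/hash:7}\{x\}"):
        print(m.groups())
    # '{name}'     -> attribute "name"
    # '{@version}' -> sigil "@", attribute "version"
    # '{/hash:7}'  -> sigil "/", "hash" with length 7
    # '\{' / '\}'  -> escaped characters via group 1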
+
#: Valid pattern for an identifier in Spack
IDENTIFIER_RE = r"\w[\w-]*"
+# Coloring of specs when using color output. Fields are printed with
+# different colors to enhance readability.
+# See llnl.util.tty.color for descriptions of the color codes.
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
-ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
-DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled varaints
-DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
+VARIANT_COLOR = "@B" #: color for highlighting variants
HASH_COLOR = "@K" #: color for highlighting package hashes
-#: This map determines the coloring of specs when using color output.
-#: We make the fields different colors to enhance readability.
-#: See llnl.util.tty.color for descriptions of the color codes.
-COLOR_FORMATS = {
- "%": COMPILER_COLOR,
- "@": VERSION_COLOR,
- "=": ARCHITECTURE_COLOR,
- "+": ENABLED_VARIANT_COLOR,
- "~": DISABLED_VARIANT_COLOR,
- "^": DEPENDENCY_COLOR,
- "#": HASH_COLOR,
-}
-
-#: Regex used for splitting by spec field separators.
-#: These need to be escaped to avoid metacharacters in
-#: ``COLOR_FORMATS.keys()``.
-_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
-
#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string")
DEFAULT_FORMAT = (
"{name}{@versions}"
"{%compiler.name}{@compiler.versions}{compiler_flags}"
- "{variants}{arch=architecture}{/abstract_hash}"
+ "{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
)
#: Display format, which eliminates extra `@=` in the output, for readability.
DISPLAY_FORMAT = (
"{name}{@version}"
"{%compiler.name}{@compiler.version}{compiler_flags}"
- "{variants}{arch=architecture}{/abstract_hash}"
+ "{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
)
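A hedged sketch of the round-trip property claimed above (assumes a working Spack checkout; the spec string is illustrative):

    s = Spec("zlib@1.2.13 +shared")
    # format() output parses back to an equal abstract spec
    assert Spec(s.format(DEFAULT_FORMAT)) == s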
#: Regular expression to pull spec contents out of clearsigned signature
@@ -193,24 +202,24 @@ class InstallStatus(enum.Enum):
missing = "@r{[-]} "
-def colorize_spec(spec):
- """Returns a spec colorized according to the colors specified in
- COLOR_FORMATS."""
+# regexes used in spec formatting
+OLD_STYLE_FMT_RE = re.compile(r"\${[A-Z]+}")
- class insert_color:
- def __init__(self):
- self.last = None
- def __call__(self, match):
- # ignore compiler versions (color same as compiler)
- sep = match.group(0)
- if self.last == "%" and sep == "@":
- return clr.cescape(sep)
- self.last = sep
+def ensure_modern_format_string(fmt: str) -> None:
+ """Ensure that the format string does not contain old ${...} syntax."""
+ result = OLD_STYLE_FMT_RE.search(fmt)
+ if result:
+ raise SpecFormatStringError(
+ f"Format string `{fmt}` contains old syntax `{result.group(0)}`. "
+ "This is no longer supported."
+ )
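For illustration, the check accepts new-style braces and rejects the legacy syntax:

    ensure_modern_format_string("{name}{@version}")    # passes silently
    ensure_modern_format_string("${PACKAGE}@${VERSION}")  # raises SpecFormatStringError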
- return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
- return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
+def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
+ if isinstance(name, archspec.cpu.Microarchitecture):
+ return name
+ return archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
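A small usage sketch (target names are illustrative):

    _make_microarchitecture("haswell")  # known name: resolved via archspec.cpu.TARGETS
    _make_microarchitecture("mychip")   # unknown name: generic microarchitecture "mychip"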
@lang.lazy_lexicographic_ordering
@@ -301,7 +310,10 @@ class ArchSpec:
def _cmp_iter(self):
yield self.platform
yield self.os
- yield self.target
+ if self.target is None:
+ yield self.target
+ else:
+ yield self.target.name
@property
def platform(self):
@@ -360,10 +372,10 @@ class ArchSpec:
# will be assumed to be the host machine's platform.
def target_or_none(t):
- if isinstance(t, spack.target.Target):
+ if isinstance(t, archspec.cpu.Microarchitecture):
return t
if t and t != "None":
- return spack.target.Target(t)
+ return _make_microarchitecture(t)
return None
value = target_or_none(value)
@@ -452,10 +464,11 @@ class ArchSpec:
results = self._target_intersection(other)
attribute_str = ",".join(results)
- if self.target == attribute_str:
+ intersection_target = _make_microarchitecture(attribute_str)
+ if self.target == intersection_target:
return False
- self.target = attribute_str
+ self.target = intersection_target
return True
def _target_intersection(self, other):
@@ -473,7 +486,7 @@ class ArchSpec:
# s_target_range is a concrete target
# get a microarchitecture reference for at least one side
# of each comparison so we can use archspec comparators
- s_comp = spack.target.Target(s_min).microarchitecture
+ s_comp = _make_microarchitecture(s_min)
if not o_sep:
if s_min == o_min:
results.append(s_min)
@@ -481,21 +494,21 @@ class ArchSpec:
results.append(s_min)
elif not o_sep:
# "cast" to microarchitecture
- o_comp = spack.target.Target(o_min).microarchitecture
+ o_comp = _make_microarchitecture(o_min)
if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
results.append(o_min)
else:
# Take intersection of two ranges
# Lots of comparisons needed
- _s_min = spack.target.Target(s_min).microarchitecture
- _s_max = spack.target.Target(s_max).microarchitecture
- _o_min = spack.target.Target(o_min).microarchitecture
- _o_max = spack.target.Target(o_max).microarchitecture
+ _s_min = _make_microarchitecture(s_min)
+ _s_max = _make_microarchitecture(s_max)
+ _o_min = _make_microarchitecture(o_min)
+ _o_max = _make_microarchitecture(o_max)
n_min = s_min if _s_min >= _o_min else o_min
n_max = s_max if _s_max <= _o_max else o_max
- _n_min = spack.target.Target(n_min).microarchitecture
- _n_max = spack.target.Target(n_max).microarchitecture
+ _n_min = _make_microarchitecture(n_min)
+ _n_max = _make_microarchitecture(n_max)
if _n_min == _n_max:
results.append(n_min)
elif not n_min or not n_max or _n_min < _n_max:
@@ -548,12 +561,18 @@ class ArchSpec:
)
def to_dict(self):
+ # Generic targets represent either an architecture family (like x86_64)
+ # or a custom micro-architecture
+ if self.target.vendor == "generic":
+ target_data = str(self.target)
+ else:
+ # Get rid of compiler flag information before turning the uarch into a dict
+ uarch_dict = self.target.to_dict()
+ uarch_dict.pop("compilers", None)
+ target_data = syaml.syaml_dict(uarch_dict.items())
+
d = syaml.syaml_dict(
- [
- ("platform", self.platform),
- ("platform_os", self.os),
- ("target", self.target.to_dict_or_value()),
- ]
+ [("platform", self.platform), ("platform_os", self.os), ("target", target_data)]
)
return syaml.syaml_dict([("arch", d)])
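Sketch of the two serialized shapes this produces (field values are illustrative):

    # Generic family target:
    #   arch: {platform: linux, platform_os: ubuntu22.04, target: x86_64}
    # Specific microarchitecture (dict form, with the "compilers" entry stripped):
    #   arch: {platform: linux, platform_os: ubuntu22.04,
    #          target: {name: haswell, vendor: GenuineIntel, ...}}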
@@ -561,7 +580,10 @@ class ArchSpec:
def from_dict(d):
"""Import an ArchSpec from raw YAML/JSON data"""
arch = d["arch"]
- target = spack.target.Target.from_dict_or_value(arch["target"])
+ target_name = arch["target"]
+ if not isinstance(target_name, str):
+ target_name = target_name["name"]
+ target = _make_microarchitecture(target_name)
return ArchSpec((arch["platform"], arch["platform_os"], target))
def __str__(self):
@@ -612,18 +634,6 @@ class CompilerSpec:
else:
raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs)
- def _add_versions(self, version_list):
- # If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.any_version:
- # Note: This may be impossible to reach by the current parser
- # Keeping it in case the implementation changes.
- raise MultipleVersionError(
- "A spec cannot contain multiple version signifiers. Use a version list instead."
- )
- self.versions = vn.VersionList()
- for version in version_list:
- self.versions.add(version)
-
def _autospec(self, compiler_spec_like):
if isinstance(compiler_spec_like, CompilerSpec):
return compiler_spec_like
@@ -792,17 +802,49 @@ class CompilerFlag(str):
propagate (bool): if ``True`` the flag value will
be passed to the package's dependencies. If
``False`` it will not
+ flag_group (str): if this flag was introduced along
+ with several flags via a single source, then
+ this will store all such flags
+ source (str): identifies the type of constraint that
+ introduced this flag (e.g. if a package has
+ ``depends_on(... cflags=-g)``, then the ``source``
+ for "-g" would indicate ``depends_on``.
"""
def __new__(cls, value, **kwargs):
obj = str.__new__(cls, value)
obj.propagate = kwargs.pop("propagate", False)
+ obj.flag_group = kwargs.pop("flag_group", value)
+ obj.source = kwargs.pop("source", None)
return obj
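Illustrative construction of a flag carrying the new metadata (values hypothetical):

    flag = CompilerFlag("-g", propagate=True, flag_group="-O2 -g", source="depends_on")
    assert flag == "-g" and flag.propagate and flag.flag_group == "-O2 -g"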
_valid_compiler_flags = ["cflags", "cxxflags", "fflags", "ldflags", "ldlibs", "cppflags"]
+def _shared_subset_pair_iterate(container1, container2):
+ """
+ [0, a, c, d, f]
+ [a, d, e, f]
+
+ yields [(a, a), (d, d), (f, f)]
+
+ no repeated elements
+ """
+ a_idx, b_idx = 0, 0
+ max_a, max_b = len(container1), len(container2)
+ while a_idx < max_a and b_idx < max_b:
+ if container1[a_idx] == container2[b_idx]:
+ yield (container1[a_idx], container2[b_idx])
+ a_idx += 1
+ b_idx += 1
+ else:
+ while container1[a_idx] < container2[b_idx]:
+ a_idx += 1
+ while container1[a_idx] > container2[b_idx]:
+ b_idx += 1
+
+
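Running the docstring's example as a hedged sanity check (strings are used so the inputs sort lexicographically):

    pairs = list(_shared_subset_pair_iterate(["0", "a", "c", "d", "f"], ["a", "d", "e", "f"]))
    assert pairs == [("a", "a"), ("d", "d"), ("f", "f")]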
class FlagMap(lang.HashableMap):
__slots__ = ("spec",)
@@ -811,23 +853,9 @@ class FlagMap(lang.HashableMap):
self.spec = spec
def satisfies(self, other):
- return all(f in self and self[f] == other[f] for f in other)
+ return all(f in self and set(self[f]) >= set(other[f]) for f in other)
def intersects(self, other):
- common_types = set(self) & set(other)
- for flag_type in common_types:
- if not self[flag_type] or not other[flag_type]:
- # At least one of the two is empty
- continue
-
- if self[flag_type] != other[flag_type]:
- return False
-
- if not all(
- f1.propagate == f2.propagate for f1, f2 in zip(self[flag_type], other[flag_type])
- ):
- # At least one propagation flag didn't match
- return False
return True
def constrain(self, other):
@@ -835,28 +863,29 @@ class FlagMap(lang.HashableMap):
Return whether the spec changed.
"""
- if other.spec and other.spec._concrete:
- for k in self:
- if k not in other:
- raise UnsatisfiableCompilerFlagSpecError(self[k], "<absent>")
-
changed = False
- for k in other:
- if k in self and not set(self[k]) <= set(other[k]):
- raise UnsatisfiableCompilerFlagSpecError(
- " ".join(f for f in self[k]), " ".join(f for f in other[k])
- )
- elif k not in self:
- self[k] = other[k]
+ for flag_type in other:
+ if flag_type not in self:
+ self[flag_type] = other[flag_type]
changed = True
+ else:
+ extra_other = set(other[flag_type]) - set(self[flag_type])
+ if extra_other:
+ self[flag_type] = list(self[flag_type]) + list(
+ x for x in other[flag_type] if x in extra_other
+ )
+ changed = True
+
+ # Next, if any flags in other propagate, we force them to propagate in our case
+ shared = list(sorted(set(other[flag_type]) - extra_other))
+ for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
+ if y.propagate is True and x.propagate is False:
+ changed = True
+ y.propagate = False
+
+ # TODO: what happens if flag groups with a partial (but not complete)
+ # intersection specify different behaviors for flag propagation?
- # Check that the propagation values match
- if self[k] == other[k]:
- for i in range(len(other[k])):
- if self[k][i].propagate != other[k][i].propagate:
- raise UnsatisfiableCompilerFlagSpecError(
- self[k][i].propagate, other[k][i].propagate
- )
return changed
@staticmethod
@@ -869,7 +898,7 @@ class FlagMap(lang.HashableMap):
clone[name] = compiler_flag
return clone
- def add_flag(self, flag_type, value, propagation):
+ def add_flag(self, flag_type, value, propagation, flag_group=None, source=None):
"""Stores the flag's value in CompilerFlag and adds it
to the FlagMap
@@ -880,7 +909,8 @@ class FlagMap(lang.HashableMap):
propagation (bool): if ``True`` the flag value will be passed to
the package's dependencies. If ``False`` it will not be passed
"""
- flag = CompilerFlag(value, propagate=propagation)
+ flag_group = flag_group or value
+ flag = CompilerFlag(value, propagate=propagation, flag_group=flag_group, source=source)
if flag_type not in self:
self[flag_type] = [flag]
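Illustrative use of the extended signature, recording two cflags that arrived via a single "cflags=-O2 -g" token (constructor argument assumed from the class above):

    fm = FlagMap(spec=None)
    fm.add_flag("cflags", "-O2", False, flag_group="-O2 -g")
    fm.add_flag("cflags", "-g", False, flag_group="-O2 -g")
    assert fm["cflags"] == ["-O2", "-g"]  # CompilerFlag subclasses str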
@@ -906,10 +936,14 @@ class FlagMap(lang.HashableMap):
def flags():
for flag in v:
yield flag
+ yield flag.propagate
yield flags
def __str__(self):
+ if not self:
+ return ""
+
sorted_items = sorted((k, v) for k, v in self.items() if v)
result = ""
@@ -933,10 +967,6 @@ def _sort_by_dep_types(dspec: DependencySpec):
return dspec.depflag
-#: Enum for edge directions
-EdgeDirection = lang.enum(parent=0, child=1)
-
-
@lang.lazy_lexicographic_ordering
class _EdgeMap(collections.abc.Mapping):
"""Represent a collection of edges (DependencySpec objects) in the DAG.
@@ -950,26 +980,20 @@ class _EdgeMap(collections.abc.Mapping):
__slots__ = "edges", "store_by_child"
- def __init__(self, store_by=EdgeDirection.child):
- # Sanitize input arguments
- msg = 'unexpected value for "store_by" argument'
- assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
-
- #: This dictionary maps a package name to a list of edges
- #: i.e. to a list of DependencySpec objects
- self.edges = {}
- self.store_by_child = store_by == EdgeDirection.child
+ def __init__(self, store_by_child: bool = True) -> None:
+ self.edges: Dict[str, List[DependencySpec]] = {}
+ self.store_by_child = store_by_child
- def __getitem__(self, key):
+ def __getitem__(self, key: str) -> List[DependencySpec]:
return self.edges[key]
def __iter__(self):
return iter(self.edges)
- def __len__(self):
+ def __len__(self) -> int:
return len(self.edges)
- def add(self, edge: DependencySpec):
+ def add(self, edge: DependencySpec) -> None:
key = edge.spec.name if self.store_by_child else edge.parent.name
if key in self.edges:
lst = self.edges[key]
@@ -978,8 +1002,8 @@ class _EdgeMap(collections.abc.Mapping):
else:
self.edges[key] = [edge]
- def __str__(self):
- return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
+ def __str__(self) -> str:
+ return f"{{deps: {', '.join(str(d) for d in sorted(self.values()))}}}"
def _cmp_iter(self):
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
@@ -996,24 +1020,32 @@ class _EdgeMap(collections.abc.Mapping):
return clone
- def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
- """Select a list of edges and return them.
+ def select(
+ self,
+ *,
+ parent: Optional[str] = None,
+ child: Optional[str] = None,
+ depflag: dt.DepFlag = dt.ALL,
+ virtuals: Optional[List[str]] = None,
+ ) -> List[DependencySpec]:
+ """Selects a list of edges and returns them.
If an edge:
+
- Has *any* of the dependency types passed as argument,
- - Matches the parent and/or child name, if passed
+ - Matches the parent and/or child name
+ - Provides *any* of the virtuals passed as argument
+
then it is selected.
The deptypes argument needs to be a flag, since the method won't
convert it for performance reasons.
Args:
- parent (str): name of the parent package
- child (str): name of the child package
+ parent: name of the parent package
+ child: name of the child package
depflag: allowed dependency types in flag form
-
- Returns:
- List of DependencySpec objects
+ virtuals: list of virtuals on the edge
"""
if not depflag:
return []
@@ -1032,22 +1064,23 @@ class _EdgeMap(collections.abc.Mapping):
# Filter by allowed dependency types
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
+ # Filter by virtuals
+ if virtuals is not None:
+ selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
+
return list(selected)
def clear(self):
self.edges.clear()
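A hedged usage sketch of the new virtuals filter (names hypothetical; dt.LINK per spack.deptypes):

    # keep only link-type edges to "zlib" that provide the "zlib-api" virtual
    edges = spec._dependencies.select(child="zlib", depflag=dt.LINK, virtuals=["zlib-api"])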
-def _command_default_handler(descriptor, spec, cls):
+def _command_default_handler(spec: "Spec"):
"""Default handler when looking for the 'command' attribute.
Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.
Parameters:
- descriptor (ForwardQueryToPackage): descriptor that triggered the call
- spec (Spec): spec that is being queried
- cls (type(spec)): type of spec, to match the signature of the
- descriptor ``__get__`` method
+ spec: spec that is being queried
Returns:
Executable: An executable of the command
@@ -1060,22 +1093,17 @@ def _command_default_handler(descriptor, spec, cls):
if fs.is_exe(path):
return spack.util.executable.Executable(path)
- else:
- msg = "Unable to locate {0} command in {1}"
- raise RuntimeError(msg.format(spec.name, home.bin))
+ raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
-def _headers_default_handler(descriptor, spec, cls):
+def _headers_default_handler(spec: "Spec"):
"""Default handler when looking for the 'headers' attribute.
Tries to search for ``*.h`` files recursively starting from
``spec.package.home.include``.
Parameters:
- descriptor (ForwardQueryToPackage): descriptor that triggered the call
- spec (Spec): spec that is being queried
- cls (type(spec)): type of spec, to match the signature of the
- descriptor ``__get__`` method
+ spec: spec that is being queried
Returns:
HeaderList: The headers in ``prefix.include``
@@ -1088,12 +1116,10 @@ def _headers_default_handler(descriptor, spec, cls):
if headers:
return headers
- else:
- msg = "Unable to locate {0} headers in {1}"
- raise spack.error.NoHeadersError(msg.format(spec.name, home))
+ raise spack.error.NoHeadersError(f"Unable to locate {spec.name} headers in {home}")
-def _libs_default_handler(descriptor, spec, cls):
+def _libs_default_handler(spec: "Spec"):
"""Default handler when looking for the 'libs' attribute.
Tries to search for ``lib{spec.name}`` recursively starting from
@@ -1101,10 +1127,7 @@ def _libs_default_handler(descriptor, spec, cls):
``{spec.name}`` instead.
Parameters:
- descriptor (ForwardQueryToPackage): descriptor that triggered the call
- spec (Spec): spec that is being queried
- cls (type(spec)): type of spec, to match the signature of the
- descriptor ``__get__`` method
+ spec: spec that is being queried
Returns:
LibraryList: The libraries found
@@ -1138,32 +1161,38 @@ def _libs_default_handler(descriptor, spec, cls):
for shared in search_shared:
# Since we are searching for link libraries, on Windows search only for
- # ".Lib" extensions by default as those represent import libraries for implict links.
+ # ".Lib" extensions by default as those represent import libraries for implicit links.
libs = fs.find_libraries(name, home, shared=shared, recursive=True, runtime=False)
if libs:
return libs
- msg = "Unable to recursively locate {0} libraries in {1}"
- raise spack.error.NoLibrariesError(msg.format(spec.name, home))
+ raise spack.error.NoLibrariesError(
+ f"Unable to recursively locate {spec.name} libraries in {home}"
+ )
class ForwardQueryToPackage:
"""Descriptor used to forward queries from Spec to Package"""
- def __init__(self, attribute_name, default_handler=None):
+ def __init__(
+ self,
+ attribute_name: str,
+ default_handler: Optional[Callable[["Spec"], Any]] = None,
+ _indirect: bool = False,
+ ) -> None:
"""Create a new descriptor.
Parameters:
- attribute_name (str): name of the attribute to be
- searched for in the Package instance
- default_handler (callable, optional): default function to be
- called if the attribute was not found in the Package
- instance
+ attribute_name: name of the attribute to be searched for in the Package instance
+ default_handler: default function to be called if the attribute was not found in the
+ Package instance
+ _indirect: temporarily added to redirect a query to another package.
"""
self.attribute_name = attribute_name
self.default = default_handler
+ self.indirect = _indirect
- def __get__(self, instance, cls):
+ def __get__(self, instance: "SpecBuildInterface", cls):
"""Retrieves the property from Package using a well defined chain
of responsibility.
@@ -1185,13 +1214,18 @@ class ForwardQueryToPackage:
indicating a query failure, e.g. that library files were not found in a
'libs' query.
"""
- pkg = instance.package
+ # TODO: this indirection exists solely for `spec["python"].command` to actually return
+ # spec["python-venv"].command. It should be removed when `python` is a virtual.
+ if self.indirect and instance.indirect_spec:
+ pkg = instance.indirect_spec.package
+ else:
+ pkg = instance.wrapped_obj.package
try:
query = instance.last_query
except AttributeError:
# There has been no query yet: this means
# a spec is trying to access its own attributes
- _ = instance[instance.name] # NOQA: ignore=F841
+ _ = instance.wrapped_obj[instance.wrapped_obj.name] # NOQA: ignore=F841
query = instance.last_query
callbacks_chain = []
@@ -1203,7 +1237,8 @@ class ForwardQueryToPackage:
callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
# Final resort : default callback
if self.default is not None:
- callbacks_chain.append(lambda: self.default(self, instance, cls))
+ _default = self.default # make mypy happy
+ callbacks_chain.append(lambda: _default(instance.wrapped_obj))
# Trigger the callbacks in order, the first one producing a
# value wins
@@ -1262,25 +1297,33 @@ QueryState = collections.namedtuple("QueryState", ["name", "extra_parameters", "
class SpecBuildInterface(lang.ObjectWrapper):
# home is available in the base Package so no default is needed
home = ForwardQueryToPackage("home", default_handler=None)
-
- command = ForwardQueryToPackage("command", default_handler=_command_default_handler)
-
headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
-
libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
+ command = ForwardQueryToPackage(
+ "command", default_handler=_command_default_handler, _indirect=True
+ )
- def __init__(self, spec, name, query_parameters):
+ def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
- self.token = original_spec, name, query_parameters
+ self.token = original_spec, name, query_parameters, _parent
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
+ # TODO: this ad-hoc logic makes `spec["python"].command` return
+ # `spec["python-venv"].command` and should be removed when `python` is a virtual.
+ self.indirect_spec = None
+ if spec.name == "python":
+ python_venvs = _parent.dependencies("python-venv")
+ if not python_venvs:
+ return
+ self.indirect_spec = python_venvs[0]
+
def __reduce__(self):
return SpecBuildInterface, self.token
@@ -1288,6 +1331,104 @@ class SpecBuildInterface(lang.ObjectWrapper):
return self.wrapped_obj.copy(*args, **kwargs)
+def tree(
+ specs: List["Spec"],
+ *,
+ color: Optional[bool] = None,
+ depth: bool = False,
+ hashes: bool = False,
+ hashlen: Optional[int] = None,
+ cover: str = "nodes",
+ indent: int = 0,
+ format: str = DEFAULT_FORMAT,
+ deptypes: Union[Tuple[str, ...], str] = "all",
+ show_types: bool = False,
+ depth_first: bool = False,
+ recurse_dependencies: bool = True,
+ status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
+ prefix: Optional[Callable[["Spec"], str]] = None,
+ key=id,
+) -> str:
+ """Prints out specs and their dependencies, tree-formatted with indentation.
+
+ The status function may return either a boolean or an InstallStatus.
+
+ Args:
+ color: if True, always colorize the tree. If False, don't colorize the tree. If None,
+ use the default from llnl.util.tty.color
+ depth: print the depth from the root
+ hashes: if True, print the hash of each node
+ hashlen: length of the hash to be printed
+ cover: either "nodes" or "edges"
+ indent: extra indentation for the tree being printed
+ format: format to be used to print each node
+ deptypes: dependency types to be represented in the tree
+ show_types: if True, show the (merged) dependency type of a node
+ depth_first: if True, traverse the DAG depth first when representing it as a tree
+ recurse_dependencies: if True, recurse on dependencies
+ status_fn: optional callable that takes a node as an argument and returns its
+ installation status
+ prefix: optional callable that takes a node as an argument and returns its
+ installation prefix
+ """
+ out = ""
+
+ if color is None:
+ color = clr.get_color_when()
+
+ # reduce deptypes over all in-edges when covering nodes
+ if show_types and cover == "nodes":
+ deptype_lookup: Dict[str, dt.DepFlag] = collections.defaultdict(dt.DepFlag)
+ for edge in traverse.traverse_edges(specs, cover="edges", deptype=deptypes, root=False):
+ deptype_lookup[edge.spec.dag_hash()] |= edge.depflag
+
+ for d, dep_spec in traverse.traverse_tree(
+ sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
+ ):
+ node = dep_spec.spec
+
+ if prefix is not None:
+ out += prefix(node)
+ out += " " * indent
+
+ if depth:
+ out += "%-4d" % d
+
+ if status_fn:
+ status = status_fn(node)
+ if status in list(InstallStatus):
+ out += clr.colorize(status.value, color=color)
+ elif status:
+ out += clr.colorize("@g{[+]} ", color=color)
+ else:
+ out += clr.colorize("@r{[-]} ", color=color)
+
+ if hashes:
+ out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
+
+ if show_types:
+ if cover == "nodes":
+ depflag = deptype_lookup[dep_spec.spec.dag_hash()]
+ else:
+ # when covering edges or paths, we show dependency
+ # types only for the edge through which we visited
+ depflag = dep_spec.depflag
+
+ type_chars = dt.flag_to_chars(depflag)
+ out += "[%s] " % type_chars
+
+ out += " " * d
+ if d > 0:
+ out += "^"
+ out += node.format(format, color=color) + "\n"
+
+ # Check if we wanted just the first line
+ if not recurse_dependencies:
+ break
+
+ return out
+
+
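Possible usage, as a sketch assuming an environment where "zlib" concretizes (a boolean status function is accepted per the docstring above):

    s = Spec("zlib").concretized()
    print(tree([s], depth=True, hashes=True, hashlen=7, status_fn=lambda n: n.installed))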
@lang.lazy_lexicographic_ordering(set_hash=False)
class Spec:
#: Cache for spec's prefix, computed lazily in the corresponding property
@@ -1331,12 +1472,12 @@ class Spec:
# init an empty spec that matches anything.
self.name = None
self.versions = vn.VersionList(":")
- self.variants = vt.VariantMap(self)
+ self.variants = VariantMap(self)
self.architecture = None
self.compiler = None
self.compiler_flags = FlagMap(self)
- self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
- self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
+ self._dependents = _EdgeMap(store_by_child=False)
+ self._dependencies = _EdgeMap(store_by_child=True)
self.namespace = None
# initial values for all spec hash types
@@ -1407,6 +1548,13 @@ class Spec:
def external(self):
return bool(self.external_path) or bool(self.external_modules)
+ @property
+ def is_develop(self):
+ """Return whether the Spec represents a user-developed package
+ in a Spack ``Environment`` (i.e. using `spack develop`).
+ """
+ return bool(self.variants.get("dev_path", False))
+
def clear_dependencies(self):
"""Trim the dependencies of this spec."""
self._dependencies.clear()
@@ -1448,25 +1596,34 @@ class Spec:
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
- def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
+ def edges_from_dependents(
+ self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
+ ) -> List[DependencySpec]:
"""Return a list of edges connecting this node in the DAG
to parents.
Args:
name (str): filter dependents by package name
depflag: allowed dependency types
+ virtuals: allowed virtuals
"""
- return [d for d in self._dependents.select(parent=name, depflag=depflag)]
+ return [
+ d for d in self._dependents.select(parent=name, depflag=depflag, virtuals=virtuals)
+ ]
- def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
- """Return a list of edges connecting this node in the DAG
- to children.
+ def edges_to_dependencies(
+ self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
+ ) -> List[DependencySpec]:
+ """Returns a list of edges connecting this node in the DAG to children.
Args:
name (str): filter dependencies by package name
depflag: allowed dependency types
+ virtuals: allowed virtuals
"""
- return [d for d in self._dependencies.select(child=name, depflag=depflag)]
+ return [
+ d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
+ ]
@property
def edge_attributes(self) -> str:
@@ -1488,18 +1645,29 @@ class Spec:
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"
- def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
- """Return a list of direct dependencies (nodes in the DAG).
+ def dependencies(
+ self,
+ name=None,
+ deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
+ *,
+ virtuals: Optional[List[str]] = None,
+ ) -> List["Spec"]:
+ """Returns a list of direct dependencies (nodes in the DAG)
Args:
- name (str): filter dependencies by package name
+ name: filter dependencies by package name
deptype: allowed dependency types
+ virtuals: allowed virtuals
"""
if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
- return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
+ return [
+ d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
+ ]
- def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
+ def dependents(
+ self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
+ ) -> List["Spec"]:
"""Return a list of direct dependents (nodes in the DAG).
Args:
@@ -1527,38 +1695,14 @@ class Spec:
result[key] = list(group)
return result
- #
- # Private routines here are called by the parser when building a spec.
- #
- def _add_versions(self, version_list):
- """Called by the parser to add an allowable version."""
- # If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.any_version:
- raise MultipleVersionError(
- "A spec cannot contain multiple version signifiers." " Use a version list instead."
- )
- self.versions = vn.VersionList()
- for version in version_list:
- self.versions.add(version)
-
def _add_flag(self, name, value, propagate):
"""Called by the parser to add a known flag.
Known flags currently include "arch"
"""
- # If the == syntax is used to propagate the spec architecture
- # This is an error
- architecture_names = [
- "arch",
- "architecture",
- "platform",
- "os",
- "operating_system",
- "target",
- ]
- if propagate and name in architecture_names:
- raise ArchitecturePropagationError(
- "Unable to propagate the architecture failed." " Use a '=' instead."
+ if propagate and name in vt.reserved_names:
+ raise UnsupportedPropagationError(
+ f"Propagation with '==' is not supported for '{name}'."
)
valid_flags = FlagMap.valid_compiler_flags()
@@ -1572,11 +1716,14 @@ class Spec:
self._set_architecture(os=value)
elif name == "target":
self._set_architecture(target=value)
+ elif name == "namespace":
+ self.namespace = value
elif name in valid_flags:
assert self.compiler_flags is not None
flags_and_propagation = spack.compiler.tokenize_flags(value, propagate)
+ flag_group = " ".join(x for (x, y) in flags_and_propagation)
for flag, propagation in flags_and_propagation:
- self.compiler_flags.add_flag(name, flag, propagation)
+ self.compiler_flags.add_flag(name, flag, propagation, flag_group)
else:
# FIXME:
# All other flags represent variants. 'foo=true' and 'foo=false'
@@ -1591,9 +1738,7 @@ class Spec:
"""Called by the parser to set the architecture."""
arch_attrs = ["platform", "os", "target"]
if self.architecture and self.architecture.concrete:
- raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name
- )
+ raise DuplicateArchitectureError("Spec cannot have two architectures.")
if not self.architecture:
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
@@ -1602,44 +1747,42 @@ class Spec:
new_attrvals = [(a, v) for a, v in kwargs.items() if a in arch_attrs]
for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr):
- raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two '%s' specified "
- "for its architecture" % (self.name, new_attr)
- )
+ raise DuplicateArchitectureError(f"Cannot specify '{new_attr}' twice")
else:
setattr(self.architecture, new_attr, new_value)
- def _set_compiler(self, compiler):
- """Called by the parser to set the compiler."""
- if self.compiler:
- raise DuplicateCompilerSpecError(
- "Spec for '%s' cannot have two compilers." % self.name
- )
- self.compiler = compiler
-
def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
return
- # Keep the intersection of constraints when a dependency is added
- # multiple times. Currently, we only allow identical edge types.
+ # Keep the intersection of constraints when a dependency is added multiple times with
+ # the same deptype. Add a new dependency if it is added with a compatible deptype
+ # (for example, a build-only dependency is compatible with a link-only dependency).
+ # The only restrictions, currently, are that we cannot add edges with overlapping
+ # dependency types and we cannot add multiple edges that have link/run dependency types.
+ # See ``spack.deptypes.compatible``.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
except StopIteration:
- current_deps = ", ".join(
- dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig
- )
- raise DuplicateDependencyError(
- f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n"
- f"\tRequired: {dt.flag_to_chars(depflag)}\n"
- f"\tDependency: {current_deps}"
- )
+ # Error if we have overlapping or incompatible deptypes
+ if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig):
+ edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
+ required_dep_str = f"^[{edge_attrs}] {str(spec)}"
+
+ raise DuplicateDependencyError(
+ f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
+ f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
+ )
+
+ self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
+ return
try:
dspec.spec.constrain(spec)
+ dspec.update_virtuals(virtuals=virtuals)
except spack.error.UnsatisfiableSpecError:
raise DuplicateDependencyError(
f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'"
@@ -1660,7 +1803,10 @@ class Spec:
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
- if edge.depflag & depflag:
+
+ # If the dependency is to an existing spec, we can update dependency
+ # types. If it is to a new object, check deptype compatibility.
+ if id(edge.spec) != id(dependency_spec) and not dt.compatible(edge.depflag, depflag):
has_errors = True
details.append(
(
@@ -1669,14 +1815,13 @@ class Spec:
)
)
- if any(v in edge.virtuals for v in virtuals):
- has_errors = True
- details.append(
- (
- f"{edge.parent.name} has already an edge matching any"
- f" of these virtuals {virtuals}"
+ if any(v in edge.virtuals for v in virtuals):
+ details.append(
+ (
+ f"{edge.parent.name} has already an edge matching any"
+ f" of these virtuals {virtuals}"
+ )
)
- )
if has_errors:
raise spack.error.SpecError(msg, "\n".join(details))
@@ -1808,14 +1953,14 @@ class Spec:
"""Returns a version of the spec with the dependencies hashed
instead of completely enumerated."""
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
- spec_format += "{variants}{arch=architecture}{/hash:7}"
+ spec_format += "{variants}{ arch=architecture}{/hash:7}"
return self.format(spec_format)
@property
def cshort_spec(self):
"""Returns an auto-colorized version of ``self.short_spec``."""
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
- spec_format += "{variants}{arch=architecture}{/hash:7}"
+ spec_format += "{variants}{ arch=architecture}{/hash:7}"
return self.cformat(spec_format)
@property
@@ -1920,6 +2065,7 @@ class Spec:
def _lookup_hash(self):
"""Lookup just one spec with an abstract hash, returning a spec from the the environment,
store, or finally, binary caches."""
+ import spack.binary_distribution
import spack.environment
active_env = spack.environment.active_environment()
@@ -1927,7 +2073,7 @@ class Spec:
# First env, then store, then binary cache
matches = (
(active_env.all_matching_specs(self) if active_env else [])
- or spack.store.STORE.db.query(self, installed=any)
+ or spack.store.STORE.db.query(self, installed=InstallRecordStatus.ANY)
or spack.binary_distribution.BinaryCacheQuery(True)(self)
)
@@ -1935,7 +2081,7 @@ class Spec:
raise InvalidHashError(self, self.abstract_hash)
if len(matches) != 1:
- raise spack.spec.AmbiguousHashError(
+ raise AmbiguousHashError(
f"Multiple packages specify hash beginning '{self.abstract_hash}'.", *matches
)
@@ -2057,6 +2203,18 @@ class Spec:
if params:
d["parameters"] = params
+ if params and not self.concrete:
+ flag_names = [
+ name
+ for name, flags in self.compiler_flags.items()
+ if any(x.propagate for x in flags)
+ ]
+ d["propagate"] = sorted(
+ itertools.chain(
+ [v.name for v in self.variants.values() if v.propagate], flag_names
+ )
+ )
+
if self.external:
d["external"] = syaml.syaml_dict(
[
@@ -2074,7 +2232,12 @@ class Spec:
if hasattr(variant, "_patches_in_order_of_appearance"):
d["patches"] = variant._patches_in_order_of_appearance
- if self._concrete and hash.package_hash and self._package_hash:
+ if (
+ self._concrete
+ and hash.package_hash
+ and hasattr(self, "_package_hash")
+ and self._package_hash
+ ):
# We use the attribute here instead of `self.package_hash()` because this
# should *always* be assigned at concretization time. We don't want to try
# to compute a package hash for a concrete spec where a) the package might not
@@ -2224,16 +2387,10 @@ class Spec:
spec is concrete, the full hash is added as well. If 'build' is in
the hash_type, the build hash is also added."""
node = self.to_node_dict(hash)
+ # All specs have at least a DAG hash
node[ht.dag_hash.name] = self.dag_hash()
- # dag_hash is lazily computed -- but if we write a spec out, we want it
- # to be included. This is effectively the last chance we get to compute
- # it accurately.
- if self.concrete:
- # all specs have at least a DAG hash
- node[ht.dag_hash.name] = self.dag_hash()
-
- else:
+ if not self.concrete:
node["concrete"] = False
# we can also give them other hash types if we want
@@ -2265,14 +2422,16 @@ class Spec:
package_cls = spack.repo.PATH.get_pkg_class(new_spec.name)
if change_spec.versions and not change_spec.versions == vn.any_version:
new_spec.versions = change_spec.versions
- for variant, value in change_spec.variants.items():
- if variant in package_cls.variants:
- if variant in new_spec.variants:
+
+ for vname, value in change_spec.variants.items():
+ if vname in package_cls.variant_names():
+ if vname in new_spec.variants:
new_spec.variants.substitute(value)
else:
- new_spec.variants[variant] = value
+ new_spec.variants[vname] = value
else:
- raise ValueError("{0} is not a variant of {1}".format(variant, new_spec.name))
+ raise ValueError("{0} is not a variant of {1}".format(vname, new_spec.name))
+
if change_spec.compiler:
new_spec.compiler = change_spec.compiler
if change_spec.compiler_flags:
@@ -2373,7 +2532,7 @@ class Spec:
spec_like, dep_like = next(iter(d.items()))
# If the requirements was for unique nodes (default)
- # then re-use keys from the local cache. Otherwise build
+ # then reuse keys from the local cache. Otherwise build
# a new node every time.
if not isinstance(spec_like, Spec):
spec = spec_cache[spec_like] if normal else Spec(spec_like)
@@ -2500,22 +2659,27 @@ class Spec:
return Spec.from_dict(extracted_json)
@staticmethod
- def from_detection(spec_str, extra_attributes=None):
+ def from_detection(
+ spec_str: str,
+ *,
+ external_path: str,
+ external_modules: Optional[List[str]] = None,
+ extra_attributes: Optional[Dict] = None,
+ ) -> "Spec":
"""Construct a spec from a spec string determined during external
detection and attach extra attributes to it.
Args:
- spec_str (str): spec string
- extra_attributes (dict): dictionary containing extra attributes
-
- Returns:
- spack.spec.Spec: external spec
+ spec_str: spec string
+ external_path: prefix of the external spec
+ external_modules: optional module files to be loaded when the external spec is used
+ extra_attributes: dictionary containing extra attributes
"""
- s = Spec(spec_str)
+ s = Spec(spec_str, external_path=external_path, external_modules=external_modules)
extra_attributes = syaml.sorted_dict(extra_attributes or {})
# This is needed to be able to validate multi-valued variants,
# otherwise they'll still be abstract in the context of detection.
- vt.substitute_abstract_variants(s)
+ substitute_abstract_variants(s)
s.extra_attributes = extra_attributes
return s
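Illustrative call (spec string, path, and attributes are hypothetical):

    s = Spec.from_detection(
        "gcc@12.3.0 languages=c,c++",
        external_path="/usr",
        extra_attributes={"compilers": {"c": "/usr/bin/gcc"}},
    )
    assert s.external and s.external_path == "/usr"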
@@ -2537,293 +2701,6 @@ class Spec:
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
- def _concretize_helper(self, concretizer, presets=None, visited=None):
- """Recursive helper function for concretize().
- This concretizes everything bottom-up. As things are
- concretized, they're added to the presets, and ancestors
- will prefer the settings of their children.
- """
- if presets is None:
- presets = {}
- if visited is None:
- visited = set()
-
- if self.name in visited:
- return False
-
- if self.concrete:
- visited.add(self.name)
- return False
-
- changed = False
-
- # Concretize deps first -- this is a bottom-up process.
- for name in sorted(self._dependencies):
- # WARNING: This function is an implementation detail of the
- # WARNING: original concretizer. Since with that greedy
- # WARNING: algorithm we don't allow multiple nodes from
- # WARNING: the same package in a DAG, here we hard-code
- # WARNING: using index 0 i.e. we assume that we have only
- # WARNING: one edge from package "name"
- changed |= self._dependencies[name][0].spec._concretize_helper(
- concretizer, presets, visited
- )
-
- if self.name in presets:
- changed |= self.constrain(presets[self.name])
- else:
- # Concretize virtual dependencies last. Because they're added
- # to presets below, their constraints will all be merged, but we'll
- # still need to select a concrete package later.
- if not self.virtual:
- changed |= any(
- (
- concretizer.concretize_develop(self), # special variant
- concretizer.concretize_architecture(self),
- concretizer.concretize_compiler(self),
- concretizer.adjust_target(self),
- # flags must be concretized after compiler
- concretizer.concretize_compiler_flags(self),
- concretizer.concretize_version(self),
- concretizer.concretize_variants(self),
- )
- )
- presets[self.name] = self
-
- visited.add(self.name)
- return changed
-
- def _replace_with(self, concrete):
- """Replace this virtual spec with a concrete spec."""
- assert self.virtual
- virtuals = (self.name,)
- for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
- dependent = dep_spec.parent
- depflag = dep_spec.depflag
-
- # remove self from all dependents, unless it is already removed
- if self.name in dependent._dependencies:
- del dependent._dependencies.edges[self.name]
-
- # add the replacement, unless it is already a dep of dependent.
- if concrete.name not in dependent._dependencies:
- dependent._add_dependency(concrete, depflag=depflag, virtuals=virtuals)
- else:
- dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
- virtuals=virtuals
- )
-
- def _expand_virtual_packages(self, concretizer):
- """Find virtual packages in this spec, replace them with providers,
- and normalize again to include the provider's (potentially virtual)
- dependencies. Repeat until there are no virtual deps.
-
- Precondition: spec is normalized.
-
- .. todo::
-
- If a provider depends on something that conflicts with
- other dependencies in the spec being expanded, this can
- produce a conflicting spec. For example, if mpich depends
- on hwloc@:1.3 but something in the spec needs hwloc1.4:,
- then we should choose an MPI other than mpich. Cases like
- this are infrequent, but should implement this before it is
- a problem.
- """
- # Make an index of stuff this spec already provides
- self_index = spack.provider_index.ProviderIndex(
- repository=spack.repo.PATH, specs=self.traverse(), restrict=True
- )
- changed = False
- done = False
-
- while not done:
- done = True
- for spec in list(self.traverse()):
- replacement = None
- if spec.external:
- continue
- if spec.virtual:
- replacement = self._find_provider(spec, self_index)
- if replacement:
- # TODO: may break if in-place on self but
- # shouldn't happen if root is traversed first.
- spec._replace_with(replacement)
- done = False
- break
-
- if not replacement:
- # Get a list of possible replacements in order of
- # preference.
- candidates = concretizer.choose_virtual_or_external(spec)
-
- # Try the replacements in order, skipping any that cause
- # satisfiability problems.
- for replacement in candidates:
- if replacement is spec:
- break
-
- # Replace spec with the candidate and normalize
- copy = self.copy()
- copy[spec.name]._dup(replacement, deps=False)
-
- try:
- # If there are duplicate providers or duplicate
- # provider deps, consolidate them and merge
- # constraints.
- copy.normalize(force=True)
- break
- except spack.error.SpecError:
- # On error, we'll try the next replacement.
- continue
-
- # If replacement is external then trim the dependencies
- if replacement.external:
- if spec._dependencies:
- for dep in spec.dependencies():
- del dep._dependents.edges[spec.name]
- changed = True
- spec.clear_dependencies()
- replacement.clear_dependencies()
- replacement.architecture = self.architecture
-
- # TODO: could this and the stuff in _dup be cleaned up?
- def feq(cfield, sfield):
- return (not cfield) or (cfield == sfield)
-
- if replacement is spec or (
- feq(replacement.name, spec.name)
- and feq(replacement.versions, spec.versions)
- and feq(replacement.compiler, spec.compiler)
- and feq(replacement.architecture, spec.architecture)
- and feq(replacement._dependencies, spec._dependencies)
- and feq(replacement.variants, spec.variants)
- and feq(replacement.external_path, spec.external_path)
- and feq(replacement.external_modules, spec.external_modules)
- ):
- continue
- # Refine this spec to the candidate. This uses
- # replace_with AND dup so that it can work in
- # place. TODO: make this more efficient.
- if spec.virtual:
- spec._replace_with(replacement)
- changed = True
- if spec._dup(replacement, deps=False, cleardeps=False):
- changed = True
-
- self_index.update(spec)
- done = False
- break
-
- return changed
-
- def _old_concretize(self, tests=False, deprecation_warning=True):
- """A spec is concrete if it describes one build of a package uniquely.
- This will ensure that this spec is concrete.
-
- Args:
- tests (list or bool): list of packages that will need test
- dependencies, or True/False for test all/none
- deprecation_warning (bool): enable or disable the deprecation
- warning for the old concretizer
-
- If this spec could describe more than one version, variant, or build
- of a package, this will add constraints to make it concrete.
-
- Some rigorous validation and checks are also performed on the spec.
- Concretizing ensures that it is self-consistent and that it's
- consistent with requirements of its packages. See flatten() and
- normalize() for more details on this.
- """
- import spack.concretize
-
- # Add a warning message to inform users that the original concretizer
- # will be removed
- if deprecation_warning:
- msg = (
- "the original concretizer is currently being used.\n\tUpgrade to "
- '"clingo" at your earliest convenience. The original concretizer '
- "will be removed from Spack in a future version."
- )
- warnings.warn(msg)
-
- self.replace_hash()
-
- if not self.name:
- raise spack.error.SpecError("Attempting to concretize anonymous spec")
-
- if self._concrete:
- return
-
- changed = True
- force = False
-
- user_spec_deps = self.flat_dependencies(copy=False)
- concretizer = spack.concretize.Concretizer(self.copy())
- while changed:
- changes = (
- self.normalize(force, tests=tests, user_spec_deps=user_spec_deps),
- self._expand_virtual_packages(concretizer),
- self._concretize_helper(concretizer),
- )
- changed = any(changes)
- force = True
-
- visited_user_specs = set()
- for dep in self.traverse():
- visited_user_specs.add(dep.name)
- pkg_cls = spack.repo.PATH.get_pkg_class(dep.name)
- visited_user_specs.update(x.name for x in pkg_cls(dep).provided)
-
- extra = set(user_spec_deps.keys()).difference(visited_user_specs)
- if extra:
- raise InvalidDependencyError(self.name, extra)
-
- Spec.inject_patches_variant(self)
-
- for s in self.traverse():
- # TODO: Refactor this into a common method to build external specs
- # TODO: or turn external_path into a lazy property
- Spec.ensure_external_path_if_external(s)
-
- # assign hashes and mark concrete
- self._finalize_concretization()
-
- # If any spec in the DAG is deprecated, throw an error
- Spec.ensure_no_deprecated(self)
-
- # Update externals as needed
- for dep in self.traverse():
- if dep.external:
- dep.package.update_external_dependencies()
-
- # Now that the spec is concrete we should check if
- # there are declared conflicts
- #
- # TODO: this needs rethinking, as currently we can only express
- # TODO: internal configuration conflicts within one package.
- matches = []
- for x in self.traverse():
- if x.external:
- # external specs are already built, don't worry about whether
- # it's possible to build that configuration with Spack
- continue
- for conflict_spec, when_list in x.package_class.conflicts.items():
- if x.satisfies(conflict_spec):
- for when_spec, msg in when_list:
- if x.satisfies(when_spec):
- when = when_spec.copy()
- when.name = x.name
- matches.append((x, conflict_spec, when, msg))
- if matches:
- raise ConflictsInSpecError(self, matches)
-
- # Check if we can produce an optimized binary (will throw if
- # there are declared inconsistencies)
- # No need on platform=cray because of the targeting modules
- if not self.satisfies("platform=cray"):
- self.architecture.target.optimization_flags(self.compiler)
-
def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2861,13 +2738,14 @@ class Spec:
continue
# Add any patches from the package to the spec.
- patches = []
+ patches = set()
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
for patch in patch_list:
- patches.append(patch)
+ patches.add(patch)
if patches:
spec_to_patches[id(s)] = patches
+
# Also record all patches required on dependencies by
# depends_on(..., patch=...)
for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
@@ -2875,17 +2753,25 @@ class Spec:
continue
pkg_deps = dspec.parent.package_class.dependencies
- if dspec.spec.name not in pkg_deps:
- continue
patches = []
- for cond, dependency in pkg_deps[dspec.spec.name].items():
+ for cond, deps_by_name in pkg_deps.items():
+ if not dspec.parent.satisfies(cond):
+ continue
+
+ dependency = deps_by_name.get(dspec.spec.name)
+ if not dependency:
+ continue
+
for pcond, patch_list in dependency.patches.items():
- if dspec.parent.satisfies(cond) and dspec.spec.satisfies(pcond):
+ if dspec.spec.satisfies(pcond):
patches.extend(patch_list)
+
if patches:
- all_patches = spec_to_patches.setdefault(id(dspec.spec), [])
- all_patches.extend(patches)
+ all_patches = spec_to_patches.setdefault(id(dspec.spec), set())
+ for patch in patches:
+ all_patches.add(patch)
+
for spec in root.traverse():
if id(spec) not in spec_to_patches:
continue
@@ -2944,7 +2830,13 @@ class Spec:
msg += " For each package listed, choose another spec\n"
raise SpecDeprecatedError(msg)
- def _new_concretize(self, tests=False):
+ def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
+ """Concretize the current spec.
+
+ Args:
+ tests: if False, disregard 'test' dependencies; if a list of names, activate them for
+ the packages in the list; if True, activate 'test' dependencies for all packages.
+ """
import spack.solver.asp
self.replace_hash()
@@ -2961,7 +2853,6 @@ class Spec:
allow_deprecated = spack.config.get("config:deprecated", False)
solver = spack.solver.asp.Solver()
result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
- result.raise_if_unsat()
# take the best answer
opt, i, answer = min(result.answers)
@@ -2979,19 +2870,6 @@ class Spec:
concretized = answer[node]
self._dup(concretized)
- def concretize(self, tests=False):
- """Concretize the current spec.
-
- Args:
- tests (bool or list): if False disregard 'test' dependencies,
- if a list of names activate them for the packages in the list,
- if True activate 'test' dependencies for all packages.
- """
- if spack.config.get("config:concretizer", "clingo") == "clingo":
- self._new_concretize(tests)
- else:
- self._old_concretize(tests)
-
def _mark_root_concrete(self, value=True):
"""Mark just this spec (not dependencies) concrete."""
if (not value) and self.concrete and self.installed:
@@ -3031,7 +2909,7 @@ class Spec:
if (not value) and s.concrete and s.installed:
continue
elif not value:
- s.clear_cached_hashes()
+ s.clear_caches()
s._mark_root_concrete(value)
def _finalize_concretization(self):
@@ -3080,7 +2958,7 @@ class Spec:
for spec in self.traverse():
spec._cached_hash(ht.dag_hash)
- def concretized(self, tests=False):
+ def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
"""This is a non-destructive version of concretize().
First clones, then returns a concrete version of this package
@@ -3095,46 +2973,6 @@ class Spec:
clone.concretize(tests=tests)
return clone
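
For illustration, a minimal sketch of the two entry points (assumes a zlib package
exists in the active repository):

    from spack.spec import Spec

    s = Spec("zlib@1.2:")
    t = s.concretized()  # non-destructive: returns a concrete clone, s stays abstract
    s.concretize()       # in-place: s itself is now concrete
    assert t.concrete and s.concrete
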
- def flat_dependencies(self, **kwargs):
- """Return a DependencyMap containing all of this spec's
- dependencies with their constraints merged.
-
- If copy is True, returns merged copies of its dependencies
- without modifying the spec it's called on.
-
- If copy is False, clears this spec's dependencies and
- returns them. This disconnects all dependency links including
- transitive dependencies, except for concrete specs: if a spec
- is concrete it will not be disconnected from its dependencies
- (although a non-concrete spec with concrete dependencies will
- be disconnected from those dependencies).
- """
- copy = kwargs.get("copy", True)
-
- flat_deps = {}
- try:
- deptree = self.traverse(root=False)
- for spec in deptree:
- if spec.name not in flat_deps:
- if copy:
- spec = spec.copy(deps=False)
- flat_deps[spec.name] = spec
- else:
- flat_deps[spec.name].constrain(spec)
-
- if not copy:
- for spec in flat_deps.values():
- if not spec.concrete:
- spec.clear_edges()
- self.clear_dependencies()
-
- return flat_deps
-
- except spack.error.UnsatisfiableSpecError as e:
- # Here, the DAG contains two instances of the same package
- # with inconsistent constraints.
- raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) from e
-
def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this
spec.
@@ -3144,300 +2982,6 @@ class Spec:
dm[spec.name].append(spec)
return dm
- def _evaluate_dependency_conditions(self, name):
- """Evaluate all the conditions on a dependency with this name.
-
- Args:
- name (str): name of dependency to evaluate conditions on.
-
- Returns:
- (Dependency): new Dependency object combining all constraints.
-
- If the package depends on <name> in the current spec
- configuration, return the constrained dependency and
- corresponding dependency types.
-
- If no conditions are True (and we don't depend on it), return
- ``(None, None)``.
- """
- conditions = self.package_class.dependencies[name]
-
- vt.substitute_abstract_variants(self)
- # evaluate when specs to figure out constraints on the dependency.
- dep = None
- for when_spec, dependency in conditions.items():
- if self.satisfies(when_spec):
- if dep is None:
- dep = dp.Dependency(self.name, Spec(name), depflag=0)
- try:
- dep.merge(dependency)
- except spack.error.UnsatisfiableSpecError as e:
- e.message = (
- "Conflicting conditional dependencies for spec"
- "\n\n\t{0}\n\n"
- "Cannot merge constraint"
- "\n\n\t{1}\n\n"
- "into"
- "\n\n\t{2}".format(self, dependency.spec, dep.spec)
- )
- raise e
-
- return dep
-
- def _find_provider(self, vdep, provider_index):
- """Find provider for a virtual spec in the provider index.
- Raise an exception if there is a conflicting virtual
- dependency already in this spec.
- """
- assert spack.repo.PATH.is_virtual_safe(vdep.name), vdep
-
- # note that this defensively copies.
- providers = provider_index.providers_for(vdep)
-
- # If there is a provider for the vpkg, then use that instead of
- # the virtual package.
- if providers:
- # Remove duplicate providers that can concretize to the same
- # result.
- for provider in providers:
- for spec in providers:
- if spec is not provider and provider.intersects(spec):
- providers.remove(spec)
- # Can't have multiple providers for the same thing in one spec.
- if len(providers) > 1:
- raise MultipleProviderError(vdep, providers)
- return providers[0]
- else:
- # The user might have required something insufficient for
- # pkg_dep -- so we'll get a conflict. e.g., user asked for
- # mpi@:1.1 but some package required mpi@2.1:.
- required = provider_index.providers_for(vdep.name)
- if len(required) > 1:
- raise MultipleProviderError(vdep, required)
- elif required:
- raise UnsatisfiableProviderSpecError(required[0], vdep)
-
- def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):
- """Merge dependency information from a Package into this Spec.
-
- Args:
- dependency (Dependency): dependency metadata from a package;
- this is typically the result of merging *all* matching
- dependency constraints from the package.
- visited (set): set of dependency nodes already visited by
- ``normalize()``.
- spec_deps (dict): ``dict`` of all dependencies from the spec
- being normalized.
- provider_index (dict): ``provider_index`` of virtual dep
- providers in the ``Spec`` as normalized so far.
-
- NOTE: Caller should assume that this routine owns the
- ``dependency`` parameter, i.e., it needs to be a copy of any
- internal structures.
-
- This is the core of ``normalize()``. There are some basic steps:
-
- * If dep is virtual, evaluate whether it corresponds to an
- existing concrete dependency, and merge if so.
-
- * If it's real and it provides some virtual dep, see if it provides
- what some virtual dependency wants and merge if so.
-
- * Finally, if none of the above, merge dependency and its
- constraints into this spec.
-
- This method returns True if the spec was changed, False otherwise.
-
- """
- changed = False
- dep = dependency.spec
-
- # If it's a virtual dependency, try to find an existing
- # provider in the spec, and merge that.
- virtuals = ()
- if spack.repo.PATH.is_virtual_safe(dep.name):
- virtuals = (dep.name,)
- visited.add(dep.name)
- provider = self._find_provider(dep, provider_index)
- if provider:
- dep = provider
- else:
- index = spack.provider_index.ProviderIndex(
- repository=spack.repo.PATH, specs=[dep], restrict=True
- )
- items = list(spec_deps.items())
- for name, vspec in items:
- if not spack.repo.PATH.is_virtual_safe(vspec.name):
- continue
-
- if index.providers_for(vspec):
- vspec._replace_with(dep)
- del spec_deps[vspec.name]
- changed = True
- else:
- required = index.providers_for(vspec.name)
- if required:
- raise UnsatisfiableProviderSpecError(required[0], dep)
- provider_index.update(dep)
-
- # If the spec isn't already in the set of dependencies, add it.
- # Note: dep is always owned by this method. If it's from the
- # caller, it's a copy from _evaluate_dependency_conditions. If it
- # comes from a vdep, it's a defensive copy from _find_provider.
- if dep.name not in spec_deps:
- if self.concrete:
- return False
-
- spec_deps[dep.name] = dep
- changed = True
- else:
- # merge package/vdep information into spec
- try:
- tty.debug("{0} applying constraint {1}".format(self.name, str(dep)))
- changed |= spec_deps[dep.name].constrain(dep)
- except spack.error.UnsatisfiableSpecError as e:
- fmt = "An unsatisfiable {0}".format(e.constraint_type)
- fmt += " constraint has been detected for spec:"
- fmt += "\n\n{0}\n\n".format(spec_deps[dep.name].tree(indent=4))
- fmt += "while trying to concretize the partial spec:"
- fmt += "\n\n{0}\n\n".format(self.tree(indent=4))
- fmt += "{0} requires {1} {2} {3}, but spec asked for {4}"
-
- e.message = fmt.format(
- self.name, dep.name, e.constraint_type, e.required, e.provided
- )
-
- raise
-
- # Add merged spec to my deps and recurse
- spec_dependency = spec_deps[dep.name]
- if dep.name not in self._dependencies:
- self._add_dependency(spec_dependency, depflag=dependency.depflag, virtuals=virtuals)
-
- changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
- return changed
-
- def _normalize_helper(self, visited, spec_deps, provider_index, tests):
- """Recursive helper function for _normalize."""
- if self.name in visited:
- return False
- visited.add(self.name)
-
- # If we descend into a virtual spec, there's nothing more
- # to normalize. Concretize will finish resolving it later.
- if self.virtual or self.external:
- return False
-
- # Avoid recursively adding constraints for already-installed packages:
- # these may include build dependencies which are not needed for this
- # install (since this package is already installed).
- if self.concrete and self.installed:
- return False
-
- # Combine constraints from package deps with constraints from
- # the spec, until nothing changes.
- any_change = False
- changed = True
-
- while changed:
- changed = False
- for dep_name in self.package_class.dependencies:
- # Do we depend on dep_name? If so pkg_dep is not None.
- dep = self._evaluate_dependency_conditions(dep_name)
-
- # If dep is a needed dependency, merge it.
- if dep:
- merge = (
- # caller requested test dependencies
- tests is True
- or (tests and self.name in tests)
- or
- # this is not a test-only dependency
- (dep.depflag & ~dt.TEST)
- )
-
- if merge:
- changed |= self._merge_dependency(
- dep, visited, spec_deps, provider_index, tests
- )
- any_change |= changed
-
- return any_change
-
- def normalize(self, force=False, tests=False, user_spec_deps=None):
- """When specs are parsed, any dependencies specified are hanging off
- the root, and ONLY the ones that were explicitly provided are there.
- Normalization turns a partial flat spec into a DAG, where:
-
- 1. Known dependencies of the root package are in the DAG.
- 2. Each node's dependencies dict only contains its known direct
- deps.
- 3. There is only ONE unique spec for each package in the DAG.
-
-        * This includes virtual packages. If there is a non-virtual
- package that provides a virtual package that is in the spec,
- then we replace the virtual package with the non-virtual one.
-
- TODO: normalize should probably implement some form of cycle
- detection, to ensure that the spec is actually a DAG.
- """
- if not self.name:
- raise spack.error.SpecError("Attempting to normalize anonymous spec")
-
- # Set _normal and _concrete to False when forced
- if force and not self._concrete:
- self._normal = False
-
- if self._normal:
- return False
-
- # Ensure first that all packages & compilers in the DAG exist.
- self.validate_or_raise()
- # Clear the DAG and collect all dependencies in the DAG, which will be
- # reapplied as constraints. All dependencies collected this way will
- # have been created by a previous execution of 'normalize'.
- # A dependency extracted here will only be reintegrated if it is
- # discovered to apply according to _normalize_helper, so
- # user-specified dependencies are recorded separately in case they
- # refer to specs which take several normalization passes to
- # materialize.
- all_spec_deps = self.flat_dependencies(copy=False)
-
- if user_spec_deps:
- for name, spec in user_spec_deps.items():
- if not name:
- msg = "Attempted to normalize anonymous dependency spec"
- msg += " %s" % spec
- raise InvalidSpecDetected(msg)
- if name not in all_spec_deps:
- all_spec_deps[name] = spec
- else:
- all_spec_deps[name].constrain(spec)
-
- # Initialize index of virtual dependency providers if
- # concretize didn't pass us one already
- provider_index = spack.provider_index.ProviderIndex(
- repository=spack.repo.PATH, specs=[s for s in all_spec_deps.values()], restrict=True
- )
-
- # traverse the package DAG and fill out dependencies according
- # to package files & their 'when' specs
- visited = set()
-
- any_change = self._normalize_helper(visited, all_spec_deps, provider_index, tests)
-
- # Mark the spec as normal once done.
- self._normal = True
- return any_change
-
- def normalized(self):
- """
- Return a normalized copy of this spec without modifying this spec.
- """
- clone = self.copy()
- clone.normalize()
- return clone
-
def validate_or_raise(self):
"""Checks that names and values in this spec are real. If they're not,
it will raise an appropriate exception.
@@ -3458,7 +3002,7 @@ class Spec:
# Ensure correctness of variants (if the spec is not virtual)
if not spec.virtual:
Spec.ensure_valid_variants(spec)
- vt.substitute_abstract_variants(spec)
+ substitute_abstract_variants(spec)
@staticmethod
def ensure_valid_variants(spec):
@@ -3475,50 +3019,19 @@ class Spec:
return
pkg_cls = spec.package_class
- pkg_variants = pkg_cls.variants
+ pkg_variants = pkg_cls.variant_names()
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
+ propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate]
+
not_existing = set(spec.variants) - (
- set(pkg_variants) | set(spack.directives.reserved_names)
+ set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants)
)
- if not_existing:
- raise vt.UnknownVariantError(spec, not_existing)
-
- def update_variant_validate(self, variant_name, values):
- """If it is not already there, adds the variant named
- `variant_name` to the spec `spec` based on the definition
- contained in the package metadata. Validates the variant and
- values before returning.
-
- Used to add values to a variant without being sensitive to the
- variant being single or multi-valued. If the variant already
- exists on the spec it is assumed to be multi-valued and the
- values are appended.
-
- Args:
- variant_name: the name of the variant to add or append to
- values: the value or values (as a tuple) to add/append
- to the variant
- """
- if not isinstance(values, tuple):
- values = (values,)
-
- pkg_variant, _ = self.package_class.variants[variant_name]
-
- for value in values:
- if self.variants.get(variant_name):
- msg = (
- f"cannot append the new value '{value}' to the single-valued "
- f"variant '{self.variants[variant_name]}'"
- )
- assert pkg_variant.multi, msg
- self.variants[variant_name].append(value)
- else:
- variant = pkg_variant.make_variant(value)
- self.variants[variant_name] = variant
- pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
- pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)
+ if not_existing:
+ raise vt.UnknownVariantError(
+ f"No such variant {not_existing} for spec: '{spec}'", list(not_existing)
+ )
def constrain(self, other, deps=True):
"""Intersect self with other in-place. Return True if self changed, False otherwise.
@@ -3541,6 +3054,10 @@ class Spec:
raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
other = self._autospec(other)
+ if other.concrete and other.satisfies(self):
+ self._dup(other)
+ return True
+
if other.abstract_hash:
if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
self.abstract_hash = other.abstract_hash
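
The early return added above means constraining by a concrete spec that already
satisfies self replaces self wholesale. A hedged sketch (assumes the concretized
zlib version falls in the requested range):

    a = Spec("zlib@1.2:")
    c = Spec("zlib").concretized()
    if c.satisfies(a):
        a.constrain(c)                       # a becomes a full copy of c
        assert a.dag_hash() == c.dag_hash()  # hashes included
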
@@ -3749,11 +3266,9 @@ class Spec:
return False
if pkg.provides(virtual_spec.name):
- for provided, when_specs in pkg.provided.items():
- if any(
- non_virtual_spec.intersects(when, deps=False) for when in when_specs
- ):
- if provided.intersects(virtual_spec):
+ for when_spec, provided in pkg.provided.items():
+ if non_virtual_spec.intersects(when_spec, deps=False):
+ if any(vpkg.intersects(virtual_spec) for vpkg in provided):
return True
return False
@@ -3856,9 +3371,9 @@ class Spec:
return False
if pkg.provides(other.name):
- for provided, when_specs in pkg.provided.items():
- if any(self.satisfies(when, deps=False) for when in when_specs):
- if provided.intersects(other):
+ for when_spec, provided in pkg.provided.items():
+ if self.satisfies(when_spec, deps=False):
+ if any(vpkg.intersects(other) for vpkg in provided):
return True
return False
@@ -3903,9 +3418,13 @@ class Spec:
if not self._dependencies:
return False
- # If we arrived here, then rhs is abstract. At the moment we don't care about the edge
- # structure of an abstract DAG, so we check if any edge could satisfy the properties
- # we ask for.
+ # If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
+ # all the edges that have an abstract parent, and verify that they match some edge in the
+ # lhs.
+ #
+ # It might happen that the rhs brings in concrete sub-DAGs. For those we don't need to
+        # verify the edge properties, because everything is encoded in the hash of the nodes that
+ # will be verified later.
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
@@ -3915,6 +3434,10 @@ class Spec:
if not rhs_edge.virtuals:
continue
+ # Skip edges from a concrete sub-DAG
+ if rhs_edge.parent.concrete:
+ continue
+
if not lhs_edges:
# Construct a map of the link/run subDAG + direct "build" edges,
# keyed by dependency name
@@ -3973,7 +3496,7 @@ class Spec:
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
try:
patch = index.patch_for_package(sha256, pkg_cls)
- except spack.patch.PatchLookupError as e:
+ except spack.error.PatchLookupError as e:
raise spack.error.SpecError(
f"{e}. This usually means the patch was modified or removed. "
"To fix this, either reconcretize or use the original package "
@@ -4029,8 +3552,8 @@ class Spec:
self.architecture = other.architecture.copy() if other.architecture else None
self.compiler = other.compiler.copy() if other.compiler else None
if cleardeps:
- self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
- self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
+ self._dependents = _EdgeMap(store_by_child=False)
+ self._dependencies = _EdgeMap(store_by_child=True)
self.compiler_flags = other.compiler_flags.copy()
self.compiler_flags.spec = self
self.variants = other.variants.copy()
@@ -4134,7 +3657,7 @@ class Spec:
raise spack.error.SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
- def __getitem__(self, name):
+ def __getitem__(self, name: str):
"""Get a dependency from the spec by its name. This call implicitly
sets a query state in the package being retrieved. The behavior of
packages may be influenced by additional query parameters that are
@@ -4143,7 +3666,7 @@ class Spec:
Note that if a virtual package is queried a copy of the Spec is
returned while for non-virtual a reference is returned.
"""
- query_parameters = name.split(":")
+ query_parameters: List[str] = name.split(":")
if len(query_parameters) > 2:
raise KeyError("key has more than one ':' symbol. At most one is admitted.")
@@ -4154,38 +3677,30 @@ class Spec:
csv = query_parameters.pop().strip()
query_parameters = re.split(r"\s*,\s*", csv)
- # In some cases a package appears multiple times in the same DAG for *distinct*
- # specs. For example, a build-type dependency may itself depend on a package
- # the current spec depends on, but their specs may differ. Therefore we iterate
- # in an order here that prioritizes the build, test and runtime dependencies;
- # only when we don't find the package do we consider the full DAG.
order = lambda: itertools.chain(
- self.traverse(deptype="link"),
- self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
- self.traverse(), # fall back to a full search
+ self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
+ self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
+ self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
)
+ # Consider runtime dependencies and direct build/test deps before transitive dependencies,
+ # and prefer matches closest to the root.
try:
- value = next(
- itertools.chain(
- # Regular specs
- (x for x in order() if x.name == name),
- (
- x
- for x in order()
- if (not x.virtual)
- and any(name in edge.virtuals for edge in x.edges_from_dependents())
- ),
- (x for x in order() if (not x.virtual) and x.package.provides(name)),
+ child: Spec = next(
+ e.spec
+ for e in itertools.chain(
+ (e for e in order() if e.spec.name == name or name in e.virtuals),
+ # for historical reasons
+ (e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
)
)
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
- return SpecBuildInterface(value, name, query_parameters)
+ return SpecBuildInterface(child, name, query_parameters, _parent=self)
- return value
+ return child
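
Lookup semantics in a short sketch (names and query parameters illustrative only):

    s = Spec("mpileaks ^mpich").concretized()
    mpi = s["mpi"]  # nearest matching edge: the name or an edge virtual may match
    s["mpi:cxx"]    # text after ':' becomes query parameters on the returned
                    # SpecBuildInterface (concrete specs only)
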
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.
@@ -4288,17 +3803,19 @@ class Spec:
yield deps
- def colorized(self):
- return colorize_spec(self)
+ @property
+ def namespace_if_anonymous(self):
+ return self.namespace if not self.name else None
+
+ def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = False) -> str:
+ r"""Prints out attributes of a spec according to a format string.
- def format(self, format_string=DEFAULT_FORMAT, **kwargs):
- r"""Prints out particular pieces of a spec, depending on what is
- in the format string.
+ Using an ``{attribute}`` format specifier, any field of the spec can be
+ selected. Those attributes can be recursive. For example,
+        ``s.format("{compiler.version}")`` will print the version of the compiler.
- Using the ``{attribute}`` syntax, any field of the spec can be
- selected. Those attributes can be recursive. For example,
- ``s.format({compiler.version})`` will print the version of the
- compiler.
+ If the attribute in a format specifier evaluates to ``None``, then the format
+ specifier will evaluate to the empty string, ``""``.
Commonly used attributes of the Spec for format strings include::
@@ -4314,6 +3831,7 @@ class Spec:
architecture.os
architecture.target
prefix
+ namespace
Some additional special-case properties can be added::
@@ -4322,198 +3840,165 @@ class Spec:
spack_install The spack install directory
The ``^`` sigil can be used to access dependencies by name.
- ``s.format({^mpi.name})`` will print the name of the MPI
- implementation in the spec.
+        ``s.format("{^mpi.name}")`` will print the name of the MPI implementation in the
+        spec.
- The ``@``, ``%``, ``arch=``, and ``/`` sigils
- can be used to include the sigil with the printed
- string. These sigils may only be used with the appropriate
- attributes, listed below::
+ The ``@``, ``%``, and ``/`` sigils can be used to include the sigil with the
+ printed string. These sigils may only be used with the appropriate attributes,
+ listed below::
@ ``{@version}``, ``{@compiler.version}``
% ``{%compiler}``, ``{%compiler.name}``
- arch= ``{arch=architecture}``
/ ``{/hash}``, ``{/hash:7}``, etc
- The ``@`` sigil may also be used for any other property named
- ``version``. Sigils printed with the attribute string are only
- printed if the attribute string is non-empty, and are colored
- according to the color of the attribute.
-
- Sigils are not used for printing variants. Variants listed by
- name naturally print with their sigil. For example,
- ``spec.format('{variants.debug}')`` would print either
- ``+debug`` or ``~debug`` depending on the name of the
- variant. Non-boolean variants print as ``name=value``. To
- print variant names or values independently, use
+ The ``@`` sigil may also be used for any other property named ``version``.
+ Sigils printed with the attribute string are only printed if the attribute
+ string is non-empty, and are colored according to the color of the attribute.
+
+ Variants listed by name naturally print with their sigil. For example,
+ ``spec.format('{variants.debug}')`` prints either ``+debug`` or ``~debug``
+ depending on the name of the variant. Non-boolean variants print as
+ ``name=value``. To print variant names or values independently, use
``spec.format('{variants.<name>.name}')`` or
``spec.format('{variants.<name>.value}')``.
- Spec format strings use ``\`` as the escape character. Use
- ``\{`` and ``\}`` for literal braces, and ``\\`` for the
- literal ``\`` character.
-
- Args:
- format_string (str): string containing the format to be expanded
+ There are a few attributes on specs that can be specified as key-value pairs
+ that are *not* variants, e.g.: ``os``, ``arch``, ``architecture``, ``target``,
+ ``namespace``, etc. You can format these with an optional ``key=`` prefix, e.g.
+ ``{namespace=namespace}`` or ``{arch=architecture}``, etc. The ``key=`` prefix
+ will be colorized along with the value.
- Keyword Args:
- color (bool): True if returned string is colored
- transform (dict): maps full-string formats to a callable \
- that accepts a string and returns another one
+ When formatting specs, key-value pairs are separated from preceding parts of the
+ spec by whitespace. To avoid printing extra whitespace when the formatted
+ attribute is not set, you can add whitespace to the key *inside* the braces of
+ the format string, e.g.:
- """
- color = kwargs.get("color", False)
- transform = kwargs.get("transform", {})
+ { namespace=namespace}
- out = io.StringIO()
+ This evaluates to `` namespace=builtin`` if ``namespace`` is set to ``builtin``,
+ and to ``""`` if ``namespace`` is ``None``.
- def write(s, c=None):
- f = clr.cescape(s)
- if c is not None:
- f = COLOR_FORMATS[c] + f + "@."
- clr.cwrite(f, stream=out, color=color)
+ Spec format strings use ``\`` as the escape character. Use ``\{`` and ``\}`` for
+ literal braces, and ``\\`` for the literal ``\`` character.
- def write_attribute(spec, attribute, color):
- attribute = attribute.lower()
+ Args:
+ format_string: string containing the format to be expanded
+ color: True for colorized result; False for no color; None for auto color.
- sig = ""
- if attribute.startswith(("@", "%", "/")):
- # color sigils that are inside braces
- sig = attribute[0]
- attribute = attribute[1:]
- elif attribute.startswith("arch="):
- sig = " arch=" # include space as separator
- attribute = attribute[5:]
-
- current = spec
- if attribute.startswith("^"):
- attribute = attribute[1:]
- dep, attribute = attribute.split(".", 1)
- current = self[dep]
+ """
+ ensure_modern_format_string(format_string)
+
+ def safe_color(sigil: str, string: str, color_fmt: Optional[str]) -> str:
+ # avoid colorizing if there is no color or the string is empty
+ if (color is False) or not color_fmt or not string:
+ return sigil + string
+ # escape and add the sigil here to avoid multiple concatenations
+ if sigil == "@":
+ sigil = "@@"
+ return clr.colorize(f"{color_fmt}{sigil}{clr.cescape(string)}@.", color=color)
+
+ def format_attribute(match_object: Match) -> str:
+ (esc, sig, dep, hash, hash_len, attribute, close_brace, unmatched_close_brace) = (
+ match_object.groups()
+ )
+ if esc:
+ return esc
+ elif unmatched_close_brace:
+ raise SpecFormatStringError(f"Unmatched close brace: '{format_string}'")
+ elif not close_brace:
+ raise SpecFormatStringError(f"Missing close brace: '{format_string}'")
+
+ current = self if dep is None else self[dep]
+
+ # Hash attributes can return early.
+ # NOTE: we currently treat abstract_hash like an attribute and ignore
+ # any length associated with it. We may want to change that.
+ if hash:
+ if sig and sig != "/":
+ raise SpecFormatSigilError(sig, "DAG hashes", hash)
+ try:
+ length = int(hash_len) if hash_len else None
+ except ValueError:
+ raise SpecFormatStringError(f"Invalid hash length: '{hash_len}'")
+ return safe_color(sig or "", current.dag_hash(length), HASH_COLOR)
if attribute == "":
raise SpecFormatStringError("Format string attributes must be non-empty")
+ attribute = attribute.lower()
parts = attribute.split(".")
assert parts
# check that the sigil is valid for the attribute.
- if sig == "@" and parts[-1] not in ("versions", "version"):
+ if not sig:
+ sig = ""
+ elif sig == "@" and parts[-1] not in ("versions", "version"):
raise SpecFormatSigilError(sig, "versions", attribute)
elif sig == "%" and attribute not in ("compiler", "compiler.name"):
raise SpecFormatSigilError(sig, "compilers", attribute)
- elif sig == "/" and not re.match(r"(abstract_)?hash(:\d+)?$", attribute):
+ elif sig == "/" and attribute != "abstract_hash":
raise SpecFormatSigilError(sig, "DAG hashes", attribute)
- elif sig == " arch=" and attribute not in ("architecture", "arch"):
- raise SpecFormatSigilError(sig, "the architecture", attribute)
-
- # find the morph function for our attribute
- morph = transform.get(attribute, lambda s, x: x)
-
- # Special cases for non-spec attributes and hashes.
- # These must be the only non-dep component of the format attribute
- if attribute == "spack_root":
- write(morph(spec, spack.paths.spack_root))
- return
- elif attribute == "spack_install":
- write(morph(spec, spack.store.STORE.layout.root))
- return
- elif re.match(r"hash(:\d)?", attribute):
- col = "#"
- if ":" in attribute:
- _, length = attribute.split(":")
- write(sig + morph(spec, current.dag_hash(int(length))), col)
- else:
- write(sig + morph(spec, current.dag_hash()), col)
- return
# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
if not part:
raise SpecFormatStringError("Format string attributes must be non-empty")
- if part.startswith("_"):
+ elif part.startswith("_"):
raise SpecFormatStringError("Attempted to format private attribute")
- else:
- if isinstance(current, vt.VariantMap):
- # subscript instead of getattr for variant names
+ elif isinstance(current, VariantMap):
+ # subscript instead of getattr for variant names
+ try:
current = current[part]
- else:
- # aliases
- if part == "arch":
- part = "architecture"
- elif part == "version":
- # version (singular) requires a concrete versions list. Avoid
- # pedantic errors by using versions (plural) when not concrete.
- # These two are not entirely equivalent for pkg@=1.2.3:
- # - version prints '1.2.3'
- # - versions prints '=1.2.3'
- if not current.versions.concrete:
- part = "versions"
- try:
- current = getattr(current, part)
- except AttributeError:
- parent = ".".join(parts[:idx])
- m = "Attempted to format attribute %s." % attribute
- m += "Spec %s has no attribute %s" % (parent, part)
- raise SpecFormatStringError(m)
- if isinstance(current, vn.VersionList):
- if current == vn.any_version:
- # We don't print empty version lists
- return
-
- if callable(current):
- raise SpecFormatStringError("Attempted to format callable object")
- if current is None:
- # We're not printing anything
- return
+ except KeyError:
+ raise SpecFormatStringError(f"Variant '{part}' does not exist")
+ else:
+ # aliases
+ if part == "arch":
+ part = "architecture"
+ elif part == "version" and not current.versions.concrete:
+ # version (singular) requires a concrete versions list. Avoid
+ # pedantic errors by using versions (plural) when not concrete.
+ # These two are not entirely equivalent for pkg@=1.2.3:
+ # - version prints '1.2.3'
+ # - versions prints '=1.2.3'
+ part = "versions"
+ try:
+ current = getattr(current, part)
+ except AttributeError:
+ raise SpecFormatStringError(
+ f"Attempted to format attribute {attribute}. "
+ f"Spec {'.'.join(parts[:idx])} has no attribute {part}"
+ )
+ if isinstance(current, vn.VersionList) and current == vn.any_version:
+ # don't print empty version lists
+ return ""
+
+ if callable(current):
+ raise SpecFormatStringError("Attempted to format callable object")
+
+ if current is None:
+ # not printing anything
+ return ""
# Set color codes for various attributes
- col = None
- if "variants" in parts:
- col = "+"
- elif "architecture" in parts:
- col = "="
+ color = None
+ if "architecture" in parts:
+ color = ARCHITECTURE_COLOR
+ elif "variants" in parts or sig.endswith("="):
+ color = VARIANT_COLOR
elif "compiler" in parts or "compiler_flags" in parts:
- col = "%"
+ color = COMPILER_COLOR
elif "version" in parts or "versions" in parts:
- col = "@"
-
- # Finally, write the output
- write(sig + morph(spec, str(current)), col)
-
- attribute = ""
- in_attribute = False
- escape = False
-
- for c in format_string:
- if escape:
- out.write(c)
- escape = False
- elif c == "\\":
- escape = True
- elif in_attribute:
- if c == "}":
- write_attribute(self, attribute, color)
- attribute = ""
- in_attribute = False
- else:
- attribute += c
- else:
- if c == "}":
- raise SpecFormatStringError(
- "Encountered closing } before opening { in %s" % format_string
- )
- elif c == "{":
- in_attribute = True
- else:
- out.write(c)
- if in_attribute:
- raise SpecFormatStringError(
- "Format string terminated while reading attribute." "Missing terminating }."
- )
+ color = VERSION_COLOR
+
+ # return empty string if the value of the attribute is None.
+ if current is None:
+ return ""
+
+ # return colored output
+ return safe_color(sig, str(current), color)
- formatted_spec = out.getvalue()
- return formatted_spec.strip()
+ return SPEC_FORMAT_RE.sub(format_attribute, format_string).strip()
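
A few illustrative calls against the regex-driven formatter above (output values
are made up):

    s.format("{name}-{version}-{hash:7}")     # "zlib-1.2.13-abcdefg"
    s.format("{name}{ namespace=namespace}")  # key=value part vanishes if namespace is None
    s.format(r"\{name\}")                     # escaped braces print literally: "{name}"
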
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
@@ -4521,6 +4006,16 @@ class Spec:
kwargs.setdefault("color", None)
return self.format(*args, **kwargs)
+ @property
+ def spack_root(self):
+ """Special field for using ``{spack_root}`` in Spec.format()."""
+ return spack.paths.spack_root
+
+ @property
+ def spack_install(self):
+ """Special field for using ``{spack_install}`` in Spec.format()."""
+ return spack.store.STORE.layout.root
+
def format_path(
# self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None
self,
@@ -4546,18 +4041,31 @@ class Spec:
path_ctor = _path_ctor or pathlib.PurePath
format_string_as_path = path_ctor(format_string)
- if format_string_as_path.is_absolute():
+ if format_string_as_path.is_absolute() or (
+ # Paths that begin with a single "\" on windows are relative, but we still
+ # want to preserve the initial "\\" to be consistent with PureWindowsPath.
+ # Ensure that this '\' is not passed to polite_filename() so it's not converted to '_'
+ (os.name == "nt" or path_ctor == pathlib.PureWindowsPath)
+ and format_string_as_path.parts[0] == "\\"
+ ):
output_path_components = [format_string_as_path.parts[0]]
input_path_components = list(format_string_as_path.parts[1:])
else:
output_path_components = []
input_path_components = list(format_string_as_path.parts)
+
output_path_components += [
- fs.polite_filename(self.format(x)) for x in input_path_components
+ fs.polite_filename(self.format(part)) for part in input_path_components
]
return str(path_ctor(*output_path_components))
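
A behavior sketch (paths illustrative):

    s.format_path("{name}-{version}/lib")  # "zlib-1.2.13/lib", components sanitized
    s.format_path("/opt/{name}")           # the absolute root "/" is preserved as-is
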
def __str__(self):
+ if self._concrete:
+ return self.format("{name}{@version}{/hash}")
+
+ if not self._dependencies:
+ return self.format()
+
root_str = [self.format()]
sorted_dependencies = sorted(
self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
@@ -4580,7 +4088,7 @@ class Spec:
spec_str = " ^".join(root_str + sorted_dependencies)
return spec_str.strip()
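
The new __str__ in brief (values illustrative; concrete specs print the full hash):

    str(abstract)  # "mpileaks@2.3 ^mpich" -- root, then sorted dependencies
    str(concrete)  # "mpileaks@2.3.4/abcd123..." -- i.e. {name}{@version}{/hash}
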
- def install_status(self):
+ def install_status(self) -> InstallStatus:
"""Helper for tree to print DB install status."""
if not self.concrete:
return InstallStatus.absent
@@ -4624,13 +4132,14 @@ class Spec:
recurse_dependencies: bool = True,
status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
prefix: Optional[Callable[["Spec"], str]] = None,
+ key=id,
) -> str:
- """Prints out this spec and its dependencies, tree-formatted
- with indentation.
+ """Prints out this spec and its dependencies, tree-formatted with indentation.
- Status function may either output a boolean or an InstallStatus
+ See multi-spec ``spack.spec.tree()`` function for details.
Args:
color: if True, always colorize the tree. If False, don't colorize the tree. If None,
use the default from llnl.tty.color
depth: print the depth from the root
@@ -4648,60 +4157,23 @@ class Spec:
prefix: optional callable that takes a node as an argument and return its
installation prefix
"""
- out = ""
-
- if color is None:
- color = clr.get_color_when()
-
- for d, dep_spec in traverse.traverse_tree(
- [self], cover=cover, deptype=deptypes, depth_first=depth_first
- ):
- node = dep_spec.spec
-
- if prefix is not None:
- out += prefix(node)
- out += " " * indent
-
- if depth:
- out += "%-4d" % d
-
- if status_fn:
- status = status_fn(node)
- if status in list(InstallStatus):
- out += clr.colorize(status.value, color=color)
- elif status:
- out += clr.colorize("@g{[+]} ", color=color)
- else:
- out += clr.colorize("@r{[-]} ", color=color)
-
- if hashes:
- out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
-
- if show_types:
- if cover == "nodes":
- # when only covering nodes, we merge dependency types
- # from all dependents before showing them.
- depflag = 0
- for ds in node.edges_from_dependents():
- depflag |= ds.depflag
- else:
- # when covering edges or paths, we show dependency
- # types only for the edge through which we visited
- depflag = dep_spec.depflag
-
- type_chars = dt.flag_to_chars(depflag)
- out += "[%s] " % type_chars
-
- out += " " * d
- if d > 0:
- out += "^"
- out += node.format(format, color=color) + "\n"
-
- # Check if we wanted just the first line
- if not recurse_dependencies:
- break
-
- return out
+ return tree(
+ [self],
+ color=color,
+ depth=depth,
+ hashes=hashes,
+ hashlen=hashlen,
+ cover=cover,
+ indent=indent,
+ format=format,
+ deptypes=deptypes,
+ show_types=show_types,
+ depth_first=depth_first,
+ recurse_dependencies=recurse_dependencies,
+ status_fn=status_fn,
+ prefix=prefix,
+ key=key,
+ )
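
Since the method now forwards to the module-level tree(), single-spec and
multi-spec rendering share one code path. An illustrative call:

    print(s.tree(hashes=True, hashlen=7, show_types=True, status_fn=Spec.install_status))
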
def __repr__(self):
return str(self)
@@ -4716,9 +4188,7 @@ class Spec:
@property
def target(self):
- # This property returns the underlying microarchitecture object
- # to give to the attribute the appropriate comparison semantic
- return self.architecture.target.microarchitecture
+ return self.architecture.target
@property
def build_spec(self):
@@ -4742,156 +4212,247 @@ class Spec:
new_dependencies.add(edge)
spec._dependencies = new_dependencies
- def splice(self, other, transitive):
- """Splices dependency "other" into this ("target") Spec, and return the
- result as a concrete Spec.
- If transitive, then other and its dependencies will be extrapolated to
- a list of Specs and spliced in accordingly.
- For example, let there exist a dependency graph as follows:
- T
- | \
- Z<-H
- In this example, Spec T depends on H and Z, and H also depends on Z.
- Suppose, however, that we wish to use a different H, known as H'. This
- function will splice in the new H' in one of two ways:
- 1. transitively, where H' depends on the Z' it was built with, and the
- new T* also directly depends on this new Z', or
- 2. intransitively, where the new T* and H' both depend on the original
- Z.
- Since the Spec returned by this splicing function is no longer deployed
- the same way it was built, any such changes are tracked by setting the
- build_spec to point to the corresponding dependency from the original
- Spec.
- TODO: Extend this for non-concrete Specs.
+ def _virtuals_provided(self, root):
+ """Return set of virtuals provided by self in the context of root"""
+ if root is self:
+ # Could be using any virtual the package can provide
+ return set(v.name for v in self.package.virtuals_provided)
+
+        hashes = {s.dag_hash() for s in root.traverse()}
+        in_edges = {
+            edge for edge in self.edges_from_dependents() if edge.parent.dag_hash() in hashes
+        }
+ return set().union(*[edge.virtuals for edge in in_edges])
+
+ def _splice_match(self, other, self_root, other_root):
+ """Return True if other is a match for self in a splice of other_root into self_root
+
+ Other is a splice match for self if it shares a name, or if self is a virtual provider
+ and other provides a superset of the virtuals provided by self. Virtuals provided are
+ evaluated in the context of a root spec (self_root for self, other_root for other).
+
+ This is a slight oversimplification. Other could be a match for self in the context of
+ one edge in self_root and not in the context of another edge. This method could be
+ expanded in the future to account for these cases.
"""
- assert self.concrete
- assert other.concrete
+ if other.name == self.name:
+ return True
- virtuals_to_replace = [v.name for v in other.package.virtuals_provided if v in self]
- if virtuals_to_replace:
- deps_to_replace = dict((self[v], other) for v in virtuals_to_replace)
- # deps_to_replace = [self[v] for v in virtuals_to_replace]
- else:
- # TODO: sanity check and error raise here for other.name not in self
- deps_to_replace = {self[other.name]: other}
- # deps_to_replace = [self[other.name]]
+        return bool(
+            self._virtuals_provided(self_root)
+            and self._virtuals_provided(self_root) <= other._virtuals_provided(other_root)
+        )
- for d in deps_to_replace:
- if not all(
- v in other.package.virtuals_provided or v not in self
- for v in d.package.virtuals_provided
- ):
- # There was something provided by the original that we don't
- # get from its replacement.
- raise SpliceError(
- ("Splice between {0} and {1} will not provide " "the same virtuals.").format(
- self.name, other.name
- )
- )
- for n in d.traverse(root=False):
- if not all(
- any(
- v in other_n.package.virtuals_provided
- for other_n in other.traverse(root=False)
- )
- or v not in self
- for v in n.package.virtuals_provided
- ):
- raise SpliceError(
- (
- "Splice between {0} and {1} will not provide " "the same virtuals."
- ).format(self.name, other.name)
- )
+ def _splice_detach_and_add_dependents(self, replacement, context):
+ """Helper method for Spec._splice_helper.
+
+ replacement is a node to splice in, context is the scope of dependents to consider relevant
+ to this splice."""
+ # Update build_spec attributes for all transitive dependents
+ # before we start changing their dependencies
+ ancestors_in_context = [
+ a
+ for a in self.traverse(root=False, direction="parents")
+ if a in context.traverse(deptype=dt.LINK | dt.RUN)
+ ]
+ for ancestor in ancestors_in_context:
+ # Only set it if it hasn't been spliced before
+ ancestor._build_spec = ancestor._build_spec or ancestor.copy()
+ ancestor.clear_caches(ignore=(ht.package_hash.attr,))
+ for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
+ if edge.depflag & ~dt.BUILD:
+ edge.depflag &= ~dt.BUILD
+ else:
+ ancestor._dependencies[edge.spec.name].remove(edge)
+ edge.spec._dependents[ancestor.name].remove(edge)
- # For now, check that we don't have DAG with multiple specs from the
- # same package
- def multiple_specs(root):
- counter = collections.Counter([node.name for node in root.traverse()])
- _, max_number = counter.most_common()[0]
- return max_number > 1
-
- if multiple_specs(self) or multiple_specs(other):
- msg = (
- 'Either "{0}" or "{1}" contain multiple specs from the same '
- "package, which cannot be handled by splicing at the moment"
- )
- raise ValueError(msg.format(self, other))
+ # For each direct dependent in the link/run graph, replace the dependency on
+ # node with one on replacement
+ for edge in self.edges_from_dependents():
+ if edge.parent not in ancestors_in_context:
+ continue
- # Multiple unique specs with the same name will collide, so the
- # _dependents of these specs should not be trusted.
- # Variants may also be ignored here for now...
+ edge.parent._dependencies.edges[self.name].remove(edge)
+ self._dependents.edges[edge.parent.name].remove(edge)
+ edge.parent._add_dependency(replacement, depflag=edge.depflag, virtuals=edge.virtuals)
- # Keep all cached hashes because we will invalidate the ones that need
- # invalidating later, and we don't want to invalidate unnecessarily
+ def _splice_helper(self, replacement):
+ """Main loop of a transitive splice.
- def from_self(name, transitive):
- if transitive:
- if name in other:
- return False
- if any(v in other for v in self[name].package.virtuals_provided):
- return False
- return True
- else:
- if name == other.name:
- return False
- if any(
- v in other.package.virtuals_provided
- for v in self[name].package.virtuals_provided
- ):
- return False
- return True
+        The while loop around the traversal of self ensures that changes made to self by
+        previous iterations are reflected in the traversal. Topological ordering (all incoming
+        edges traversed before any outgoing edge) avoids evaluating irrelevant nodes: if a
+        node will not be in the end result, its parent is spliced first and the node is never
+        considered.
+        For each node in self, find any analogous node in replacement and swap it in.
+        We assume all build deps are handled outside of this method.
- self_nodes = dict(
- (s.name, s.copy(deps=False))
- for s in self.traverse(root=True)
- if from_self(s.name, transitive)
- )
+        Arguments:
+            replacement: The node that will replace any equivalent node in self. See
+                ``Spec._splice_match`` and ``Spec._virtuals_provided`` for how a match
+                is evaluated.
+ """
+ ids = set(id(s) for s in replacement.traverse())
- if transitive:
- other_nodes = dict((s.name, s.copy(deps=False)) for s in other.traverse(root=True))
- else:
- # NOTE: Does not fully validate providers; loader races possible
- other_nodes = dict(
- (s.name, s.copy(deps=False))
- for s in other.traverse(root=True)
- if s is other or s.name not in self
- )
+ # Sort all possible replacements by name and virtual for easy access later
+ replacements_by_name = collections.defaultdict(list)
+ for node in replacement.traverse():
+ replacements_by_name[node.name].append(node)
+ virtuals = node._virtuals_provided(root=replacement)
+ for virtual in virtuals:
+ replacements_by_name[virtual].append(node)
- nodes = other_nodes.copy()
- nodes.update(self_nodes)
+ changed = True
+ while changed:
+ changed = False
- for name in nodes:
- if name in self_nodes:
- for edge in self[name].edges_to_dependencies():
- dep_name = deps_to_replace.get(edge.spec, edge.spec).name
- nodes[name].add_dependency_edge(
- nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
- )
- if any(dep not in self_nodes for dep in self[name]._dependencies):
- nodes[name].build_spec = self[name].build_spec
- else:
- for edge in other[name].edges_to_dependencies():
- nodes[name].add_dependency_edge(
- nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
- )
- if any(dep not in other_nodes for dep in other[name]._dependencies):
- nodes[name].build_spec = other[name].build_spec
+            # Intentionally allowing the traversal to change on each iteration;
+            # topological order ensures we only reach nodes that will be in the
+            # final result
+ for node in self.traverse(root=False, order="topo", deptype=dt.ALL & ~dt.BUILD):
+ # If this node has already been swapped in, don't consider it again
+ if id(node) in ids:
+ continue
+
+ analogs = replacements_by_name[node.name]
+ if not analogs:
+ # If we have to check for matching virtuals, then we need to check that it
+ # matches all virtuals. Use `_splice_match` to validate possible matches
+ for virtual in node._virtuals_provided(root=self):
+ analogs += [
+ r
+ for r in replacements_by_name[virtual]
+ if node._splice_match(r, self_root=self, other_root=replacement)
+ ]
+
+ # No match, keep iterating over self
+ if not analogs:
+ continue
+
+ # If there are multiple analogs, this package must satisfy the constraint
+ # that a newer version can always replace a lesser version.
+ analog = max(analogs, key=lambda s: s.version)
+
+ # No splice needed here, keep checking
+ if analog == node:
+ continue
+
+ node._splice_detach_and_add_dependents(analog, context=self)
+ changed = True
+ break
- ret = nodes[self.name]
+ def splice(self, other: "Spec", transitive: bool = True) -> "Spec":
+ """Returns a new, spliced concrete Spec with the "other" dependency and,
+ optionally, its dependencies.
- # Clear cached hashes for all affected nodes
- # Do not touch unaffected nodes
- for dep in ret.traverse(root=True, order="post"):
- opposite = other_nodes if dep.name in self_nodes else self_nodes
- if any(name in dep for name in opposite.keys()):
- # package hash cannot be affected by splice
- dep.clear_cached_hashes(ignore=["package_hash"])
+ Args:
+ other: alternate dependency
+ transitive: include other's dependencies
+
+ Returns: a concrete, spliced version of the current Spec
+
+ When transitive is "True", use the dependencies from "other" to reconcile
+ conflicting dependencies. When transitive is "False", use dependencies from self.
+
+ For example, suppose we have the following dependency graph:
+
+ T
+ | \
+ Z<-H
- dep.dag_hash()
+ Spec T depends on H and Z, and H also depends on Z. Now we want to use
+ a different H, called H'. This function can be used to splice in H' to
+ create a new spec, called T*. If H' was built with Z', then transitive
+ "True" will ensure H' and T* both depend on Z':
- return nodes[self.name]
+ T*
+ | \
+ Z'<-H'
- def clear_cached_hashes(self, ignore=()):
+ If transitive is "False", then H' and T* will both depend on
+ the original Z, resulting in a new H'*
+
+ T*
+ | \
+ Z<-H'*
+
+ Provenance of the build is tracked through the "build_spec" property
+ of the spliced spec and any correspondingly modified dependency specs.
+ The build specs are set to that of the original spec, so the original
+ spec's provenance is preserved unchanged."""
+ assert self.concrete
+ assert other.concrete
+
+ if self._splice_match(other, self_root=self, other_root=other):
+ return other.copy()
+
+ if not any(
+ node._splice_match(other, self_root=self, other_root=other)
+ for node in self.traverse(root=False, deptype=dt.LINK | dt.RUN)
+ ):
+ other_str = other.format("{name}/{hash:7}")
+ self_str = self.format("{name}/{hash:7}")
+ msg = f"Cannot splice {other_str} into {self_str}."
+ msg += f" Either {self_str} cannot depend on {other_str},"
+ msg += f" or {other_str} fails to provide a virtual used in {self_str}"
+ raise SpliceError(msg)
+
+ # Copies of all non-build deps, build deps will get added at the end
+ spec = self.copy(deps=dt.ALL & ~dt.BUILD)
+ replacement = other.copy(deps=dt.ALL & ~dt.BUILD)
+
+ def make_node_pairs(orig_spec, copied_spec):
+ return list(
+ zip(
+ orig_spec.traverse(deptype=dt.ALL & ~dt.BUILD),
+ copied_spec.traverse(deptype=dt.ALL & ~dt.BUILD),
+ )
+ )
+
+ def mask_build_deps(in_spec):
+ for edge in in_spec.traverse_edges(cover="edges"):
+ edge.depflag &= ~dt.BUILD
+
+ if transitive:
+ # These pairs will allow us to reattach all direct build deps
+ # We need the list of pairs while the two specs still match
+ node_pairs = make_node_pairs(self, spec)
+
+ # Ignore build deps in the modified spec while doing the splice
+ # They will be added back in at the end
+ mask_build_deps(spec)
+
+ # Transitively splice any relevant nodes from new into base
+ # This handles all shared dependencies between self and other
+ spec._splice_helper(replacement)
+ else:
+ # Do the same thing as the transitive splice, but reversed
+ node_pairs = make_node_pairs(other, replacement)
+ mask_build_deps(replacement)
+ replacement._splice_helper(spec)
+
+ # Intransitively splice replacement into spec
+ # This is very simple now that all shared dependencies have been handled
+ for node in spec.traverse(order="topo", deptype=dt.LINK | dt.RUN):
+ if node._splice_match(other, self_root=spec, other_root=other):
+ node._splice_detach_and_add_dependents(replacement, context=spec)
+
+ # For nodes that were spliced, modify the build spec to ensure build deps are preserved
+ # For nodes that were not spliced, replace the build deps on the spec itself
+ for orig, copy in node_pairs:
+ if copy._build_spec:
+ copy._build_spec = orig.build_spec.copy()
+ else:
+ for edge in orig.edges_to_dependencies(depflag=dt.BUILD):
+ copy._add_dependency(edge.spec, depflag=dt.BUILD, virtuals=edge.virtuals)
+
+ return spec
+
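
A hedged end-to-end sketch (spec names hypothetical; assumes h_prime provides every
virtual that the replaced node provided, and that t was not itself spliced before):

    t = Spec("t").concretized()
    h_prime = Spec("h@2.0").concretized()
    t_star = t.splice(h_prime, transitive=True)
    assert t_star.build_spec.dag_hash() == t.dag_hash()  # provenance preserved
    assert t_star.dag_hash() != t.dag_hash()             # deployment DAG changed
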
+ def clear_caches(self, ignore=()):
"""
        Clears all cached hashes and the cached prefix in a Spec, while preserving
        other properties.
"""
@@ -4899,7 +4460,9 @@ class Spec:
if h.attr not in ignore:
if hasattr(self, h.attr):
setattr(self, h.attr, None)
- self._dunder_hash = None
+ for attr in ("_dunder_hash", "_prefix"):
+ if attr not in ignore:
+ setattr(self, attr, None)
def __hash__(self):
# If the spec is concrete, we leverage the process hash and just use
@@ -4933,9 +4496,237 @@ class Spec:
v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname))
-def parse_with_version_concrete(string: str, compiler: bool = False):
+class VariantMap(lang.HashableMap):
+ """Map containing variant instances. New values can be added only
+ if the key is not already present."""
+
+ def __init__(self, spec: Spec):
+ super().__init__()
+ self.spec = spec
+
+ def __setitem__(self, name, vspec):
+ # Raise a TypeError if vspec is not of the right type
+ if not isinstance(vspec, vt.AbstractVariant):
+ raise TypeError(
+ "VariantMap accepts only values of variant types "
+ f"[got {type(vspec).__name__} instead]"
+ )
+
+ # Raise an error if the variant was already in this map
+ if name in self.dict:
+            raise vt.DuplicateVariantError(f'Cannot specify variant "{name}" twice')
+
+ # Raise an error if name and vspec.name don't match
+ if name != vspec.name:
+ raise KeyError(
+                f'Inconsistent key "{name}", must be "{vspec.name}" to match VariantSpec'
+ )
+
+ # Set the item
+ super().__setitem__(name, vspec)
+
+ def substitute(self, vspec):
+ """Substitutes the entry under ``vspec.name`` with ``vspec``.
+
+ Args:
+ vspec: variant spec to be substituted
+ """
+ if vspec.name not in self:
+ raise KeyError(f"cannot substitute a key that does not exist [{vspec.name}]")
+
+ # Set the item
+ super().__setitem__(vspec.name, vspec)
+
+ def partition_variants(self):
+ non_prop, prop = lang.stable_partition(self.values(), lambda x: not x.propagate)
+ # Just return the names
+ non_prop = [x.name for x in non_prop]
+ prop = [x.name for x in prop]
+ return non_prop, prop
+
+ def satisfies(self, other: "VariantMap") -> bool:
+ if self.spec.concrete:
+ return self._satisfies_when_self_concrete(other)
+ return self._satisfies_when_self_abstract(other)
+
+ def _satisfies_when_self_concrete(self, other: "VariantMap") -> bool:
+ non_propagating, propagating = other.partition_variants()
+ result = all(
+ name in self and self[name].satisfies(other[name]) for name in non_propagating
+ )
+ if not propagating:
+ return result
+
+ for node in self.spec.traverse():
+ if not all(
+ node.variants[name].satisfies(other[name])
+ for name in propagating
+ if name in node.variants
+ ):
+ return False
+ return result
+
+ def _satisfies_when_self_abstract(self, other: "VariantMap") -> bool:
+ other_non_propagating, other_propagating = other.partition_variants()
+ self_non_propagating, self_propagating = self.partition_variants()
+
+ # First check variants without propagation set
+ result = all(
+ name in self_non_propagating
+ and (self[name].propagate or self[name].satisfies(other[name]))
+ for name in other_non_propagating
+ )
+ if result is False or (not other_propagating and not self_propagating):
+ return result
+
+ # Check that self doesn't contradict variants propagated by other
+ if other_propagating:
+ for node in self.spec.traverse():
+ if not all(
+ node.variants[name].satisfies(other[name])
+ for name in other_propagating
+ if name in node.variants
+ ):
+ return False
+
+ # Check that other doesn't contradict variants propagated by self
+ if self_propagating:
+ for node in other.spec.traverse():
+ if not all(
+ node.variants[name].satisfies(self[name])
+ for name in self_propagating
+ if name in node.variants
+ ):
+ return False
+
+ return result
+
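
A sketch of the propagation cases handled above (``++`` is the variant-propagation
syntax; package and variant names illustrative):

    lhs = Spec("mpileaks ++debug")          # propagate +debug to all descendants
    rhs = Spec("mpileaks ^callpath +debug")
    # with an abstract lhs, a propagated variant counts as potentially satisfying,
    # and contradictions are checked on every node of the DAG, not just the root
    ok = lhs.satisfies(rhs)
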
+ def intersects(self, other):
+ return all(self[k].intersects(other[k]) for k in other if k in self)
+
+ def constrain(self, other: "VariantMap") -> bool:
+ """Add all variants in other that aren't in self to self. Also constrain all multi-valued
+ variants that are already present. Return True iff self changed"""
+ if other.spec is not None and other.spec._concrete:
+ for k in self:
+ if k not in other:
+ raise vt.UnsatisfiableVariantSpecError(self[k], "<absent>")
+
+ changed = False
+ for k in other:
+ if k in self:
+ # If they are not compatible raise an error
+ if not self[k].compatible(other[k]):
+ raise vt.UnsatisfiableVariantSpecError(self[k], other[k])
+ # If they are compatible merge them
+ changed |= self[k].constrain(other[k])
+ else:
+ # If it is not present copy it straight away
+ self[k] = other[k].copy()
+ changed = True
+
+ return changed
+
+ @property
+ def concrete(self):
+ """Returns True if the spec is concrete in terms of variants.
+
+ Returns:
+ bool: True or False
+ """
+ return self.spec._concrete or all(
+ v in self for v in self.spec.package_class.variant_names()
+ )
+
+ def copy(self) -> "VariantMap":
+ clone = VariantMap(self.spec)
+ for name, variant in self.items():
+ clone[name] = variant.copy()
+ return clone
+
+ def __str__(self):
+ if not self:
+ return ""
+
+ # print keys in order
+ sorted_keys = sorted(self.keys())
+
+ # Separate boolean variants from key-value pairs as they print
+ # differently. All booleans go first to avoid ' ~foo' strings that
+ # break spec reuse in zsh.
+ bool_keys = []
+ kv_keys = []
+        for key in sorted_keys:
+            if isinstance(self[key].value, bool):
+                bool_keys.append(key)
+            else:
+                kv_keys.append(key)
+
+ # add spaces before and after key/value variants.
+ string = io.StringIO()
+
+ for key in bool_keys:
+ string.write(str(self[key]))
+
+ for key in kv_keys:
+ string.write(" ")
+ string.write(str(self[key]))
+
+ return string.getvalue()
+
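# Rendering sketch (under `spack python`; names hypothetical): booleans
# print first with no leading space so they attach directly to whatever
# precedes them (a detached " ~foo" would be misparsed by zsh), while
# key/value variants are each preceded by a space.
import spack.spec

s = spack.spec.Spec("mpileaks foo=bar +baz ~qux")
str(s.variants)  # "+baz~qux foo=bar"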
+
+def substitute_abstract_variants(spec: Spec):
+ """Uses the information in `spec.package` to turn any variant that needs
+ it into a SingleValuedVariant or BoolValuedVariant.
+
+ This method is best effort. All variants that can be substituted will be
+ substituted before any error is raised.
+
+ Args:
+        spec: spec on which to perform the substitution
+ """
+ # This method needs to be best effort so that it works in matrix exclusion
+ # in $spack/lib/spack/spack/spec_list.py
+ unknown = []
+ for name, v in spec.variants.items():
+ if name == "dev_path":
+ spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
+ continue
+ elif name in vt.reserved_names:
+ continue
+
+ variant_defs = spec.package_class.variant_definitions(name)
+ valid_defs = []
+ for when, vdef in variant_defs:
+ if when.intersects(spec):
+ valid_defs.append(vdef)
+
+ if not valid_defs:
+ if name not in spec.package_class.variant_names():
+ unknown.append(name)
+ else:
+ whens = [str(when) for when, _ in variant_defs]
+ raise InvalidVariantForSpecError(v.name, f"({', '.join(whens)})", spec)
+ continue
+
+ pkg_variant, *rest = valid_defs
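+        # More than one conditional definition can match an abstract spec; in
+        # that case we cannot pick a definition yet, so leave the variant
+        # abstract and let concretization decide.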
+ if rest:
+ continue
+
+ new_variant = pkg_variant.make_variant(v._original_value)
+ pkg_variant.validate_or_raise(new_variant, spec.name)
+ spec.variants.substitute(new_variant)
+
+ if unknown:
+ variants = llnl.string.plural(len(unknown), "variant")
+ raise vt.UnknownVariantError(
+ f"Tried to set {variants} {llnl.string.comma_and(unknown)}. "
+ f"{spec.name} has no such {variants}",
+ unknown_variants=unknown,
+ )
+
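# Usage sketch (under `spack python`; requires a package repo that defines
# the variant): parsed variants start out abstract, and substitution
# narrows them to the kind the package declares, validating the value.
import spack.spec

s = spack.spec.Spec("hdf5 +mpi")
spack.spec.substitute_abstract_variants(s)
# s.variants["mpi"] is now validated against hdf5's variant definition;
# an undefined variant name would raise UnknownVariantError instead.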
+
+def parse_with_version_concrete(spec_like: Union[str, Spec], compiler: bool = False):
"""Same as Spec(string), but interprets @x as @=x"""
- s: Union[CompilerSpec, Spec] = CompilerSpec(string) if compiler else Spec(string)
+ s: Union[CompilerSpec, Spec] = CompilerSpec(spec_like) if compiler else Spec(spec_like)
interpreted_version = s.versions.concrete_range_as_version
if interpreted_version:
s.versions = vn.VersionList([interpreted_version])
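# Sketch of the "@x means @=x" reinterpretation (under `spack python`):
import spack.spec

s = spack.spec.parse_with_version_concrete("mpileaks@1.2")
# s.versions now holds the single exact version 1.2, as if "@=1.2" had
# been written, instead of the range of versions satisfying 1.2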
@@ -4953,7 +4744,7 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
Args:
*abstract_specs: abstract specs to be merged
"""
- merged_spec = spack.spec.Spec()
+ merged_spec = Spec()
for current_spec_constraint in abstract_specs:
merged_spec.constrain(current_spec_constraint, deps=False)
@@ -5022,13 +4813,17 @@ class SpecfileReaderBase:
else:
spec.compiler = None
+ propagated_names = node.get("propagate", [])
for name, values in node.get("parameters", {}).items():
+ propagate = name in propagated_names
if name in _valid_compiler_flags:
spec.compiler_flags[name] = []
for val in values:
- spec.compiler_flags.add_flag(name, val, False)
+ spec.compiler_flags.add_flag(name, val, propagate)
else:
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
+ name, values, propagate=propagate
+ )
spec.external_path = None
spec.external_modules = None
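# Shape of the node dict this reader now consumes (a hand-written sketch;
# real node dicts come from spec.json files): names listed under
# "propagate" mark the matching parameter as propagating when rebuilt.
node = {
    "parameters": {"cflags": ["-O2"], "debug": True},
    "propagate": ["debug"],
}
# -> "-O2" is added with propagate=False, while the "debug" variant is
#    reconstructed with propagate=True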
@@ -5119,11 +4914,15 @@ class SpecfileReaderBase:
virtuals=virtuals,
)
if "build_spec" in node.keys():
- _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
+ _, bhash, _ = cls.extract_build_spec_info_from_node_dict(node, hash_type=hash_type)
node_spec._build_spec = hash_dict[bhash]["node_spec"]
return hash_dict[root_spec_hash]["node_spec"]
+ @classmethod
+ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
+ raise NotImplementedError("Subclasses must implement this method.")
+
class SpecfileV1(SpecfileReaderBase):
@classmethod
@@ -5243,7 +5042,7 @@ class SpecfileV2(SpecfileReaderBase):
return dep_hash, deptypes, hash_type, virtuals
@classmethod
- def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
+ def extract_build_spec_info_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
build_spec_dict = node["build_spec"]
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
@@ -5304,7 +5103,6 @@ def get_host_environment_metadata() -> Dict[str, str]:
"""Get the host environment, reduce to a subset that we can store in
the install directory, and add the spack version.
"""
- import spack.main
environ = get_host_environment()
return {
@@ -5312,7 +5110,7 @@ def get_host_environment_metadata() -> Dict[str, str]:
"platform": environ["platform"],
"host_target": environ["target"],
"hostname": environ["hostname"],
- "spack_version": spack.main.get_version(),
+ "spack_version": spack.get_version(),
"kernel_version": platform.version(),
}
@@ -5356,10 +5154,17 @@ class SpecParseError(spack.error.SpecError):
)
-class ArchitecturePropagationError(spack.error.SpecError):
- """Raised when the double equal symbols are used to assign
- the spec's architecture.
- """
+class InvalidVariantForSpecError(spack.error.SpecError):
+ """Raised when an invalid conditional variant is specified."""
+
+ def __init__(self, variant, when, spec):
+ msg = f"Invalid variant {variant} for spec {spec}.\n"
+ msg += f"{variant} is only available for {spec.name} when satisfying one of {when}."
+ super().__init__(msg)
+
+
+class UnsupportedPropagationError(spack.error.SpecError):
+ """Raised when propagation (==) is used with reserved variant names."""
class DuplicateDependencyError(spack.error.SpecError):
@@ -5440,7 +5245,7 @@ class UnsatisfiableVersionSpecError(spack.error.UnsatisfiableSpecError):
class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
- """Raised when a spec comiler conflicts with package constraints."""
+ """Raised when a spec compiler conflicts with package constraints."""
def __init__(self, provided, required):
super().__init__(provided, required, "compiler")
@@ -5489,7 +5294,7 @@ class UnconstrainableDependencySpecError(spack.error.SpecError):
class AmbiguousHashError(spack.error.SpecError):
def __init__(self, msg, *specs):
spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
- spec_fmt += "{variants}{arch=architecture}{/hash:7}"
+ spec_fmt += "{variants}{ arch=architecture}{/hash:7}"
specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
super().__init__(msg + specs_str)
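# Sketch of why the space moved inside the braces (under `spack python`;
# assumes Spec.format omits the whole token, sigil included, when the
# attribute is unset): the leading space in "{ arch=architecture}" is
# only emitted when an architecture is present, so specs without one no
# longer pick up a stray trailing space.
import spack.spec

s = spack.spec.Spec("mpileaks")
s.format("{name}{ arch=architecture}")  # "mpileaks" -- no " arch=..." yet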