summaryrefslogtreecommitdiff
path: root/lib/spack/spack/spec.py
diff options
context:
space:
mode:
Diffstat (limited to 'lib/spack/spack/spec.py')
-rw-r--r--lib/spack/spack/spec.py1815
1 files changed, 880 insertions, 935 deletions
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 94559d001c..d1ee8a4325 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -120,75 +120,80 @@ import spack.variant as vt
import spack.version as vn
__all__ = [
- 'CompilerSpec',
- 'Spec',
- 'SpecParser',
- 'parse',
- 'SpecParseError',
- 'DuplicateDependencyError',
- 'DuplicateCompilerSpecError',
- 'UnsupportedCompilerError',
- 'DuplicateArchitectureError',
- 'InconsistentSpecError',
- 'InvalidDependencyError',
- 'NoProviderError',
- 'MultipleProviderError',
- 'UnsatisfiableSpecNameError',
- 'UnsatisfiableVersionSpecError',
- 'UnsatisfiableCompilerSpecError',
- 'UnsatisfiableCompilerFlagSpecError',
- 'UnsatisfiableArchitectureSpecError',
- 'UnsatisfiableProviderSpecError',
- 'UnsatisfiableDependencySpecError',
- 'AmbiguousHashError',
- 'InvalidHashError',
- 'NoSuchHashError',
- 'RedundantSpecError',
- 'SpecDeprecatedError',
+ "CompilerSpec",
+ "Spec",
+ "SpecParser",
+ "parse",
+ "SpecParseError",
+ "DuplicateDependencyError",
+ "DuplicateCompilerSpecError",
+ "UnsupportedCompilerError",
+ "DuplicateArchitectureError",
+ "InconsistentSpecError",
+ "InvalidDependencyError",
+ "NoProviderError",
+ "MultipleProviderError",
+ "UnsatisfiableSpecNameError",
+ "UnsatisfiableVersionSpecError",
+ "UnsatisfiableCompilerSpecError",
+ "UnsatisfiableCompilerFlagSpecError",
+ "UnsatisfiableArchitectureSpecError",
+ "UnsatisfiableProviderSpecError",
+ "UnsatisfiableDependencySpecError",
+ "AmbiguousHashError",
+ "InvalidHashError",
+ "NoSuchHashError",
+ "RedundantSpecError",
+ "SpecDeprecatedError",
]
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
#: Valid pattern for an identifier in Spack
-identifier_re = r'\w[\w-]*'
+identifier_re = r"\w[\w-]*"
-compiler_color = '@g' #: color for highlighting compilers
-version_color = '@c' #: color for highlighting versions
-architecture_color = '@m' #: color for highlighting architectures
-enabled_variant_color = '@B' #: color for highlighting enabled variants
-disabled_variant_color = '@r' #: color for highlighting disabled varaints
-dependency_color = '@.' #: color for highlighting dependencies
-hash_color = '@K' #: color for highlighting package hashes
+compiler_color = "@g" #: color for highlighting compilers
+version_color = "@c" #: color for highlighting versions
+architecture_color = "@m" #: color for highlighting architectures
+enabled_variant_color = "@B" #: color for highlighting enabled variants
+disabled_variant_color = "@r" #: color for highlighting disabled varaints
+dependency_color = "@." #: color for highlighting dependencies
+hash_color = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
-color_formats = {'%': compiler_color,
- '@': version_color,
- '=': architecture_color,
- '+': enabled_variant_color,
- '~': disabled_variant_color,
- '^': dependency_color,
- '#': hash_color}
+color_formats = {
+ "%": compiler_color,
+ "@": version_color,
+ "=": architecture_color,
+ "+": enabled_variant_color,
+ "~": disabled_variant_color,
+ "^": dependency_color,
+ "#": hash_color,
+}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``color_formats.keys()``.
-_separators = '[\\%s]' % '\\'.join(color_formats.keys())
+_separators = "[\\%s]" % "\\".join(color_formats.keys())
#: Versionlist constant so we don't have to build a list
#: every time we call str()
-_any_version = vn.VersionList([':'])
+_any_version = vn.VersionList([":"])
-default_format = '{name}{@version}'
-default_format += '{%compiler.name}{@compiler.version}{compiler_flags}'
-default_format += '{variants}{arch=architecture}'
+default_format = "{name}{@version}"
+default_format += "{%compiler.name}{@compiler.version}{compiler_flags}"
+default_format += "{variants}{arch=architecture}"
#: Regular expression to pull spec contents out of clearsigned signature
#: file.
CLEARSIGN_FILE_REGEX = re.compile(
- (r"^-----BEGIN PGP SIGNED MESSAGE-----"
- r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"),
- re.MULTILINE | re.DOTALL)
+ (
+ r"^-----BEGIN PGP SIGNED MESSAGE-----"
+ r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"
+ ),
+ re.MULTILINE | re.DOTALL,
+)
#: specfile format version. Must increase monotonically
specfile_format_version = 3
@@ -196,22 +201,22 @@ specfile_format_version = 3
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
- color_formats."""
- class insert_color:
+ color_formats."""
+ class insert_color:
def __init__(self):
self.last = None
def __call__(self, match):
# ignore compiler versions (color same as compiler)
sep = match.group(0)
- if self.last == '%' and sep == '@':
+ if self.last == "%" and sep == "@":
return clr.cescape(sep)
self.last = sep
- return '%s%s' % (color_formats[sep], clr.cescape(sep))
+ return "%s%s" % (color_formats[sep], clr.cescape(sep))
- return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + '@.')
+ return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
@lang.lazy_lexicographic_ordering
@@ -219,6 +224,7 @@ class ArchSpec(object):
"""Aggregate the target platform, the operating system and the target
microarchitecture into an architecture spec..
"""
+
@staticmethod
def _return_arch(os_tag, target_tag):
platform = spack.platforms.host()
@@ -230,15 +236,15 @@ class ArchSpec(object):
@staticmethod
def default_arch():
"""Return the default architecture"""
- return ArchSpec._return_arch('default_os', 'default_target')
+ return ArchSpec._return_arch("default_os", "default_target")
@staticmethod
def frontend_arch():
"""Return the frontend architecture"""
- return ArchSpec._return_arch('frontend', 'frontend')
+ return ArchSpec._return_arch("frontend", "frontend")
def __init__(self, spec_or_platform_tuple=(None, None, None)):
- """ Architecture specification a package should be built with.
+ """Architecture specification a package should be built with.
Each ArchSpec is comprised of three elements: a platform (e.g. Linux),
an OS (e.g. RHEL6), and a target (e.g. x86_64).
@@ -252,7 +258,7 @@ class ArchSpec(object):
# If the argument to __init__ is a spec string, parse it
# and construct an ArchSpec
def _string_or_none(s):
- if s and s != 'None':
+ if s and s != "None":
return str(s)
return None
@@ -268,15 +274,11 @@ class ArchSpec(object):
if isinstance(spec_or_platform_tuple, six.string_types):
spec_fields = spec_or_platform_tuple.split("-")
if len(spec_fields) != 3:
- msg = 'cannot construct an ArchSpec from {0!s}'
+ msg = "cannot construct an ArchSpec from {0!s}"
raise ValueError(msg.format(spec_or_platform_tuple))
platform, operating_system, target = spec_fields
- platform_tuple = (
- _string_or_none(platform),
- _string_or_none(operating_system),
- target
- )
+ platform_tuple = (_string_or_none(platform), _string_or_none(operating_system), target)
self.platform, self.os, self.target = platform_tuple
@@ -324,8 +326,9 @@ class ArchSpec(object):
if self.platform != curr_platform:
raise ValueError(
"Can't set arch spec OS to reserved value '%s' when the "
- "arch platform (%s) isn't the current platform (%s)" %
- (value, self.platform, curr_platform))
+ "arch platform (%s) isn't the current platform (%s)"
+ % (value, self.platform, curr_platform)
+ )
spec_platform = spack.platforms.by_name(self.platform)
value = str(spec_platform.operating_system(value))
@@ -348,7 +351,7 @@ class ArchSpec(object):
def target_or_none(t):
if isinstance(t, spack.target.Target):
return t
- if t and t != 'None':
+ if t and t != "None":
return spack.target.Target(t)
return None
@@ -361,8 +364,9 @@ class ArchSpec(object):
if self.platform != curr_platform:
raise ValueError(
"Can't set arch spec target to reserved value '%s' when "
- "the arch platform (%s) isn't the current platform (%s)" %
- (value, self.platform, curr_platform))
+ "the arch platform (%s) isn't the current platform (%s)"
+ % (value, self.platform, curr_platform)
+ )
spec_platform = spack.platforms.by_name(self.platform)
value = spec_platform.target(value)
@@ -384,23 +388,23 @@ class ArchSpec(object):
other = self._autospec(other)
# Check platform and os
- for attribute in ('platform', 'os'):
+ for attribute in ("platform", "os"):
other_attribute = getattr(other, attribute)
self_attribute = getattr(self, attribute)
if strict or self.concrete:
if other_attribute and self_attribute != other_attribute:
return False
else:
- if other_attribute and self_attribute and \
- self_attribute != other_attribute:
+ if other_attribute and self_attribute and self_attribute != other_attribute:
return False
# Check target
return self.target_satisfies(other, strict=strict)
def target_satisfies(self, other, strict):
- need_to_check = bool(other.target) if strict or self.concrete \
- else bool(other.target and self.target)
+ need_to_check = (
+ bool(other.target) if strict or self.concrete else bool(other.target and self.target)
+ )
# If there's no need to check we are fine
if not need_to_check:
@@ -425,7 +429,7 @@ class ArchSpec(object):
# Compute the intersection of every combination of ranges in the lists
results = self.target_intersection(other)
# Do we need to dedupe here?
- self.target = ','.join(results)
+ self.target = ",".join(results)
def target_intersection(self, other):
results = []
@@ -433,10 +437,10 @@ class ArchSpec(object):
if not self.target or not other.target:
return results
- for s_target_range in str(self.target).split(','):
- s_min, s_sep, s_max = s_target_range.partition(':')
- for o_target_range in str(other.target).split(','):
- o_min, o_sep, o_max = o_target_range.partition(':')
+ for s_target_range in str(self.target).split(","):
+ s_min, s_sep, s_max = s_target_range.partition(":")
+ for o_target_range in str(other.target).split(","):
+ o_min, o_sep, o_max = o_target_range.partition(":")
if not s_sep:
# s_target_range is a concrete target
@@ -446,14 +450,12 @@ class ArchSpec(object):
if not o_sep:
if s_min == o_min:
results.append(s_min)
- elif (not o_min or s_comp >= o_min) and (
- not o_max or s_comp <= o_max):
+ elif (not o_min or s_comp >= o_min) and (not o_max or s_comp <= o_max):
results.append(s_min)
elif not o_sep:
# "cast" to microarchitecture
o_comp = spack.target.Target(o_min).microarchitecture
- if (not s_min or o_comp >= s_min) and (
- not s_max or o_comp <= s_max):
+ if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
results.append(o_min)
else:
# Take intersection of two ranges
@@ -470,7 +472,7 @@ class ArchSpec(object):
if _n_min == _n_max:
results.append(n_min)
elif not n_min or not n_max or _n_min < _n_max:
- results.append('%s:%s' % (n_min, n_max))
+ results.append("%s:%s" % (n_min, n_max))
return results
def constrain(self, other):
@@ -492,7 +494,7 @@ class ArchSpec(object):
raise UnsatisfiableArchitectureSpecError(other, self)
constrained = False
- for attr in ('platform', 'os'):
+ for attr in ("platform", "os"):
svalue, ovalue = getattr(self, attr), getattr(other, attr)
if svalue is None and ovalue is not None:
setattr(self, attr, ovalue)
@@ -510,33 +512,35 @@ class ArchSpec(object):
def concrete(self):
"""True if the spec is concrete, False otherwise"""
# return all(v for k, v in six.iteritems(self.to_cmp_dict()))
- return (self.platform and self.os and self.target and
- self.target_concrete)
+ return self.platform and self.os and self.target and self.target_concrete
@property
def target_concrete(self):
"""True if the target is not a range or list."""
- return ':' not in str(self.target) and ',' not in str(self.target)
+ return ":" not in str(self.target) and "," not in str(self.target)
def to_dict(self):
- d = syaml.syaml_dict([
- ('platform', self.platform),
- ('platform_os', self.os),
- ('target', self.target.to_dict_or_value())])
- return syaml.syaml_dict([('arch', d)])
+ d = syaml.syaml_dict(
+ [
+ ("platform", self.platform),
+ ("platform_os", self.os),
+ ("target", self.target.to_dict_or_value()),
+ ]
+ )
+ return syaml.syaml_dict([("arch", d)])
@staticmethod
def from_dict(d):
"""Import an ArchSpec from raw YAML/JSON data"""
- arch = d['arch']
- target = spack.target.Target.from_dict_or_value(arch['target'])
- return ArchSpec((arch['platform'], arch['platform_os'], target))
+ arch = d["arch"]
+ target = spack.target.Target.from_dict_or_value(arch["target"])
+ return ArchSpec((arch["platform"], arch["platform_os"], target))
def __str__(self):
return "%s-%s-%s" % (self.platform, self.os, self.target)
def __repr__(self):
- fmt = 'ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))'
+ fmt = "ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))"
return fmt.format(self, str(self.target))
def __contains__(self, string):
@@ -546,8 +550,8 @@ class ArchSpec(object):
@lang.lazy_lexicographic_ordering
class CompilerSpec(object):
"""The CompilerSpec field represents the compiler or range of compiler
- versions that a package should be built with. CompilerSpecs have a
- name and a version list. """
+ versions that a package should be built with. CompilerSpecs have a
+ name and a version list."""
def __init__(self, *args):
nargs = len(args)
@@ -566,8 +570,9 @@ class CompilerSpec(object):
else:
raise TypeError(
- "Can only build CompilerSpec from string or " +
- "CompilerSpec. Found %s" % type(arg))
+ "Can only build CompilerSpec from string or "
+ + "CompilerSpec. Found %s" % type(arg)
+ )
elif nargs == 2:
name, version = args
@@ -576,17 +581,16 @@ class CompilerSpec(object):
self.versions.add(vn.ver(version))
else:
- raise TypeError(
- "__init__ takes 1 or 2 arguments. (%d given)" % nargs)
+ raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs)
def _add_versions(self, version_list):
# If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.VersionList(':'):
+ if self.versions and self.versions != vn.VersionList(":"):
# Note: This may be impossible to reach by the current parser
# Keeping it in case the implementation changes.
raise MultipleVersionError(
- 'A spec cannot contain multiple version signifiers.'
- ' Use a version list instead.')
+ "A spec cannot contain multiple version signifiers." " Use a version list instead."
+ )
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
@@ -598,8 +602,7 @@ class CompilerSpec(object):
def satisfies(self, other, strict=False):
other = self._autospec(other)
- return (self.name == other.name and
- self.versions.satisfies(other.versions, strict=strict))
+ return self.name == other.name and self.versions.satisfies(other.versions, strict=strict)
def constrain(self, other):
"""Intersect self's versions with other.
@@ -617,7 +620,7 @@ class CompilerSpec(object):
@property
def concrete(self):
"""A CompilerSpec is concrete if its versions are concrete and there
- is an available compiler with the right version."""
+ is an available compiler with the right version."""
return self.versions.concrete
@property
@@ -637,15 +640,15 @@ class CompilerSpec(object):
yield self.versions
def to_dict(self):
- d = syaml.syaml_dict([('name', self.name)])
+ d = syaml.syaml_dict([("name", self.name)])
d.update(self.versions.to_dict())
- return syaml.syaml_dict([('compiler', d)])
+ return syaml.syaml_dict([("compiler", d)])
@staticmethod
def from_dict(d):
- d = d['compiler']
- return CompilerSpec(d['name'], vn.VersionList.from_dict(d))
+ d = d["compiler"]
+ return CompilerSpec(d["name"], vn.VersionList.from_dict(d))
def __str__(self):
out = self.name
@@ -692,9 +695,7 @@ class DependencySpec(object):
return DependencySpec(self.parent, self.spec, self.deptypes)
def add_type(self, type):
- self.deptypes = dp.canonical_deptype(
- self.deptypes + dp.canonical_deptype(type)
- )
+ self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
def _cmp_iter(self):
yield self.parent.name if self.parent else None
@@ -702,31 +703,31 @@ class DependencySpec(object):
yield self.deptypes
def __str__(self):
- return "%s %s--> %s" % (self.parent.name if self.parent else None,
- self.deptypes,
- self.spec.name if self.spec else None)
+ return "%s %s--> %s" % (
+ self.parent.name if self.parent else None,
+ self.deptypes,
+ self.spec.name if self.spec else None,
+ )
def canonical(self):
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
-_valid_compiler_flags = [
- 'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
+_valid_compiler_flags = ["cflags", "cxxflags", "fflags", "ldflags", "ldlibs", "cppflags"]
class FlagMap(lang.HashableMap):
-
def __init__(self, spec):
super(FlagMap, self).__init__()
self.spec = spec
def satisfies(self, other, strict=False):
if strict or (self.spec and self.spec._concrete):
- return all(f in self and set(self[f]) == set(other[f])
- for f in other)
+ return all(f in self and set(self[f]) == set(other[f]) for f in other)
else:
- return all(set(self[f]) == set(other[f])
- for f in other if (other[f] != [] and f in self))
+ return all(
+ set(self[f]) == set(other[f]) for f in other if (other[f] != [] and f in self)
+ )
def constrain(self, other):
"""Add all flags in other that aren't in self to self.
@@ -736,15 +737,14 @@ class FlagMap(lang.HashableMap):
if other.spec and other.spec._concrete:
for k in self:
if k not in other:
- raise UnsatisfiableCompilerFlagSpecError(
- self[k], '<absent>')
+ raise UnsatisfiableCompilerFlagSpecError(self[k], "<absent>")
changed = False
for k in other:
if k in self and not set(self[k]) <= set(other[k]):
raise UnsatisfiableCompilerFlagSpecError(
- ' '.join(f for f in self[k]),
- ' '.join(f for f in other[k]))
+ " ".join(f for f in self[k]), " ".join(f for f in other[k])
+ )
elif k not in self:
self[k] = other[k]
changed = True
@@ -772,11 +772,14 @@ class FlagMap(lang.HashableMap):
def __str__(self):
sorted_keys = [k for k in sorted(self.keys()) if self[k] != []]
- cond_symbol = ' ' if len(sorted_keys) > 0 else ''
- return cond_symbol + ' '.join(
- str(key) + '=\"' + ' '.join(
- str(f) for f in self[key]) + '\"'
- for key in sorted_keys) + cond_symbol
+ cond_symbol = " " if len(sorted_keys) > 0 else ""
+ return (
+ cond_symbol
+ + " ".join(
+ str(key) + '="' + " ".join(str(f) for f in self[key]) + '"' for key in sorted_keys
+ )
+ + cond_symbol
+ )
def _sort_by_dep_types(dspec):
@@ -798,6 +801,7 @@ class _EdgeMap(Mapping):
Edges are stored in a dictionary and keyed by package name.
"""
+
def __init__(self, store_by=EdgeDirection.child):
# Sanitize input arguments
msg = 'unexpected value for "store_by" argument'
@@ -806,7 +810,7 @@ class _EdgeMap(Mapping):
#: This dictionary maps a package name to a list of edges
#: i.e. to a list of DependencySpec objects
self.edges = {}
- self.store_by_child = (store_by == EdgeDirection.child)
+ self.store_by_child = store_by == EdgeDirection.child
def __getitem__(self, key):
return self.edges[key]
@@ -829,7 +833,7 @@ class _EdgeMap(Mapping):
current_list.sort(key=_sort_by_dep_types)
def __str__(self):
- return "{deps: %s}" % ', '.join(str(d) for d in sorted(self.values()))
+ return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
def _cmp_iter(self):
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
@@ -882,9 +886,9 @@ class _EdgeMap(Mapping):
# Filter by allowed dependency types
if deptypes:
selected = (
- dep for dep in selected
- if not dep.deptypes or
- any(d in deptypes for d in dep.deptypes)
+ dep
+ for dep in selected
+ if not dep.deptypes or any(d in deptypes for d in dep.deptypes)
)
return list(selected)
@@ -910,13 +914,13 @@ def _command_default_handler(descriptor, spec, cls):
Raises:
RuntimeError: If the command is not found
"""
- home = getattr(spec.package, 'home')
+ home = getattr(spec.package, "home")
path = os.path.join(home.bin, spec.name)
if fs.is_exe(path):
return spack.util.executable.Executable(path)
else:
- msg = 'Unable to locate {0} command in {1}'
+ msg = "Unable to locate {0} command in {1}"
raise RuntimeError(msg.format(spec.name, home.bin))
@@ -938,15 +942,14 @@ def _headers_default_handler(descriptor, spec, cls):
Raises:
NoHeadersError: If no headers are found
"""
- home = getattr(spec.package, 'home')
- headers = fs.find_headers('*', root=home.include, recursive=True)
+ home = getattr(spec.package, "home")
+ headers = fs.find_headers("*", root=home.include, recursive=True)
if headers:
return headers
else:
- msg = 'Unable to locate {0} headers in {1}'
- raise spack.error.NoHeadersError(
- msg.format(spec.name, home))
+ msg = "Unable to locate {0} headers in {1}"
+ raise spack.error.NoHeadersError(msg.format(spec.name, home))
def _libs_default_handler(descriptor, spec, cls):
@@ -979,25 +982,25 @@ def _libs_default_handler(descriptor, spec, cls):
# depending on which one exists (there is a possibility, of course, to
# get something like 'libabcXabc.so, but for now we consider this
# unlikely).
- name = spec.name.replace('-', '?')
- home = getattr(spec.package, 'home')
+ name = spec.name.replace("-", "?")
+ home = getattr(spec.package, "home")
# Avoid double 'lib' for packages whose names already start with lib
- if not name.startswith('lib'):
- name = 'lib' + name
+ if not name.startswith("lib"):
+ name = "lib" + name
# If '+shared' search only for shared library; if '~shared' search only for
# static library; otherwise, first search for shared and then for static.
- search_shared = [True] if ('+shared' in spec) else \
- ([False] if ('~shared' in spec) else [True, False])
+ search_shared = (
+ [True] if ("+shared" in spec) else ([False] if ("~shared" in spec) else [True, False])
+ )
for shared in search_shared:
- libs = fs.find_libraries(
- name, home, shared=shared, recursive=True)
+ libs = fs.find_libraries(name, home, shared=shared, recursive=True)
if libs:
return libs
- msg = 'Unable to recursively locate {0} libraries in {1}'
+ msg = "Unable to recursively locate {0} libraries in {1}"
raise spack.error.NoLibrariesError(msg.format(spec.name, home))
@@ -1051,9 +1054,7 @@ class ForwardQueryToPackage(object):
callbacks_chain = []
# First in the chain : specialized attribute for virtual packages
if query.isvirtual:
- specialized_name = '{0}_{1}'.format(
- query.name, self.attribute_name
- )
+ specialized_name = "{0}_{1}".format(query.name, self.attribute_name)
callbacks_chain.append(lambda: getattr(pkg, specialized_name))
# Try to get the generic method from Package
callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
@@ -1071,14 +1072,17 @@ class ForwardQueryToPackage(object):
# A callback can return None to trigger an error indicating
# that the query failed.
if value is None:
- msg = "Query of package '{name}' for '{attrib}' failed\n"
+ msg = "Query of package '{name}' for '{attrib}' failed\n"
msg += "\tprefix : {spec.prefix}\n"
msg += "\tspec : {spec}\n"
msg += "\tqueried as : {query.name}\n"
msg += "\textra parameters : {query.extra_parameters}"
message = msg.format(
- name=pkg.name, attrib=self.attribute_name,
- spec=instance, query=instance.last_query)
+ name=pkg.name,
+ attrib=self.attribute_name,
+ spec=instance,
+ query=instance.last_query,
+ )
else:
return value
break
@@ -1095,15 +1099,11 @@ class ForwardQueryToPackage(object):
# properties defined and no default handler, or that all callbacks
# raised AttributeError. In this case, we raise AttributeError with an
# appropriate message.
- fmt = '\'{name}\' package has no relevant attribute \'{query}\'\n'
- fmt += '\tspec : \'{spec}\'\n'
- fmt += '\tqueried as : \'{spec.last_query.name}\'\n'
- fmt += '\textra parameters : \'{spec.last_query.extra_parameters}\'\n'
- message = fmt.format(
- name=pkg.name,
- query=self.attribute_name,
- spec=instance
- )
+ fmt = "'{name}' package has no relevant attribute '{query}'\n"
+ fmt += "\tspec : '{spec}'\n"
+ fmt += "\tqueried as : '{spec.last_query.name}'\n"
+ fmt += "\textra parameters : '{spec.last_query.extra_parameters}'\n"
+ message = fmt.format(name=pkg.name, query=self.attribute_name, spec=instance)
raise AttributeError(message)
def __set__(self, instance, value):
@@ -1113,42 +1113,29 @@ class ForwardQueryToPackage(object):
# Represents a query state in a BuildInterface object
-QueryState = collections.namedtuple(
- 'QueryState', ['name', 'extra_parameters', 'isvirtual']
-)
+QueryState = collections.namedtuple("QueryState", ["name", "extra_parameters", "isvirtual"])
class SpecBuildInterface(lang.ObjectWrapper):
# home is available in the base Package so no default is needed
- home = ForwardQueryToPackage('home', default_handler=None)
+ home = ForwardQueryToPackage("home", default_handler=None)
- command = ForwardQueryToPackage(
- 'command',
- default_handler=_command_default_handler
- )
+ command = ForwardQueryToPackage("command", default_handler=_command_default_handler)
- headers = ForwardQueryToPackage(
- 'headers',
- default_handler=_headers_default_handler
- )
+ headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
- libs = ForwardQueryToPackage(
- 'libs',
- default_handler=_libs_default_handler
- )
+ libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
def __init__(self, spec, name, query_parameters):
super(SpecBuildInterface, self).__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
- original_spec = getattr(spec, 'wrapped_obj', spec)
+ original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters
is_virtual = spack.repo.path.is_virtual(name)
self.last_query = QueryState(
- name=name,
- extra_parameters=query_parameters,
- isvirtual=is_virtual
+ name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
def __reduce__(self):
@@ -1171,8 +1158,14 @@ class Spec(object):
s.architecture = ArchSpec.default_arch()
return s
- def __init__(self, spec_like=None, normal=False,
- concrete=False, external_path=None, external_modules=None):
+ def __init__(
+ self,
+ spec_like=None,
+ normal=False,
+ concrete=False,
+ external_path=None,
+ external_modules=None,
+ ):
"""Create a new Spec.
Arguments:
@@ -1195,7 +1188,7 @@ class Spec(object):
# init an empty spec that matches anything.
self.name = None
- self.versions = vn.VersionList(':')
+ self.versions = vn.VersionList(":")
self.variants = vt.VariantMap(self)
self.architecture = None
self.compiler = None
@@ -1277,7 +1270,7 @@ class Spec(object):
self._dependencies.clear()
self._dependents.clear()
- def detach(self, deptype='all'):
+ def detach(self, deptype="all"):
"""Remove any reference that dependencies have of this node.
Args:
@@ -1309,7 +1302,7 @@ class Spec(object):
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
- def edges_from_dependents(self, name=None, deptype='all'):
+ def edges_from_dependents(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to parents.
@@ -1318,12 +1311,9 @@ class Spec(object):
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
- return [
- d for d in
- self._dependents.select(parent=name, deptypes=deptype)
- ]
+ return [d for d in self._dependents.select(parent=name, deptypes=deptype)]
- def edges_to_dependencies(self, name=None, deptype='all'):
+ def edges_to_dependencies(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to children.
@@ -1332,12 +1322,9 @@ class Spec(object):
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
- return [
- d for d in
- self._dependencies.select(child=name, deptypes=deptype)
- ]
+ return [d for d in self._dependencies.select(child=name, deptypes=deptype)]
- def dependencies(self, name=None, deptype='all'):
+ def dependencies(self, name=None, deptype="all"):
"""Return a list of direct dependencies (nodes in the DAG).
Args:
@@ -1346,7 +1333,7 @@ class Spec(object):
"""
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]
- def dependents(self, name=None, deptype='all'):
+ def dependents(self, name=None, deptype="all"):
"""Return a list of direct dependents (nodes in the DAG).
Args:
@@ -1355,7 +1342,7 @@ class Spec(object):
"""
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]
- def _dependencies_dict(self, deptype='all'):
+ def _dependencies_dict(self, deptype="all"):
"""Return a dictionary, keyed by package name, of the direct
dependencies.
@@ -1369,9 +1356,7 @@ class Spec(object):
deptype = dp.canonical_deptype(deptype)
selected_edges = self._dependencies.select(deptypes=deptype)
result = {}
- for key, group in itertools.groupby(
- sorted(selected_edges, key=_sort_fn), key=_group_fn
- ):
+ for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn):
result[key] = list(group)
return result
@@ -1381,10 +1366,10 @@ class Spec(object):
def _add_versions(self, version_list):
"""Called by the parser to add an allowable version."""
# If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.VersionList(':'):
+ if self.versions and self.versions != vn.VersionList(":"):
raise MultipleVersionError(
- 'A spec cannot contain multiple version signifiers.'
- ' Use a version list instead.')
+ "A spec cannot contain multiple version signifiers." " Use a version list instead."
+ )
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
@@ -1394,47 +1379,48 @@ class Spec(object):
Known flags currently include "arch"
"""
valid_flags = FlagMap.valid_compiler_flags()
- if name == 'arch' or name == 'architecture':
- parts = tuple(value.split('-'))
+ if name == "arch" or name == "architecture":
+ parts = tuple(value.split("-"))
plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
self._set_architecture(platform=plat, os=os, target=tgt)
- elif name == 'platform':
+ elif name == "platform":
self._set_architecture(platform=value)
- elif name == 'os' or name == 'operating_system':
+ elif name == "os" or name == "operating_system":
self._set_architecture(os=value)
- elif name == 'target':
+ elif name == "target":
self._set_architecture(target=value)
elif name in valid_flags:
- assert(self.compiler_flags is not None)
+ assert self.compiler_flags is not None
self.compiler_flags[name] = spack.compiler.tokenize_flags(value)
else:
# FIXME:
# All other flags represent variants. 'foo=true' and 'foo=false'
# map to '+foo' and '~foo' respectively. As such they need a
# BoolValuedVariant instance.
- if str(value).upper() == 'TRUE' or str(value).upper() == 'FALSE':
+ if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
self.variants[name] = vt.BoolValuedVariant(name, value)
else:
self.variants[name] = vt.AbstractVariant(name, value)
def _set_architecture(self, **kwargs):
"""Called by the parser to set the architecture."""
- arch_attrs = ['platform', 'os', 'target']
+ arch_attrs = ["platform", "os", "target"]
if self.architecture and self.architecture.concrete:
raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name)
+ "Spec for '%s' cannot have two architectures." % self.name
+ )
if not self.architecture:
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(new_vals)
else:
- new_attrvals = [(a, v) for a, v in six.iteritems(kwargs)
- if a in arch_attrs]
+ new_attrvals = [(a, v) for a, v in six.iteritems(kwargs) if a in arch_attrs]
for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr):
raise DuplicateArchitectureError(
"Spec for '%s' cannot have two '%s' specified "
- "for its architecture" % (self.name, new_attr))
+ "for its architecture" % (self.name, new_attr)
+ )
else:
setattr(self.architecture, new_attr, new_value)
@@ -1442,14 +1428,14 @@ class Spec(object):
"""Called by the parser to set the compiler."""
if self.compiler:
raise DuplicateCompilerSpecError(
- "Spec for '%s' cannot have two compilers." % self.name)
+ "Spec for '%s' cannot have two compilers." % self.name
+ )
self.compiler = compiler
def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
if spec.name in self._dependencies:
- raise DuplicateDependencyError(
- "Cannot depend on '%s' twice" % spec)
+ raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
self.add_dependency_edge(spec, deptypes)
@@ -1466,8 +1452,10 @@ class Spec(object):
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
if any(d in edge.deptypes for d in deptype):
- msg = ('cannot add a dependency on "{0.spec}" of {1} type '
- 'when the "{0.parent}" has the edge {0!s} already')
+ msg = (
+ 'cannot add a dependency on "{0.spec}" of {1} type '
+ 'when the "{0.parent}" has the edge {0!s} already'
+ )
raise spack.error.SpecError(msg.format(edge, deptype))
for edge in selected:
@@ -1499,8 +1487,10 @@ class Spec(object):
@property
def fullname(self):
return (
- ('%s.%s' % (self.namespace, self.name)) if self.namespace else
- (self.name if self.name else ''))
+ ("%s.%s" % (self.namespace, self.name))
+ if self.namespace
+ else (self.name if self.name else "")
+ )
@property
def root(self):
@@ -1525,7 +1515,7 @@ class Spec(object):
@property
def package_class(self):
"""Internal package call gets only the class object for a package.
- Use this to just get package metadata.
+ Use this to just get package metadata.
"""
return spack.repo.path.get_pkg_class(self.fullname)
@@ -1533,10 +1523,10 @@ class Spec(object):
def virtual(self):
"""Right now, a spec is virtual if no package exists with its name.
- TODO: revisit this -- might need to use a separate namespace and
- be more explicit about this.
- Possible idea: just use conventin and make virtual deps all
- caps, e.g., MPI vs mpi.
+ TODO: revisit this -- might need to use a separate namespace and
+ be more explicit about this.
+ Possible idea: just use convention and make virtual deps all
+ caps, e.g., MPI vs mpi.
"""
# This method can be called while regenerating the provider index
# So we turn off using the index to detect virtuals
@@ -1596,11 +1586,11 @@ class Spec(object):
return upstream
def traverse(self, **kwargs):
- direction = kwargs.get('direction', 'children')
- depth = kwargs.get('depth', False)
+ direction = kwargs.get("direction", "children")
+ depth = kwargs.get("depth", False)
get_spec = lambda s: s.spec
- if direction == 'parents':
+ if direction == "parents":
get_spec = lambda s: s.parent
if depth:
@@ -1610,66 +1600,65 @@ class Spec(object):
for dspec in self.traverse_edges(**kwargs):
yield get_spec(dspec)
- def traverse_edges(self, visited=None, d=0, deptype='all',
- dep_spec=None, **kwargs):
+ def traverse_edges(self, visited=None, d=0, deptype="all", dep_spec=None, **kwargs):
"""Generic traversal of the DAG represented by this spec.
- This yields ``DependencySpec`` objects as they are traversed.
+ This yields ``DependencySpec`` objects as they are traversed.
- When traversing top-down, an imaginary incoming edge to the root
- is yielded first as ``DependencySpec(None, root, ())``. When
- traversing bottom-up, imaginary edges to leaves are yielded first
- as ``DependencySpec(left, None, ())`` objects.
+ When traversing top-down, an imaginary incoming edge to the root
+ is yielded first as ``DependencySpec(None, root, ())``. When
+ traversing bottom-up, imaginary edges to leaves are yielded first
+ as ``DependencySpec(left, None, ())`` objects.
- Options:
+ Options:
- order [=pre|post]
- Order to traverse spec nodes. Defaults to preorder traversal.
- Options are:
+ order [=pre|post]
+ Order to traverse spec nodes. Defaults to preorder traversal.
+ Options are:
- 'pre': Pre-order traversal; each node is yielded before its
- children in the dependency DAG.
- 'post': Post-order traversal; each node is yielded after its
- children in the dependency DAG.
+ 'pre': Pre-order traversal; each node is yielded before its
+ children in the dependency DAG.
+ 'post': Post-order traversal; each node is yielded after its
+ children in the dependency DAG.
- cover [=nodes|edges|paths]
- Determines how extensively to cover the dag. Possible values:
+ cover [=nodes|edges|paths]
+ Determines how extensively to cover the dag. Possible values:
- 'nodes': Visit each node in the dag only once. Every node
- yielded by this function will be unique.
- 'edges': If a node has been visited once but is reached along a
- new path from the root, yield it but do not descend
- into it. This traverses each 'edge' in the DAG once.
- 'paths': Explore every unique path reachable from the root.
- This descends into visited subtrees and will yield
- nodes twice if they're reachable by multiple paths.
+ 'nodes': Visit each node in the dag only once. Every node
+ yielded by this function will be unique.
+ 'edges': If a node has been visited once but is reached along a
+ new path from the root, yield it but do not descend
+ into it. This traverses each 'edge' in the DAG once.
+ 'paths': Explore every unique path reachable from the root.
+ This descends into visited subtrees and will yield
+ nodes twice if they're reachable by multiple paths.
- depth [=False]
- Defaults to False. When True, yields not just nodes in the
- spec, but also their depth from the root in a (depth, node)
- tuple.
+ depth [=False]
+ Defaults to False. When True, yields not just nodes in the
+ spec, but also their depth from the root in a (depth, node)
+ tuple.
- key [=id]
- Allow a custom key function to track the identity of nodes
- in the traversal.
+ key [=id]
+ Allow a custom key function to track the identity of nodes
+ in the traversal.
- root [=True]
- If False, this won't yield the root node, just its descendents.
+ root [=True]
+ If False, this won't yield the root node, just its descendants.
- direction [=children|parents]
- If 'children', does a traversal of this spec's children. If
- 'parents', traverses upwards in the DAG towards the root.
+ direction [=children|parents]
+ If 'children', does a traversal of this spec's children. If
+ 'parents', traverses upwards in the DAG towards the root.
"""
# get initial values for kwargs
- depth = kwargs.get('depth', False)
- key_fun = kwargs.get('key', id)
+ depth = kwargs.get("depth", False)
+ key_fun = kwargs.get("key", id)
if isinstance(key_fun, six.string_types):
key_fun = operator.attrgetter(key_fun)
- yield_root = kwargs.get('root', True)
- cover = kwargs.get('cover', 'nodes')
- direction = kwargs.get('direction', 'children')
- order = kwargs.get('order', 'pre')
+ yield_root = kwargs.get("root", True)
+ cover = kwargs.get("cover", "nodes")
+ direction = kwargs.get("direction", "children")
+ order = kwargs.get("order", "pre")
# we don't want to run canonical_deptype every time through
# traverse, because it is somewhat expensive. This ensures we
@@ -1684,24 +1673,27 @@ class Spec(object):
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
- raise ValueError("Invalid value for %s: %s. Choices are %s"
- % (name, val, ",".join(allowed_values)))
- validate('cover', cover, ('nodes', 'edges', 'paths'))
- validate('direction', direction, ('children', 'parents'))
- validate('order', order, ('pre', 'post'))
+ raise ValueError(
+ "Invalid value for %s: %s. Choices are %s"
+ % (name, val, ",".join(allowed_values))
+ )
+
+ validate("cover", cover, ("nodes", "edges", "paths"))
+ validate("direction", direction, ("children", "parents"))
+ validate("order", order, ("pre", "post"))
if visited is None:
visited = set()
key = key_fun(self)
# Node traversal does not yield visited nodes.
- if key in visited and cover == 'nodes':
+ if key in visited and cover == "nodes":
return
def return_val(dspec):
if not dspec:
# make a fake dspec for the root.
- if direction == 'parents':
+ if direction == "parents":
dspec = DependencySpec(self, None, ())
else:
dspec = DependencySpec(None, self, ())
@@ -1710,52 +1702,50 @@ class Spec(object):
yield_me = yield_root or d > 0
# Preorder traversal yields before successors
- if yield_me and order == 'pre':
+ if yield_me and order == "pre":
yield return_val(dep_spec)
# Edge traversal yields but skips children of visited nodes
- if not (key in visited and cover == 'edges'):
+ if not (key in visited and cover == "edges"):
visited.add(key)
# This code determines direction and yields the children/parents
- if direction == 'children':
+ if direction == "children":
edges = self.edges_to_dependencies
key_fn = lambda dspec: dspec.spec.name
succ = lambda dspec: dspec.spec
- elif direction == 'parents':
+ elif direction == "parents":
edges = self.edges_from_dependents
key_fn = lambda dspec: dspec.parent.name
succ = lambda dspec: dspec.parent
else:
- raise ValueError('Invalid traversal direction: %s' % direction)
+ raise ValueError("Invalid traversal direction: %s" % direction)
for dspec in sorted(edges(), key=key_fn):
dt = dspec.deptypes
if dt and not any(d in deptype for d in dt):
continue
- for child in succ(dspec).traverse_edges(
- visited, d + 1, deptype, dspec, **kwargs
- ):
+ for child in succ(dspec).traverse_edges(visited, d + 1, deptype, dspec, **kwargs):
yield child
# Postorder traversal yields after successors
- if yield_me and order == 'post':
+ if yield_me and order == "post":
yield return_val(dep_spec)
@property
def short_spec(self):
"""Returns a version of the spec with the dependencies hashed
- instead of completely enumerated."""
- spec_format = '{name}{@version}{%compiler}'
- spec_format += '{variants}{arch=architecture}{/hash:7}'
+ instead of completely enumerated."""
+ spec_format = "{name}{@version}{%compiler}"
+ spec_format += "{variants}{arch=architecture}{/hash:7}"
return self.format(spec_format)
@property
def cshort_spec(self):
"""Returns an auto-colorized version of ``self.short_spec``."""
- spec_format = '{name}{@version}{%compiler}'
- spec_format += '{variants}{arch=architecture}{/hash:7}'
+ spec_format = "{name}{@version}{%compiler}"
+ spec_format += "{variants}{arch=architecture}{/hash:7}"
return self.cformat(spec_format)
@property
@@ -1764,8 +1754,7 @@ class Spec(object):
raise spack.error.SpecError("Spec is not concrete: " + str(self))
if self._prefix is None:
- upstream, record = spack.store.db.query_by_spec_hash(
- self.dag_hash())
+ upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
if record and record.path:
self.prefix = record.path
else:
@@ -1904,10 +1893,10 @@ class Spec(object):
Arguments:
hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
- """
+ """
d = syaml.syaml_dict()
- d['name'] = self.name
+ d["name"] = self.name
if self.versions:
d.update(self.versions.to_dict())
@@ -1919,32 +1908,30 @@ class Spec(object):
d.update(self.compiler.to_dict())
if self.namespace:
- d['namespace'] = self.namespace
+ d["namespace"] = self.namespace
- params = syaml.syaml_dict(
- sorted(
- v.yaml_entry() for _, v in self.variants.items()
- )
- )
+ params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items()))
params.update(sorted(self.compiler_flags.items()))
if params:
- d['parameters'] = params
+ d["parameters"] = params
if self.external:
- d['external'] = syaml.syaml_dict([
- ('path', self.external_path),
- ('module', self.external_modules),
- ('extra_attributes', self.extra_attributes)
- ])
+ d["external"] = syaml.syaml_dict(
+ [
+ ("path", self.external_path),
+ ("module", self.external_modules),
+ ("extra_attributes", self.extra_attributes),
+ ]
+ )
if not self._concrete:
- d['concrete'] = False
+ d["concrete"] = False
- if 'patches' in self.variants:
- variant = self.variants['patches']
- if hasattr(variant, '_patches_in_order_of_appearance'):
- d['patches'] = variant._patches_in_order_of_appearance
+ if "patches" in self.variants:
+ variant = self.variants["patches"]
+ if hasattr(variant, "_patches_in_order_of_appearance"):
+ d["patches"] = variant._patches_in_order_of_appearance
if self._concrete and hash.package_hash and self._package_hash:
# We use the attribute here instead of `self.package_hash()` because this
@@ -1955,31 +1942,29 @@ class Spec(object):
package_hash = self._package_hash
# Full hashes are in bytes
- if (not isinstance(package_hash, six.text_type)
- and isinstance(package_hash, six.binary_type)):
- package_hash = package_hash.decode('utf-8')
- d['package_hash'] = package_hash
+ if not isinstance(package_hash, six.text_type) and isinstance(
+ package_hash, six.binary_type
+ ):
+ package_hash = package_hash.decode("utf-8")
+ d["package_hash"] = package_hash
# Note: Relies on sorting dict by keys later in algorithm.
deps = self._dependencies_dict(deptype=hash.deptype)
if deps:
deps_list = []
for name, edges_for_name in sorted(deps.items()):
- name_tuple = ('name', name)
+ name_tuple = ("name", name)
for dspec in edges_for_name:
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
- type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
- deps_list.append(syaml.syaml_dict(
- [name_tuple, hash_tuple, type_tuple]
- ))
- d['dependencies'] = deps_list
+ type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
+ deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
+ d["dependencies"] = deps_list
# Name is included in case this is replacing a virtual.
if self._build_spec:
- d['build_spec'] = syaml.syaml_dict([
- ('name', self.build_spec.name),
- (hash.name, self.build_spec._cached_hash(hash))
- ])
+ d["build_spec"] = syaml.syaml_dict(
+ [("name", self.build_spec.name), (hash.name, self.build_spec._cached_hash(hash))]
+ )
return d
def to_dict(self, hash=ht.dag_hash):
@@ -2066,7 +2051,7 @@ class Spec(object):
"""
node_list = [] # Using a list to preserve preorder traversal for hash.
hash_set = set()
- for s in self.traverse(order='pre', deptype=hash.deptype):
+ for s in self.traverse(order="pre", deptype=hash.deptype):
spec_hash = s._cached_hash(hash)
if spec_hash not in hash_set:
@@ -2074,22 +2059,22 @@ class Spec(object):
hash_set.add(spec_hash)
if s.build_spec is not s:
- build_spec_list = s.build_spec.to_dict(hash)['spec']['nodes']
+ build_spec_list = s.build_spec.to_dict(hash)["spec"]["nodes"]
for node in build_spec_list:
node_hash = node[hash.name]
if node_hash not in hash_set:
node_list.append(node)
hash_set.add(node_hash)
- meta_dict = syaml.syaml_dict([('version', specfile_format_version)])
- inner_dict = syaml.syaml_dict([('_meta', meta_dict), ('nodes', node_list)])
- spec_dict = syaml.syaml_dict([('spec', inner_dict)])
+ meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
+ inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
+ spec_dict = syaml.syaml_dict([("spec", inner_dict)])
return spec_dict
def node_dict_with_hashes(self, hash=ht.dag_hash):
- """ Returns a node_dict of this spec with the dag hash added. If this
+ """Returns a node_dict of this spec with the dag hash added. If this
spec is concrete, the full hash is added as well. If 'build' is in
- the hash_type, the build hash is also added. """
+ the hash_type, the build hash is also added."""
node = self.to_node_dict(hash)
node[ht.dag_hash.name] = self.dag_hash()
@@ -2101,7 +2086,7 @@ class Spec(object):
node[ht.dag_hash.name] = self.dag_hash()
else:
- node['concrete'] = False
+ node["concrete"] = False
# we can also give them other hash types if we want
if hash.name != ht.dag_hash.name:
@@ -2110,8 +2095,7 @@ class Spec(object):
return node
def to_yaml(self, stream=None, hash=ht.dag_hash):
- return syaml.dump(
- self.to_dict(hash), stream=stream, default_flow_style=False)
+ return syaml.dump(self.to_dict(hash), stream=stream, default_flow_style=False)
def to_json(self, stream=None, hash=ht.dag_hash):
return sjson.dump(self.to_dict(hash), stream)
@@ -2119,18 +2103,18 @@ class Spec(object):
@staticmethod
def from_specfile(path):
"""Construct a spec from aJSON or YAML spec file path"""
- with open(path, 'r') as fd:
+ with open(path, "r") as fd:
file_content = fd.read()
- if path.endswith('.json'):
+ if path.endswith(".json"):
return Spec.from_json(file_content)
return Spec.from_yaml(file_content)
@staticmethod
def from_node_dict(node):
spec = Spec()
- if 'name' in node.keys():
+ if "name" in node.keys():
# New format
- name = node['name']
+ name = node["name"]
else:
# Old format
name = next(iter(node))
@@ -2139,59 +2123,54 @@ class Spec(object):
setattr(spec, h.attr, node.get(h.name, None))
spec.name = name
- spec.namespace = node.get('namespace', None)
+ spec.namespace = node.get("namespace", None)
- if 'version' in node or 'versions' in node:
+ if "version" in node or "versions" in node:
spec.versions = vn.VersionList.from_dict(node)
- if 'arch' in node:
+ if "arch" in node:
spec.architecture = ArchSpec.from_dict(node)
- if 'compiler' in node:
+ if "compiler" in node:
spec.compiler = CompilerSpec.from_dict(node)
else:
spec.compiler = None
- if 'parameters' in node:
- for name, value in node['parameters'].items():
+ if "parameters" in node:
+ for name, value in node["parameters"].items():
if name in _valid_compiler_flags:
spec.compiler_flags[name] = value
else:
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
- name, value)
- elif 'variants' in node:
- for name, value in node['variants'].items():
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
- name, value
- )
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
+ elif "variants" in node:
+ for name, value in node["variants"].items():
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
for name in FlagMap.valid_compiler_flags():
spec.compiler_flags[name] = []
spec.external_path = None
spec.external_modules = None
- if 'external' in node:
+ if "external" in node:
# This conditional is needed because sometimes this function is
# called with a node already constructed that contains a 'versions'
# and 'external' field. Related to virtual packages provider
# indexes.
- if node['external']:
- spec.external_path = node['external']['path']
- spec.external_modules = node['external']['module']
+ if node["external"]:
+ spec.external_path = node["external"]["path"]
+ spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
- spec.extra_attributes = node['external'].get(
- 'extra_attributes', syaml.syaml_dict()
+ spec.extra_attributes = node["external"].get(
+ "extra_attributes", syaml.syaml_dict()
)
# specs read in are concrete unless marked abstract
- spec._concrete = node.get('concrete', True)
+ spec._concrete = node.get("concrete", True)
- if 'patches' in node:
- patches = node['patches']
+ if "patches" in node:
+ patches = node["patches"]
if len(patches) > 0:
- mvar = spec.variants.setdefault(
- 'patches', vt.MultiValuedVariant('patches', ())
- )
+ mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = patches
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
@@ -2204,20 +2183,20 @@ class Spec(object):
@staticmethod
def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
- build_spec_dict = node['build_spec']
- return build_spec_dict['name'], build_spec_dict[hash_type], hash_type
+ build_spec_dict = node["build_spec"]
+ return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
@staticmethod
def dependencies_from_node_dict(node):
- if 'name' in node.keys():
+ if "name" in node.keys():
# New format
- name = node['name']
+ name = node["name"]
else:
name = next(iter(node))
node = node[name]
- if 'dependencies' not in node:
+ if "dependencies" not in node:
return
- for t in Spec.read_yaml_dep_specs(node['dependencies']):
+ for t in Spec.read_yaml_dep_specs(node["dependencies"]):
yield t
@staticmethod
@@ -2232,10 +2211,10 @@ class Spec(object):
dep_name, elt = dep
else:
elt = dep
- dep_name = dep['name']
+ dep_name = dep["name"]
if isinstance(elt, six.string_types):
# original format, elt is just the dependency hash.
- dep_hash, deptypes = elt, ['build', 'link']
+ dep_hash, deptypes = elt, ["build", "link"]
elif isinstance(elt, tuple):
# original deptypes format: (used tuples, not future-proof)
dep_hash, deptypes = elt
@@ -2243,14 +2222,13 @@ class Spec(object):
# new format: elements of dependency spec are keyed.
for h in ht.hashes:
if h.name in elt:
- dep_hash, deptypes = elt[h.name], elt['type']
+ dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
- raise spack.error.SpecError(
- "Couldn't parse dependency types in spec.")
+ raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type
@staticmethod
@@ -2360,7 +2338,7 @@ class Spec(object):
s (str): key in the dictionary containing the literal
"""
- t = s.split(':')
+ t = s.split(":")
if len(t) > 2:
msg = 'more than one ":" separator in key "{0}"'
@@ -2368,7 +2346,7 @@ class Spec(object):
n = t[0]
if len(t) == 2:
- dtypes = tuple(dt.strip() for dt in t[1].split(','))
+ dtypes = tuple(dt.strip() for dt in t[1].split(","))
else:
dtypes = ()
@@ -2388,7 +2366,7 @@ class Spec(object):
return s, ()
spec_obj, dtypes = s
- return spec_obj, tuple(dt.strip() for dt in dtypes.split(','))
+ return spec_obj, tuple(dt.strip() for dt in dtypes.split(","))
# Recurse on dependencies
for s, s_dependencies in dep_like.items():
@@ -2462,7 +2440,7 @@ class Spec(object):
stream: string or file object to read from.
"""
data = stream
- if hasattr(stream, 'read'):
+ if hasattr(stream, "read"):
data = stream.read()
extracted_json = Spec.extract_json_from_clearsig(data)
@@ -2496,22 +2474,21 @@ class Spec(object):
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
- msg = ('cannot validate "{0}" since it was not created '
- 'using Spec.from_detection'.format(self))
+ msg = 'cannot validate "{0}" since it was not created using Spec.from_detection'.format(
+ self
+ )
assert isinstance(self.extra_attributes, Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.path.get_pkg_class(self.name)
- validate_fn = getattr(
- pkg_cls, 'validate_detected_spec', lambda x, y: None
- )
+ validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _concretize_helper(self, concretizer, presets=None, visited=None):
"""Recursive helper function for concretize().
- This concretizes everything bottom-up. As things are
- concretized, they're added to the presets, and ancestors
- will prefer the settings of their children.
+ This concretizes everything bottom-up. As things are
+ concretized, they're added to the presets, and ancestors
+ will prefer the settings of their children.
"""
if presets is None:
presets = {}
@@ -2547,14 +2524,17 @@ class Spec(object):
# still need to select a concrete package later.
if not self.virtual:
changed |= any(
- (concretizer.concretize_develop(self), # special variant
- concretizer.concretize_architecture(self),
- concretizer.concretize_compiler(self),
- concretizer.adjust_target(self),
- # flags must be concretized after compiler
- concretizer.concretize_compiler_flags(self),
- concretizer.concretize_version(self),
- concretizer.concretize_variants(self)))
+ (
+ concretizer.concretize_develop(self), # special variant
+ concretizer.concretize_architecture(self),
+ concretizer.concretize_compiler(self),
+ concretizer.adjust_target(self),
+ # flags must be concretized after compiler
+ concretizer.concretize_compiler_flags(self),
+ concretizer.concretize_version(self),
+ concretizer.concretize_variants(self),
+ )
+ )
presets[self.name] = self
visited.add(self.name)
@@ -2577,24 +2557,23 @@ class Spec(object):
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
- and normalize again to include the provider's (potentially virtual)
- dependencies. Repeat until there are no virtual deps.
+ and normalize again to include the provider's (potentially virtual)
+ dependencies. Repeat until there are no virtual deps.
- Precondition: spec is normalized.
+ Precondition: spec is normalized.
- .. todo::
+ .. todo::
- If a provider depends on something that conflicts with
- other dependencies in the spec being expanded, this can
- produce a conflicting spec. For example, if mpich depends
- on hwloc@:1.3 but something in the spec needs hwloc1.4:,
- then we should choose an MPI other than mpich. Cases like
- this are infrequent, but should implement this before it is
- a problem.
+ If a provider depends on something that conflicts with
+ other dependencies in the spec being expanded, this can
+ produce a conflicting spec. For example, if mpich depends
+ on hwloc@:1.3 but something in the spec needs hwloc1.4:,
+ then we should choose an MPI other than mpich. Cases like
+ this are infrequent, but should implement this before it is
+ a problem.
"""
# Make an index of stuff this spec already provides
- self_index = spack.provider_index.ProviderIndex(
- self.traverse(), restrict=True)
+ self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
@@ -2653,16 +2632,15 @@ class Spec(object):
return (not cfield) or (cfield == sfield)
if replacement is spec or (
- feq(replacement.name, spec.name) and
- feq(replacement.versions, spec.versions) and
- feq(replacement.compiler, spec.compiler) and
- feq(replacement.architecture, spec.architecture) and
- feq(replacement._dependencies, spec._dependencies) and
- feq(replacement.variants, spec.variants) and
- feq(replacement.external_path,
- spec.external_path) and
- feq(replacement.external_modules,
- spec.external_modules)):
+ feq(replacement.name, spec.name)
+ and feq(replacement.versions, spec.versions)
+ and feq(replacement.compiler, spec.compiler)
+ and feq(replacement.architecture, spec.architecture)
+ and feq(replacement._dependencies, spec._dependencies)
+ and feq(replacement.variants, spec.variants)
+ and feq(replacement.external_path, spec.external_path)
+ and feq(replacement.external_modules, spec.external_modules)
+ ):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
@@ -2703,14 +2681,15 @@ class Spec(object):
# Add a warning message to inform users that the original concretizer
# will be removed
if deprecation_warning:
- msg = ('the original concretizer is currently being used.\n\tUpgrade to '
- '"clingo" at your earliest convenience. The original concretizer '
- 'will be removed from Spack in a future version.')
+ msg = (
+ "the original concretizer is currently being used.\n\tUpgrade to "
+ '"clingo" at your earliest convenience. The original concretizer '
+ "will be removed from Spack in a future version."
+ )
warnings.warn(msg)
if not self.name:
- raise spack.error.SpecError(
- "Attempting to concretize anonymous spec")
+ raise spack.error.SpecError("Attempting to concretize anonymous spec")
if self._concrete:
return
@@ -2721,10 +2700,11 @@ class Spec(object):
user_spec_deps = self.flat_dependencies(copy=False)
concretizer = spack.concretize.Concretizer(self.copy())
while changed:
- changes = (self.normalize(force, tests=tests,
- user_spec_deps=user_spec_deps),
- self._expand_virtual_packages(concretizer),
- self._concretize_helper(concretizer))
+ changes = (
+ self.normalize(force, tests=tests, user_spec_deps=user_spec_deps),
+ self._expand_virtual_packages(concretizer),
+ self._concretize_helper(concretizer),
+ )
changed = any(changes)
force = True
@@ -2786,8 +2766,9 @@ class Spec(object):
# ensure that patch state is consistent
patch_variant = self.variants["patches"]
- assert hasattr(patch_variant, "_patches_in_order_of_appearance"), \
- "patches should always be assigned with a patch variant."
+ assert hasattr(
+ patch_variant, "_patches_in_order_of_appearance"
+ ), "patches should always be assigned with a patch variant."
return True
@@ -2821,8 +2802,7 @@ class Spec(object):
spec_to_patches[id(s)] = patches
# Also record all patches required on dependencies by
# depends_on(..., patch=...)
- for dspec in root.traverse_edges(deptype=all,
- cover='edges', root=False):
+ for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
pkg_deps = dspec.parent.package_class.dependencies
if dspec.spec.name not in pkg_deps:
continue
@@ -2833,8 +2813,7 @@ class Spec(object):
patches = []
for cond, dependency in pkg_deps[dspec.spec.name].items():
for pcond, patch_list in dependency.patches.items():
- if (dspec.parent.satisfies(cond, strict=True)
- and dspec.spec.satisfies(pcond)):
+ if dspec.parent.satisfies(cond, strict=True) and dspec.spec.satisfies(pcond):
patches.extend(patch_list)
if patches:
all_patches = spec_to_patches.setdefault(id(dspec.spec), [])
@@ -2844,25 +2823,23 @@ class Spec(object):
continue
patches = list(lang.dedupe(spec_to_patches[id(spec)]))
- mvar = spec.variants.setdefault(
- 'patches', vt.MultiValuedVariant('patches', ())
- )
+ mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey patches mvar to store patches order
- full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p
- in patches)
+ full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
ordered_hashes = sorted(full_order_keys)
- tty.debug("Ordered hashes [{0}]: ".format(spec.name) +
- ', '.join('/'.join(str(e) for e in t)
- for t in ordered_hashes))
- mvar._patches_in_order_of_appearance = list(
- t[-1] for t in ordered_hashes)
+ tty.debug(
+ "Ordered hashes [{0}]: ".format(spec.name)
+ + ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
+ )
+ mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
@staticmethod
def ensure_external_path_if_external(external_spec):
if external_spec.external_modules and not external_spec.external_path:
compiler = spack.compilers.compiler_for_spec(
- external_spec.compiler, external_spec.architecture)
+ external_spec.compiler, external_spec.architecture
+ )
for mod in compiler.modules:
md.load_module(mod)
@@ -2871,8 +2848,7 @@ class Spec(object):
pkg_cls = spack.repo.path.get_pkg_class(external_spec.name)
package = pkg_cls(external_spec)
external_spec.external_path = getattr(
- package, 'external_prefix',
- md.path_from_modules(external_spec.external_modules)
+ package, "external_prefix", md.path_from_modules(external_spec.external_modules)
)
@staticmethod
@@ -2895,8 +2871,8 @@ class Spec(object):
msg = "\n The following specs have been deprecated"
msg += " in favor of specs with the hashes shown:\n"
for rec in deprecated:
- msg += ' %s --> %s\n' % (rec.spec, rec.deprecated_for)
- msg += '\n'
+ msg += " %s --> %s\n" % (rec.spec, rec.deprecated_for)
+ msg += "\n"
msg += " For each package listed, choose another spec\n"
raise SpecDeprecatedError(msg)
@@ -2904,8 +2880,7 @@ class Spec(object):
import spack.solver.asp
if not self.name:
- raise spack.error.SpecError(
- "Spec has no name; cannot concretize an anonymous spec")
+ raise spack.error.SpecError("Spec has no name; cannot concretize an anonymous spec")
if self._concrete:
return
@@ -2919,8 +2894,7 @@ class Spec(object):
name = self.name
# TODO: Consolidate this code with similar code in solve.py
if self.virtual:
- providers = [spec.name for spec in answer.values()
- if spec.package.provides(name)]
+ providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
name = providers[0]
assert name in answer
@@ -2936,7 +2910,7 @@ class Spec(object):
if a list of names activate them for the packages in the list,
if True activate 'test' dependencies for all packages.
"""
- if spack.config.get('config:concretizer') == "clingo":
+ if spack.config.get("config:concretizer") == "clingo":
self._new_concretize(tests)
else:
self._old_concretize(tests)
@@ -3026,19 +3000,19 @@ class Spec(object):
def flat_dependencies(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
- dependencies with their constraints merged.
+ dependencies with their constraints merged.
- If copy is True, returns merged copies of its dependencies
- without modifying the spec it's called on.
+ If copy is True, returns merged copies of its dependencies
+ without modifying the spec it's called on.
- If copy is False, clears this spec's dependencies and
- returns them. This disconnects all dependency links including
- transitive dependencies, except for concrete specs: if a spec
- is concrete it will not be disconnected from its dependencies
- (although a non-concrete spec with concrete dependencies will
- be disconnected from those dependencies).
+ If copy is False, clears this spec's dependencies and
+ returns them. This disconnects all dependency links including
+ transitive dependencies, except for concrete specs: if a spec
+ is concrete it will not be disconnected from its dependencies
+ (although a non-concrete spec with concrete dependencies will
+ be disconnected from those dependencies).
"""
- copy = kwargs.get('copy', True)
+ copy = kwargs.get("copy", True)
flat_deps = {}
try:
@@ -3070,7 +3044,7 @@ class Spec(object):
e,
)
- def index(self, deptype='all'):
+ def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this
spec.
"""
@@ -3113,18 +3087,18 @@ class Spec(object):
"Cannot merge constraint"
"\n\n\t{1}\n\n"
"into"
- "\n\n\t{2}"
- .format(self, dependency.spec, dep.spec))
+ "\n\n\t{2}".format(self, dependency.spec, dep.spec)
+ )
raise e
return dep
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
- Raise an exception if there is a conflicting virtual
- dependency already in this spec.
+ Raise an exception if there is a conflicting virtual
+ dependency already in this spec.
"""
- assert(vdep.virtual)
+ assert vdep.virtual
# note that this defensively copies.
providers = provider_index.providers_for(vdep)
@@ -3152,8 +3126,7 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
- def _merge_dependency(
- self, dependency, visited, spec_deps, provider_index, tests):
+ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):
"""Merge dependency information from a Package into this Spec.
Args:
@@ -3225,23 +3198,19 @@ class Spec(object):
else:
# merge package/vdep information into spec
try:
- tty.debug(
- "{0} applying constraint {1}".format(self.name, str(dep)))
+ tty.debug("{0} applying constraint {1}".format(self.name, str(dep)))
changed |= spec_deps[dep.name].constrain(dep)
except spack.error.UnsatisfiableSpecError as e:
- fmt = 'An unsatisfiable {0}'.format(e.constraint_type)
- fmt += ' constraint has been detected for spec:'
- fmt += '\n\n{0}\n\n'.format(spec_deps[dep.name].tree(indent=4))
- fmt += 'while trying to concretize the partial spec:'
- fmt += '\n\n{0}\n\n'.format(self.tree(indent=4))
- fmt += '{0} requires {1} {2} {3}, but spec asked for {4}'
+ fmt = "An unsatisfiable {0}".format(e.constraint_type)
+ fmt += " constraint has been detected for spec:"
+ fmt += "\n\n{0}\n\n".format(spec_deps[dep.name].tree(indent=4))
+ fmt += "while trying to concretize the partial spec:"
+ fmt += "\n\n{0}\n\n".format(self.tree(indent=4))
+ fmt += "{0} requires {1} {2} {3}, but spec asked for {4}"
e.message = fmt.format(
- self.name,
- dep.name,
- e.constraint_type,
- e.required,
- e.provided)
+ self.name, dep.name, e.constraint_type, e.required, e.provided
+ )
raise
@@ -3250,8 +3219,7 @@ class Spec(object):
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, dependency.type)
- changed |= spec_dependency._normalize_helper(
- visited, spec_deps, provider_index, tests)
+ changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
def _normalize_helper(self, visited, spec_deps, provider_index, tests):
@@ -3286,37 +3254,40 @@ class Spec(object):
if dep:
merge = (
# caller requested test dependencies
- tests is True or (tests and self.name in tests) or
+ tests is True
+ or (tests and self.name in tests)
+ or
# this is not a test-only dependency
- dep.type - set(['test']))
+ dep.type - set(["test"])
+ )
if merge:
changed |= self._merge_dependency(
- dep, visited, spec_deps, provider_index, tests)
+ dep, visited, spec_deps, provider_index, tests
+ )
any_change |= changed
return any_change
def normalize(self, force=False, tests=False, user_spec_deps=None):
"""When specs are parsed, any dependencies specified are hanging off
- the root, and ONLY the ones that were explicitly provided are there.
- Normalization turns a partial flat spec into a DAG, where:
+ the root, and ONLY the ones that were explicitly provided are there.
+ Normalization turns a partial flat spec into a DAG, where:
- 1. Known dependencies of the root package are in the DAG.
- 2. Each node's dependencies dict only contains its known direct
- deps.
- 3. There is only ONE unique spec for each package in the DAG.
+ 1. Known dependencies of the root package are in the DAG.
+ 2. Each node's dependencies dict only contains its known direct
+ deps.
+ 3. There is only ONE unique spec for each package in the DAG.
- * This includes virtual packages. If there a non-virtual
- package that provides a virtual package that is in the spec,
- then we replace the virtual package with the non-virtual one.
+ * This includes virtual packages. If there a non-virtual
+ package that provides a virtual package that is in the spec,
+ then we replace the virtual package with the non-virtual one.
- TODO: normalize should probably implement some form of cycle
- detection, to ensure that the spec is actually a DAG.
+ TODO: normalize should probably implement some form of cycle
+ detection, to ensure that the spec is actually a DAG.
"""
if not self.name:
- raise spack.error.SpecError(
- "Attempting to normalize anonymous spec")
+ raise spack.error.SpecError("Attempting to normalize anonymous spec")
# Set _normal and _concrete to False when forced
if force and not self._concrete:
@@ -3351,14 +3322,14 @@ class Spec(object):
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
provider_index = spack.provider_index.ProviderIndex(
- [s for s in all_spec_deps.values()], restrict=True)
+ [s for s in all_spec_deps.values()], restrict=True
+ )
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set()
- any_change = self._normalize_helper(
- visited, all_spec_deps, provider_index, tests)
+ any_change = self._normalize_helper(visited, all_spec_deps, provider_index, tests)
# Mark the spec as normal once done.
self._normal = True
@@ -3441,8 +3412,9 @@ class Spec(object):
for value in values:
if self.variants.get(variant_name):
- msg = ("Cannot append a value to a single-valued "
- "variant with an already set value")
+ msg = (
+ "Cannot append a value to a single-valued " "variant with an already set value"
+ )
assert pkg_variant.multi, msg
self.variants[variant_name].append(value)
else:
@@ -3464,20 +3436,18 @@ class Spec(object):
if self.satisfies(other):
return False
else:
- raise spack.error.UnsatisfiableSpecError(
- self, other, 'constrain a concrete spec'
- )
+ raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
other = self._autospec(other)
- if not (self.name == other.name or
- (not self.name) or
- (not other.name)):
+ if not (self.name == other.name or (not self.name) or (not other.name)):
raise UnsatisfiableSpecNameError(self.name, other.name)
- if (other.namespace is not None and
- self.namespace is not None and
- other.namespace != self.namespace):
+ if (
+ other.namespace is not None
+ and self.namespace is not None
+ and other.namespace != self.namespace
+ ):
raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
if not self.versions.overlaps(other.versions):
@@ -3485,9 +3455,7 @@ class Spec(object):
for v in [x for x in other.variants if x in self.variants]:
if not self.variants[v].compatible(other.variants[v]):
- raise vt.UnsatisfiableVariantSpecError(
- self.variants[v], other.variants[v]
- )
+ raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
# TODO: Check out the logic here
sarch, oarch = self.architecture, other.architecture
@@ -3515,7 +3483,7 @@ class Spec(object):
if self.compiler is not None and other.compiler is not None:
changed |= self.compiler.constrain(other.compiler)
elif self.compiler is None:
- changed |= (self.compiler != other.compiler)
+ changed |= self.compiler != other.compiler
self.compiler = other.compiler
changed |= self.versions.intersect(other.versions)
@@ -3534,7 +3502,7 @@ class Spec(object):
sarch.os = sarch.os or oarch.os
if sarch.target is None or oarch.target is None:
sarch.target = sarch.target or oarch.target
- changed |= (str(self.architecture) != old)
+ changed |= str(self.architecture) != old
if deps:
changed |= self._constrain_dependencies(other)
@@ -3570,7 +3538,8 @@ class Spec(object):
# WARNING: one edge from package "name"
edges_from_name = self._dependencies[name]
changed |= edges_from_name[0].update_deptypes(
- other._dependencies[name][0].deptypes)
+ other._dependencies[name][0].deptypes
+ )
# Update with additional constraints from other spec
for name in other.dep_difference(self):
@@ -3584,10 +3553,8 @@ class Spec(object):
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
- common = set(
- s.name for s in self.traverse(root=False))
- common.intersection_update(
- s.name for s in other.traverse(root=False))
+ common = set(s.name for s in self.traverse(root=False))
+ common.intersection_update(s.name for s in other.traverse(root=False))
return common
def constrained(self, other, deps=True):
@@ -3599,8 +3566,7 @@ class Spec(object):
def dep_difference(self, other):
"""Returns dependencies in self that are not in other."""
mine = set(s.name for s in self.traverse(root=False))
- mine.difference_update(
- s.name for s in other.traverse(root=False))
+ mine.difference_update(s.name for s in other.traverse(root=False))
return mine
def _autospec(self, spec_like):
@@ -3648,16 +3614,19 @@ class Spec(object):
if pkg.provides(other.name):
for provided, when_specs in pkg.provided.items():
- if any(self.satisfies(when, deps=False, strict=strict)
- for when in when_specs):
+ if any(
+ self.satisfies(when, deps=False, strict=strict) for when in when_specs
+ ):
if provided.satisfies(other):
return True
return False
# namespaces either match, or other doesn't require one.
- if (other.namespace is not None and
- self.namespace is not None and
- self.namespace != other.namespace):
+ if (
+ other.namespace is not None
+ and self.namespace is not None
+ and self.namespace != other.namespace
+ ):
return False
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
@@ -3686,9 +3655,7 @@ class Spec(object):
elif strict and (other.architecture and not self.architecture):
return False
- if not self.compiler_flags.satisfies(
- other.compiler_flags,
- strict=strict):
+ if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
return False
# If we need to descend into dependencies, do it, otherwise we're done.
@@ -3719,8 +3686,7 @@ class Spec(object):
# use list to prevent double-iteration
selfdeps = list(self.traverse(root=False))
otherdeps = list(other.traverse(root=False))
- if not all(any(d.satisfies(dep, strict=True) for d in selfdeps)
- for dep in otherdeps):
+ if not all(any(d.satisfies(dep, strict=True) for d in selfdeps) for dep in otherdeps):
return False
elif not self._dependencies:
@@ -3734,10 +3700,8 @@ class Spec(object):
return False
# For virtual dependencies, we need to dig a little deeper.
- self_index = spack.provider_index.ProviderIndex(
- self.traverse(), restrict=True)
- other_index = spack.provider_index.ProviderIndex(
- other.traverse(), restrict=True)
+ self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
+ other_index = spack.provider_index.ProviderIndex(other.traverse(), restrict=True)
# This handles cases where there are already providers for both vpkgs
if not self_index.satisfies(other_index):
@@ -3747,8 +3711,7 @@ class Spec(object):
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2)
for spec in self.virtual_dependencies():
- if (spec.name in other_index and
- not other_index.providers_for(spec)):
+ if spec.name in other_index and not other_index.providers_for(spec):
return False
for spec in other.virtual_dependencies():
@@ -3807,25 +3770,26 @@ class Spec(object):
"""
# We don't count dependencies as changes here
changed = True
- if hasattr(self, 'name'):
- changed = (self.name != other.name and
- self.versions != other.versions and
- self.architecture != other.architecture and
- self.compiler != other.compiler and
- self.variants != other.variants and
- self._normal != other._normal and
- self.concrete != other.concrete and
- self.external_path != other.external_path and
- self.external_modules != other.external_modules and
- self.compiler_flags != other.compiler_flags)
+ if hasattr(self, "name"):
+ changed = (
+ self.name != other.name
+ and self.versions != other.versions
+ and self.architecture != other.architecture
+ and self.compiler != other.compiler
+ and self.variants != other.variants
+ and self._normal != other._normal
+ and self.concrete != other.concrete
+ and self.external_path != other.external_path
+ and self.external_modules != other.external_modules
+ and self.compiler_flags != other.compiler_flags
+ )
self._package = None
# Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
- self.architecture = other.architecture.copy() if other.architecture \
- else None
+ self.architecture = other.architecture.copy() if other.architecture else None
self.compiler = other.compiler.copy() if other.compiler else None
if cleardeps:
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
@@ -3839,7 +3803,7 @@ class Spec(object):
# to keep it from leaking out of spec.py, but we should figure
# out how to handle it more elegantly in the Variant classes.
for k, v in other.variants.items():
- patches = getattr(v, '_patches_in_order_of_appearance', None)
+ patches = getattr(v, "_patches_in_order_of_appearance", None)
if patches:
self.variants[k]._patches_in_order_of_appearance = patches
@@ -3880,14 +3844,12 @@ class Spec(object):
return id(spec)
new_specs = {spid(other): self}
- for edge in other.traverse_edges(cover='edges', root=False):
+ for edge in other.traverse_edges(cover="edges", root=False):
if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
continue
if spid(edge.parent) not in new_specs:
- new_specs[spid(edge.parent)] = edge.parent.copy(
- deps=False
- )
+ new_specs[spid(edge.parent)] = edge.parent.copy(deps=False)
if spid(edge.spec) not in new_specs:
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
@@ -3931,8 +3893,7 @@ class Spec(object):
@property
def version(self):
if not self.versions.concrete:
- raise spack.error.SpecError(
- "Spec version is not concrete: " + str(self))
+ raise spack.error.SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
def __getitem__(self, name):
@@ -3944,10 +3905,10 @@ class Spec(object):
Note that if a virtual package is queried a copy of the Spec is
returned while for non-virtual a reference is returned.
"""
- query_parameters = name.split(':')
+ query_parameters = name.split(":")
if len(query_parameters) > 2:
- msg = 'key has more than one \':\' symbol.'
- msg += ' At most one is admitted.'
+ msg = "key has more than one ':' symbol."
+ msg += " At most one is admitted."
raise KeyError(msg)
name, query_parameters = query_parameters[0], query_parameters[1:]
@@ -3955,15 +3916,14 @@ class Spec(object):
# We have extra query parameters, which are comma separated
# values
csv = query_parameters.pop().strip()
- query_parameters = re.split(r'\s*,\s*', csv)
+ query_parameters = re.split(r"\s*,\s*", csv)
try:
value = next(
itertools.chain(
# Regular specs
(x for x in self.traverse() if x.name == name),
- (x for x in self.traverse()
- if (not x.virtual) and x.package.provides(name))
+ (x for x in self.traverse() if (not x.virtual) and x.package.provides(name)),
)
)
except StopIteration:
@@ -4007,13 +3967,10 @@ class Spec(object):
if len(self._dependencies) != len(other._dependencies):
return False
- ssorted = [self._dependencies[name]
- for name in sorted(self._dependencies)]
- osorted = [other._dependencies[name]
- for name in sorted(other._dependencies)]
+ ssorted = [self._dependencies[name] for name in sorted(self._dependencies)]
+ osorted = [other._dependencies[name] for name in sorted(other._dependencies)]
for s_dspec, o_dspec in zip(
- itertools.chain.from_iterable(ssorted),
- itertools.chain.from_iterable(osorted)
+ itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted)
):
if deptypes and s_dspec.deptypes != o_dspec.deptypes:
return False
@@ -4048,9 +4005,7 @@ class Spec(object):
def eq_node(self, other):
"""Equality with another spec, not including dependencies."""
- return (other is not None) and lang.lazy_eq(
- self._cmp_node, other._cmp_node
- )
+ return (other is not None) and lang.lazy_eq(self._cmp_node, other._cmp_node)
def _cmp_iter(self):
"""Lazily yield components of self for comparison."""
@@ -4058,12 +4013,11 @@ class Spec(object):
yield item
def deps():
- for dep in sorted(
- itertools.chain.from_iterable(self._dependencies.values())
- ):
+ for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
yield dep.spec.name
yield tuple(sorted(dep.deptypes))
yield hash(dep.spec)
+
yield deps
def colorized(self):
@@ -4149,69 +4103,68 @@ class Spec(object):
"""
# If we have an unescaped $ sigil, use the deprecated format strings
- if re.search(r'[^\\]*\$', format_string):
+ if re.search(r"[^\\]*\$", format_string):
return self.old_format(format_string, **kwargs)
- color = kwargs.get('color', False)
- transform = kwargs.get('transform', {})
+ color = kwargs.get("color", False)
+ transform = kwargs.get("transform", {})
out = six.StringIO()
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
- f = color_formats[c] + f + '@.'
+ f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
def write_attribute(spec, attribute, color):
current = spec
- if attribute.startswith('^'):
+ if attribute.startswith("^"):
attribute = attribute[1:]
- dep, attribute = attribute.split('.', 1)
+ dep, attribute = attribute.split(".", 1)
current = self[dep]
- if attribute == '':
- raise SpecFormatStringError(
- 'Format string attributes must be non-empty')
+ if attribute == "":
+ raise SpecFormatStringError("Format string attributes must be non-empty")
attribute = attribute.lower()
- sig = ''
- if attribute[0] in '@%/':
+ sig = ""
+ if attribute[0] in "@%/":
# color sigils that are inside braces
sig = attribute[0]
attribute = attribute[1:]
- elif attribute.startswith('arch='):
- sig = ' arch=' # include space as separator
+ elif attribute.startswith("arch="):
+ sig = " arch=" # include space as separator
attribute = attribute[5:]
- parts = attribute.split('.')
+ parts = attribute.split(".")
assert parts
# check that the sigil is valid for the attribute.
- if sig == '@' and parts[-1] not in ('versions', 'version'):
- raise SpecFormatSigilError(sig, 'versions', attribute)
- elif sig == '%' and attribute not in ('compiler', 'compiler.name'):
- raise SpecFormatSigilError(sig, 'compilers', attribute)
- elif sig == '/' and not re.match(r'hash(:\d+)?$', attribute):
- raise SpecFormatSigilError(sig, 'DAG hashes', attribute)
- elif sig == ' arch=' and attribute not in ('architecture', 'arch'):
- raise SpecFormatSigilError(sig, 'the architecture', attribute)
+ if sig == "@" and parts[-1] not in ("versions", "version"):
+ raise SpecFormatSigilError(sig, "versions", attribute)
+ elif sig == "%" and attribute not in ("compiler", "compiler.name"):
+ raise SpecFormatSigilError(sig, "compilers", attribute)
+ elif sig == "/" and not re.match(r"hash(:\d+)?$", attribute):
+ raise SpecFormatSigilError(sig, "DAG hashes", attribute)
+ elif sig == " arch=" and attribute not in ("architecture", "arch"):
+ raise SpecFormatSigilError(sig, "the architecture", attribute)
# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)
# Special cases for non-spec attributes and hashes.
# These must be the only non-dep component of the format attribute
- if attribute == 'spack_root':
+ if attribute == "spack_root":
write(morph(spec, spack.paths.spack_root))
return
- elif attribute == 'spack_install':
+ elif attribute == "spack_install":
write(morph(spec, spack.store.layout.root))
return
- elif re.match(r'hash(:\d)?', attribute):
- col = '#'
- if ':' in attribute:
- _, length = attribute.split(':')
+ elif re.match(r"hash(:\d)?", attribute):
+ col = "#"
+ if ":" in attribute:
+ _, length = attribute.split(":")
write(sig + morph(spec, spec.dag_hash(int(length))), col)
else:
write(sig + morph(spec, spec.dag_hash()), col)
@@ -4220,31 +4173,27 @@ class Spec(object):
# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
if not part:
- raise SpecFormatStringError(
- 'Format string attributes must be non-empty'
- )
- if part.startswith('_'):
- raise SpecFormatStringError(
- 'Attempted to format private attribute'
- )
+ raise SpecFormatStringError("Format string attributes must be non-empty")
+ if part.startswith("_"):
+ raise SpecFormatStringError("Attempted to format private attribute")
else:
if isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
# aliases
- if part == 'arch':
- part = 'architecture'
- elif part == 'version':
+ if part == "arch":
+ part = "architecture"
+ elif part == "version":
# Version requires concrete spec, versions does not
# when concrete, they print the same thing
- part = 'versions'
+ part = "versions"
try:
current = getattr(current, part)
except AttributeError:
- parent = '.'.join(parts[:idx])
- m = 'Attempted to format attribute %s.' % attribute
- m += 'Spec.%s has no attribute %s' % (parent, part)
+ parent = ".".join(parts[:idx])
+ m = "Attempted to format attribute %s." % attribute
+ m += "Spec.%s has no attribute %s" % (parent, part)
raise SpecFormatStringError(m)
if isinstance(current, vn.VersionList):
if current == _any_version:
@@ -4252,28 +4201,26 @@ class Spec(object):
return
if callable(current):
- raise SpecFormatStringError(
- 'Attempted to format callable object'
- )
+ raise SpecFormatStringError("Attempted to format callable object")
if not current:
# We're not printing anything
return
# Set color codes for various attributes
col = None
- if 'variants' in parts:
- col = '+'
- elif 'architecture' in parts:
- col = '='
- elif 'compiler' in parts or 'compiler_flags' in parts:
- col = '%'
- elif 'version' in parts:
- col = '@'
+ if "variants" in parts:
+ col = "+"
+ elif "architecture" in parts:
+ col = "="
+ elif "compiler" in parts or "compiler_flags" in parts:
+ col = "%"
+ elif "version" in parts:
+ col = "@"
# Finally, write the output
write(sig + morph(spec, str(current)), col)
- attribute = ''
+ attribute = ""
in_attribute = False
escape = False
@@ -4281,34 +4228,31 @@ class Spec(object):
if escape:
out.write(c)
escape = False
- elif c == '\\':
+ elif c == "\\":
escape = True
elif in_attribute:
- if c == '}':
+ if c == "}":
write_attribute(self, attribute, color)
- attribute = ''
+ attribute = ""
in_attribute = False
else:
attribute += c
else:
- if c == '}':
- raise SpecFormatStringError(
- 'Encountered closing } before opening {'
- )
- elif c == '{':
+ if c == "}":
+ raise SpecFormatStringError("Encountered closing } before opening {")
+ elif c == "{":
in_attribute = True
else:
out.write(c)
if in_attribute:
raise SpecFormatStringError(
- 'Format string terminated while reading attribute.'
- 'Missing terminating }.'
+ "Format string terminated while reading attribute." "Missing terminating }."
)
formatted_spec = out.getvalue()
return formatted_spec.strip()
- def old_format(self, format_string='$_$@$%@+$+$=', **kwargs):
+ def old_format(self, format_string="$_$@$%@+$+$=", **kwargs):
"""
The format strings you can provide are::
@@ -4382,77 +4326,79 @@ class Spec(object):
TODO: allow, e.g., ``$//`` for full hash.
"""
- color = kwargs.get('color', False)
+ color = kwargs.get("color", False)
# Dictionary of transformations for named tokens
- token_transforms = dict(
- (k.upper(), v) for k, v in kwargs.get('transform', {}).items())
+ token_transforms = dict((k.upper(), v) for k, v in kwargs.get("transform", {}).items())
length = len(format_string)
out = six.StringIO()
named = escape = compiler = False
- named_str = fmt = ''
+ named_str = fmt = ""
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
- f = color_formats[c] + f + '@.'
+ f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
iterator = enumerate(format_string)
for i, c in iterator:
if escape:
- fmt = '%'
- if c == '-':
+ fmt = "%"
+ if c == "-":
fmt += c
i, c = next(iterator)
- while c in '0123456789':
+ while c in "0123456789":
fmt += c
i, c = next(iterator)
- fmt += 's'
+ fmt += "s"
- if c == '_':
- name = self.name if self.name else ''
+ if c == "_":
+ name = self.name if self.name else ""
out.write(fmt % name)
- elif c == '.':
- name = self.fullname if self.fullname else ''
+ elif c == ".":
+ name = self.fullname if self.fullname else ""
out.write(fmt % name)
- elif c == '@':
+ elif c == "@":
if self.versions and self.versions != _any_version:
write(fmt % (c + str(self.versions)), c)
- elif c == '%':
+ elif c == "%":
if self.compiler:
write(fmt % (c + str(self.compiler.name)), c)
compiler = True
- elif c == '+':
+ elif c == "+":
if self.variants:
write(fmt % str(self.variants), c)
- elif c == '=':
+ elif c == "=":
if self.architecture and str(self.architecture):
- a_str = ' arch' + c + str(self.architecture) + ' '
+ a_str = " arch" + c + str(self.architecture) + " "
write(fmt % (a_str), c)
- elif c == '/':
- out.write('/' + fmt % (self.dag_hash(7)))
- elif c == '$':
- if fmt != '%s':
+ elif c == "/":
+ out.write("/" + fmt % (self.dag_hash(7)))
+ elif c == "$":
+ if fmt != "%s":
raise ValueError("Can't use format width with $$.")
- out.write('$')
- elif c == '{':
+ out.write("$")
+ elif c == "{":
named = True
- named_str = ''
+ named_str = ""
escape = False
elif compiler:
- if c == '@':
- if (self.compiler and self.compiler.versions and
- self.compiler.versions != _any_version):
- write(c + str(self.compiler.versions), '%')
- elif c == '+':
+ if c == "@":
+ if (
+ self.compiler
+ and self.compiler.versions
+ and self.compiler.versions != _any_version
+ ):
+ write(c + str(self.compiler.versions), "%")
+ elif c == "+":
if self.compiler_flags:
- write(fmt % str(self.compiler_flags), '%')
+ write(fmt % str(self.compiler_flags), "%")
compiler = False
- elif c == '$':
+ elif c == "$":
escape = True
compiler = False
else:
@@ -4460,10 +4406,11 @@ class Spec(object):
compiler = False
elif named:
- if not c == '}':
+ if not c == "}":
if i == length - 1:
- raise ValueError("Error: unterminated ${ in format:"
- "'%s'" % format_string)
+ raise ValueError(
+ "Error: unterminated ${ in format:" "'%s'" % format_string
+ )
named_str += c
continue
named_str = named_str.upper()
@@ -4474,82 +4421,72 @@ class Spec(object):
# (`lambda x: x` is the identity function)
transform = token_transforms.get(named_str, lambda s, x: x)
- if named_str == 'PACKAGE':
- name = self.name if self.name else ''
+ if named_str == "PACKAGE":
+ name = self.name if self.name else ""
write(fmt % transform(self, name))
- elif named_str == 'FULLPACKAGE':
- name = self.fullname if self.fullname else ''
+ elif named_str == "FULLPACKAGE":
+ name = self.fullname if self.fullname else ""
write(fmt % transform(self, name))
- elif named_str == 'VERSION':
+ elif named_str == "VERSION":
if self.versions and self.versions != _any_version:
- write(fmt % transform(self, str(self.versions)), '@')
- elif named_str == 'COMPILER':
+ write(fmt % transform(self, str(self.versions)), "@")
+ elif named_str == "COMPILER":
if self.compiler:
- write(fmt % transform(self, self.compiler), '%')
- elif named_str == 'COMPILERNAME':
+ write(fmt % transform(self, self.compiler), "%")
+ elif named_str == "COMPILERNAME":
if self.compiler:
- write(fmt % transform(self, self.compiler.name), '%')
- elif named_str in ['COMPILERVER', 'COMPILERVERSION']:
+ write(fmt % transform(self, self.compiler.name), "%")
+ elif named_str in ["COMPILERVER", "COMPILERVERSION"]:
if self.compiler:
- write(
- fmt % transform(self, self.compiler.versions),
- '%'
- )
- elif named_str == 'COMPILERFLAGS':
+ write(fmt % transform(self, self.compiler.versions), "%")
+ elif named_str == "COMPILERFLAGS":
if self.compiler:
- write(
- fmt % transform(self, str(self.compiler_flags)),
- '%'
- )
- elif named_str == 'OPTIONS':
+ write(fmt % transform(self, str(self.compiler_flags)), "%")
+ elif named_str == "OPTIONS":
if self.variants:
- write(fmt % transform(self, str(self.variants)), '+')
+ write(fmt % transform(self, str(self.variants)), "+")
elif named_str in ["ARCHITECTURE", "PLATFORM", "TARGET", "OS"]:
if self.architecture and str(self.architecture):
if named_str == "ARCHITECTURE":
- write(
- fmt % transform(self, str(self.architecture)),
- '='
- )
+ write(fmt % transform(self, str(self.architecture)), "=")
elif named_str == "PLATFORM":
platform = str(self.architecture.platform)
- write(fmt % transform(self, platform), '=')
+ write(fmt % transform(self, platform), "=")
elif named_str == "OS":
operating_sys = str(self.architecture.os)
- write(fmt % transform(self, operating_sys), '=')
+ write(fmt % transform(self, operating_sys), "=")
elif named_str == "TARGET":
target = str(self.architecture.target)
- write(fmt % transform(self, target), '=')
- elif named_str == 'SHA1':
+ write(fmt % transform(self, target), "=")
+ elif named_str == "SHA1":
if self.dependencies:
out.write(fmt % transform(self, str(self.dag_hash(7))))
- elif named_str == 'SPACK_ROOT':
+ elif named_str == "SPACK_ROOT":
out.write(fmt % transform(self, spack.paths.prefix))
- elif named_str == 'SPACK_INSTALL':
+ elif named_str == "SPACK_INSTALL":
out.write(fmt % transform(self, spack.store.root))
- elif named_str == 'PREFIX':
+ elif named_str == "PREFIX":
out.write(fmt % transform(self, self.prefix))
- elif named_str.startswith('HASH'):
- if named_str.startswith('HASH:'):
- _, hashlen = named_str.split(':')
+ elif named_str.startswith("HASH"):
+ if named_str.startswith("HASH:"):
+ _, hashlen = named_str.split(":")
hashlen = int(hashlen)
else:
hashlen = None
out.write(fmt % (self.dag_hash(hashlen)))
- elif named_str == 'NAMESPACE':
+ elif named_str == "NAMESPACE":
out.write(fmt % transform(self, self.namespace))
- elif named_str.startswith('DEP:'):
- _, dep_name, dep_option = named_str.lower().split(':', 2)
+ elif named_str.startswith("DEP:"):
+ _, dep_name, dep_option = named_str.lower().split(":", 2)
dep_spec = self[dep_name]
- out.write(fmt % (dep_spec.format('${%s}' % dep_option)))
+ out.write(fmt % (dep_spec.format("${%s}" % dep_option)))
named = False
- elif c == '$':
+ elif c == "$":
escape = True
if i == length - 1:
- raise ValueError("Error: unterminated $ in format: '%s'"
- % format_string)
+ raise ValueError("Error: unterminated $ in format: '%s'" % format_string)
else:
out.write(c)
@@ -4559,13 +4496,11 @@ class Spec(object):
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
kwargs = kwargs.copy()
- kwargs.setdefault('color', None)
+ kwargs.setdefault("color", None)
return self.format(*args, **kwargs)
def __str__(self):
- sorted_nodes = [self] + sorted(
- self.traverse(root=False), key=lambda x: x.name
- )
+ sorted_nodes = [self] + sorted(self.traverse(root=False), key=lambda x: x.name)
spec_str = " ^".join(d.format() for d in sorted_nodes)
return spec_str.strip()
@@ -4591,24 +4526,25 @@ class Spec(object):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
- with indentation."""
- color = kwargs.pop('color', clr.get_color_when())
- depth = kwargs.pop('depth', False)
- hashes = kwargs.pop('hashes', False)
- hlen = kwargs.pop('hashlen', None)
- status_fn = kwargs.pop('status_fn', False)
- cover = kwargs.pop('cover', 'nodes')
- indent = kwargs.pop('indent', 0)
- fmt = kwargs.pop('format', default_format)
- prefix = kwargs.pop('prefix', None)
- show_types = kwargs.pop('show_types', False)
- deptypes = kwargs.pop('deptypes', 'all')
- recurse_dependencies = kwargs.pop('recurse_dependencies', True)
+ with indentation."""
+ color = kwargs.pop("color", clr.get_color_when())
+ depth = kwargs.pop("depth", False)
+ hashes = kwargs.pop("hashes", False)
+ hlen = kwargs.pop("hashlen", None)
+ status_fn = kwargs.pop("status_fn", False)
+ cover = kwargs.pop("cover", "nodes")
+ indent = kwargs.pop("indent", 0)
+ fmt = kwargs.pop("format", default_format)
+ prefix = kwargs.pop("prefix", None)
+ show_types = kwargs.pop("show_types", False)
+ deptypes = kwargs.pop("deptypes", "all")
+ recurse_dependencies = kwargs.pop("recurse_dependencies", True)
lang.check_kwargs(kwargs, self.tree)
out = ""
for d, dep_spec in self.traverse_edges(
- order='pre', cover=cover, depth=True, deptype=deptypes):
+ order="pre", cover=cover, depth=True, deptype=deptypes
+ ):
node = dep_spec.spec
if prefix is not None:
@@ -4630,25 +4566,22 @@ class Spec(object):
out += clr.colorize("@r{[-]} ", color=color) # missing
if hashes:
- out += clr.colorize(
- '@K{%s} ', color=color) % node.dag_hash(hlen)
+ out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
if show_types:
- if cover == 'nodes':
+ if cover == "nodes":
# when only covering nodes, we merge dependency types
# from all dependents before showing them.
- types = [
- ds.deptypes for ds in node.edges_from_dependents()
- ]
+ types = [ds.deptypes for ds in node.edges_from_dependents()]
else:
# when covering edges or paths, we show dependency
# types only for the edge through which we visited
types = [dep_spec.deptypes]
type_chars = dp.deptype_chars(*types)
- out += '[%s] ' % type_chars
+ out += "[%s] " % type_chars
- out += (" " * d)
+ out += " " * d
if d > 0:
out += "^"
out += node.format(fmt, color=color) + "\n"
@@ -4709,8 +4642,7 @@ class Spec(object):
assert self.concrete
assert other.concrete
- virtuals_to_replace = [v.name for v in other.package.virtuals_provided
- if v in self]
+ virtuals_to_replace = [v.name for v in other.package.virtuals_provided if v in self]
if virtuals_to_replace:
deps_to_replace = dict((self[v], other) for v in virtuals_to_replace)
# deps_to_replace = [self[v] for v in virtuals_to_replace]
@@ -4720,19 +4652,31 @@ class Spec(object):
# deps_to_replace = [self[other.name]]
for d in deps_to_replace:
- if not all(v in other.package.virtuals_provided or v not in self
- for v in d.package.virtuals_provided):
+ if not all(
+ v in other.package.virtuals_provided or v not in self
+ for v in d.package.virtuals_provided
+ ):
# There was something provided by the original that we don't
# get from its replacement.
- raise SpliceError(("Splice between {0} and {1} will not provide "
- "the same virtuals.").format(self.name, other.name))
+ raise SpliceError(
+ ("Splice between {0} and {1} will not provide " "the same virtuals.").format(
+ self.name, other.name
+ )
+ )
for n in d.traverse(root=False):
- if not all(any(v in other_n.package.virtuals_provided
- for other_n in other.traverse(root=False))
- or v not in self for v in n.package.virtuals_provided):
- raise SpliceError(("Splice between {0} and {1} will not provide "
- "the same virtuals."
- ).format(self.name, other.name))
+ if not all(
+ any(
+ v in other_n.package.virtuals_provided
+ for other_n in other.traverse(root=False)
+ )
+ or v not in self
+ for v in n.package.virtuals_provided
+ ):
+ raise SpliceError(
+ (
+ "Splice between {0} and {1} will not provide " "the same virtuals."
+ ).format(self.name, other.name)
+ )
# For now, check that we don't have DAG with multiple specs from the
# same package
@@ -4742,8 +4686,10 @@ class Spec(object):
return max_number > 1
if multiple_specs(self) or multiple_specs(other):
- msg = ('Either "{0}" or "{1}" contain multiple specs from the same '
- 'package, which cannot be handled by splicing at the moment')
+ msg = (
+ 'Either "{0}" or "{1}" contain multiple specs from the same '
+ "package, which cannot be handled by splicing at the moment"
+ )
raise ValueError(msg.format(self, other))
# Multiple unique specs with the same name will collide, so the
@@ -4763,23 +4709,28 @@ class Spec(object):
else:
if name == other.name:
return False
- if any(v in other.package.virtuals_provided
- for v in self[name].package.virtuals_provided):
+ if any(
+ v in other.package.virtuals_provided
+ for v in self[name].package.virtuals_provided
+ ):
return False
return True
- self_nodes = dict((s.name, s.copy(deps=False))
- for s in self.traverse(root=True)
- if from_self(s.name, transitive))
+ self_nodes = dict(
+ (s.name, s.copy(deps=False))
+ for s in self.traverse(root=True)
+ if from_self(s.name, transitive)
+ )
if transitive:
- other_nodes = dict((s.name, s.copy(deps=False))
- for s in other.traverse(root=True))
+ other_nodes = dict((s.name, s.copy(deps=False)) for s in other.traverse(root=True))
else:
# NOTE: Does not fully validate providers; loader races possible
- other_nodes = dict((s.name, s.copy(deps=False))
- for s in other.traverse(root=True)
- if s is other or s.name not in self)
+ other_nodes = dict(
+ (s.name, s.copy(deps=False))
+ for s in other.traverse(root=True)
+ if s is other or s.name not in self
+ )
nodes = other_nodes.copy()
nodes.update(self_nodes)
@@ -4788,30 +4739,24 @@ class Spec(object):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
- nodes[name].add_dependency_edge(
- nodes[dep_name], edge.deptypes
- )
- if any(dep not in self_nodes
- for dep in self[name]._dependencies):
+ nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
+ if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
- nodes[name].add_dependency_edge(
- nodes[edge.spec.name], edge.deptypes
- )
- if any(dep not in other_nodes
- for dep in other[name]._dependencies):
+ nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
+ if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
ret = nodes[self.name]
# Clear cached hashes for all affected nodes
# Do not touch unaffected nodes
- for dep in ret.traverse(root=True, order='post'):
+ for dep in ret.traverse(root=True, order="post"):
opposite = other_nodes if dep.name in self_nodes else self_nodes
if any(name in dep for name in opposite.keys()):
# package hash cannot be affected by splice
- dep.clear_cached_hashes(ignore=['package_hash'])
+ dep.clear_cached_hashes(ignore=["package_hash"])
dep.dag_hash()
@@ -4867,9 +4812,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):
merged_spec.constrain(current_spec_constraint, deps=False)
for name in merged_spec.common_dependencies(current_spec_constraint):
- merged_spec[name].constrain(
- current_spec_constraint[name], deps=False
- )
+ merged_spec[name].constrain(current_spec_constraint[name], deps=False)
# Update with additional constraints from other spec
for name in current_spec_constraint.dep_difference(merged_spec):
@@ -4887,7 +4830,7 @@ def _spec_from_old_dict(data):
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
"""
- nodes = data['spec']
+ nodes = data["spec"]
# Read nodes out of list. Root spec is the first element;
# dependencies are the following elements.
@@ -4901,7 +4844,7 @@ def _spec_from_old_dict(data):
# get dependency dict from the node.
name = next(iter(node))
- if 'dependencies' not in node[name]:
+ if "dependencies" not in node[name]:
continue
for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
@@ -4920,17 +4863,17 @@ def _spec_from_dict(data):
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
"""
- if isinstance(data['spec'], list): # Legacy specfile format
+ if isinstance(data["spec"], list): # Legacy specfile format
return _spec_from_old_dict(data)
# Current specfile format
- nodes = data['spec']['nodes']
+ nodes = data["spec"]["nodes"]
hash_type = None
any_deps = False
# Pass 0: Determine hash type
for node in nodes:
- if 'dependencies' in node.keys():
+ if "dependencies" in node.keys():
any_deps = True
for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
if dhash_type:
@@ -4940,8 +4883,9 @@ def _spec_from_dict(data):
if not any_deps: # If we never see a dependency...
hash_type = ht.dag_hash.name
elif not hash_type: # Seen a dependency, still don't know hash_type
- raise spack.error.SpecError("Spec dictionary contains malformed "
- "dependencies. Old format?")
+ raise spack.error.SpecError(
+ "Spec dictionary contains malformed " "dependencies. Old format?"
+ )
hash_dict = {}
root_spec_hash = None
@@ -4951,7 +4895,7 @@ def _spec_from_dict(data):
node_hash = node[hash_type]
node_spec = Spec.from_node_dict(node)
hash_dict[node_hash] = node
- hash_dict[node_hash]['node_spec'] = node_spec
+ hash_dict[node_hash]["node_spec"] = node_spec
if i == 0:
root_spec_hash = node_hash
if not root_spec_hash:
@@ -4959,21 +4903,21 @@ def _spec_from_dict(data):
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
- node_spec = node['node_spec']
+ node_spec = node["node_spec"]
for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
- node_spec._add_dependency(hash_dict[dhash]['node_spec'], dtypes)
- if 'build_spec' in node.keys():
- _, bhash, _ = Spec.build_spec_from_node_dict(node,
- hash_type=hash_type)
- node_spec._build_spec = hash_dict[bhash]['node_spec']
+ node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
+ if "build_spec" in node.keys():
+ _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
+ node_spec._build_spec = hash_dict[bhash]["node_spec"]
- return hash_dict[root_spec_hash]['node_spec']
+ return hash_dict[root_spec_hash]["node_spec"]
class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily
the corresponding value ``Spec(spec_like``.
"""
+
def __init__(self):
super(LazySpecCache, self).__init__(Spec)
@@ -4987,7 +4931,7 @@ class LazySpecCache(collections.defaultdict):
HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
#: Regex for fully qualified spec names. (e.g., builtin.hdf5)
-spec_id_re = r'\w[\w.-]*'
+spec_id_re = r"\w[\w.-]*"
class SpecLexer(spack.parse.Lexer):
@@ -4997,36 +4941,39 @@ class SpecLexer(spack.parse.Lexer):
def __init__(self):
# Spec strings require posix-style paths on Windows
# because the result is later passed to shlex
- filename_reg = r'[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*' if not is_windows\
- else r'([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*'
- super(SpecLexer, self).__init__([
- (r'\^', lambda scanner, val: self.token(DEP, val)),
- (r'\@', lambda scanner, val: self.token(AT, val)),
- (r'\:', lambda scanner, val: self.token(COLON, val)),
- (r'\,', lambda scanner, val: self.token(COMMA, val)),
- (r'\+', lambda scanner, val: self.token(ON, val)),
- (r'\-', lambda scanner, val: self.token(OFF, val)),
- (r'\~', lambda scanner, val: self.token(OFF, val)),
- (r'\%', lambda scanner, val: self.token(PCT, val)),
- (r'\=', lambda scanner, val: self.token(EQ, val)),
-
- # Filenames match before identifiers, so no initial filename
- # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
- (filename_reg,
- lambda scanner, v: self.token(FILE, v)),
-
- # Hash match after filename. No valid filename can be a hash
- # (files end w/.yaml), but a hash can match a filename prefix.
- (r'/', lambda scanner, val: self.token(HASH, val)),
-
- # Identifiers match after filenames and hashes.
- (spec_id_re, lambda scanner, val: self.token(ID, val)),
-
- (r'\s+', lambda scanner, val: None)],
+ filename_reg = (
+ r"[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+ if not is_windows
+ else r"([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+ )
+ super(SpecLexer, self).__init__(
+ [
+ (r"\^", lambda scanner, val: self.token(DEP, val)),
+ (r"\@", lambda scanner, val: self.token(AT, val)),
+ (r"\:", lambda scanner, val: self.token(COLON, val)),
+ (r"\,", lambda scanner, val: self.token(COMMA, val)),
+ (r"\+", lambda scanner, val: self.token(ON, val)),
+ (r"\-", lambda scanner, val: self.token(OFF, val)),
+ (r"\~", lambda scanner, val: self.token(OFF, val)),
+ (r"\%", lambda scanner, val: self.token(PCT, val)),
+ (r"\=", lambda scanner, val: self.token(EQ, val)),
+ # Filenames match before identifiers, so no initial filename
+ # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
+ (filename_reg, lambda scanner, v: self.token(FILE, v)),
+ # Hash match after filename. No valid filename can be a hash
+ # (files end w/.yaml), but a hash can match a filename prefix.
+ (r"/", lambda scanner, val: self.token(HASH, val)),
+ # Identifiers match after filenames and hashes.
+ (spec_id_re, lambda scanner, val: self.token(ID, val)),
+ (r"\s+", lambda scanner, val: None),
+ ],
[EQ],
- [(r'[\S].*', lambda scanner, val: self.token(VAL, val)),
- (r'\s+', lambda scanner, val: None)],
- [VAL])
+ [
+ (r"[\S].*", lambda scanner, val: self.token(VAL, val)),
+ (r"\s+", lambda scanner, val: None),
+ ],
+ [VAL],
+ )
# Lexer is always the same for every parser.
@@ -5073,8 +5020,7 @@ class SpecParser(spack.parse.Parser):
else:
if specs[-1].concrete:
# Trying to add k-v pair to spec from hash
- raise RedundantSpecError(specs[-1],
- 'key-value pair')
+ raise RedundantSpecError(specs[-1], "key-value pair")
# We should never end up here.
# This requires starting a new spec with ID, EQ
# After another spec that is not concrete
@@ -5109,7 +5055,7 @@ class SpecParser(spack.parse.Parser):
# We're finding a dependency by hash for an
# anonymous spec
dep = self.spec_by_hash()
- dep = dep.copy(deps=('link', 'run'))
+ dep = dep.copy(deps=("link", "run"))
if not dep:
# We're adding a dependency to the last spec
@@ -5119,8 +5065,7 @@ class SpecParser(spack.parse.Parser):
# This is an anonymous dep with a key=value
# push tokens to be parsed as part of the
# dep spec
- self.push_tokens(
- [self.previous, self.token])
+ self.push_tokens([self.previous, self.token])
dep_name = None
else:
# named dep (standard)
@@ -5134,7 +5079,7 @@ class SpecParser(spack.parse.Parser):
# Raise an error if the previous spec is already
# concrete (assigned by hash)
if specs[-1].concrete:
- raise RedundantSpecError(specs[-1], 'dependency')
+ raise RedundantSpecError(specs[-1], "dependency")
# command line deps get empty deptypes now.
# Real deptypes are assigned later per packages.
specs[-1]._add_dependency(dep, ())
@@ -5145,9 +5090,7 @@ class SpecParser(spack.parse.Parser):
if self.next.type in (AT, ON, OFF, PCT):
# Raise an error if the previous spec is already concrete
if specs and specs[-1].concrete:
- raise RedundantSpecError(specs[-1],
- 'compiler, version, '
- 'or variant')
+ raise RedundantSpecError(specs[-1], "compiler, version, " "or variant")
specs.append(self.spec(None))
else:
self.unexpected_token()
@@ -5160,10 +5103,7 @@ class SpecParser(spack.parse.Parser):
# Cannot do lookups for versions in anonymous specs
# Only allow Version objects to use git for now
# Note: VersionRange(x, x) is currently concrete, hence isinstance(...).
- if (
- spec.name and spec.versions.concrete and
- isinstance(spec.version, vn.GitVersion)
- ):
+ if spec.name and spec.versions.concrete and isinstance(spec.version, vn.GitVersion):
spec.version.generate_git_lookup(spec.fullname)
return specs
@@ -5198,8 +5138,7 @@ class SpecParser(spack.parse.Parser):
# regex admits text *beyond* .yaml, and we raise a nice error for
# file names that don't end in .yaml.
if not (path.endswith(".yaml") or path.endswith(".json")):
- raise SpecFilenameError(
- "Spec filename must end in .yaml or .json: '{0}'".format(path))
+ raise SpecFilenameError("Spec filename must end in .yaml or .json: '{0}'".format(path))
if not os.path.exists(path):
raise NoSuchSpecFileError("No such spec file: '{0}'".format(path))
@@ -5216,6 +5155,7 @@ class SpecParser(spack.parse.Parser):
def spec_by_hash(self):
# TODO: Remove parser dependency on active environment and database.
import spack.environment
+
self.expect(ID)
dag_hash = self.token.value
matches = []
@@ -5228,18 +5168,18 @@ class SpecParser(spack.parse.Parser):
if len(matches) != 1:
raise AmbiguousHashError(
- "Multiple packages specify hash beginning '%s'."
- % dag_hash, *matches)
+ "Multiple packages specify hash beginning '%s'." % dag_hash, *matches
+ )
return matches[0]
def spec(self, name):
"""Parse a spec out of the input. If a spec is supplied, initialize
- and return it instead of creating a new one."""
+ and return it instead of creating a new one."""
spec_namespace = None
spec_name = None
if name:
- spec_namespace, dot, spec_name = name.rpartition('.')
+ spec_namespace, dot, spec_name = name.rpartition(".")
if not spec_namespace:
spec_namespace = None
self.check_identifier(spec_name)
@@ -5352,46 +5292,44 @@ class SpecParser(spack.parse.Parser):
vlist = self.version_list()
compiler._add_versions(vlist)
else:
- compiler.versions = vn.VersionList(':')
+ compiler.versions = vn.VersionList(":")
return compiler
def check_identifier(self, id=None):
"""The only identifiers that can contain '.' are versions, but version
- ids are context-sensitive so we have to check on a case-by-case
- basis. Call this if we detect a version id where it shouldn't be.
+ ids are context-sensitive so we have to check on a case-by-case
+ basis. Call this if we detect a version id where it shouldn't be.
"""
if not id:
id = self.token.value
- if '.' in id:
- self.last_token_error(
- "{0}: Identifier cannot contain '.'".format(id))
+ if "." in id:
+ self.last_token_error("{0}: Identifier cannot contain '.'".format(id))
def parse(string):
"""Returns a list of specs from an input string.
- For creating one spec, see Spec() constructor.
+ For creating one spec, see Spec() constructor.
"""
return SpecParser().parse(string)
def save_dependency_specfiles(
- root_spec_info, output_directory, dependencies=None,
- spec_format='json'):
+ root_spec_info, output_directory, dependencies=None, spec_format="json"
+):
"""Given a root spec (represented as a yaml object), index it with a subset
- of its dependencies, and write each dependency to a separate yaml file
- in the output directory. By default, all dependencies will be written
- out. To choose a smaller subset of dependencies to be written, pass a
- list of package names in the dependencies parameter. If the format of the
- incoming spec is not json, that can be specified with the spec_format
- parameter. This can be used to convert from yaml specfiles to the
- json format."""
- if spec_format == 'json':
+ of its dependencies, and write each dependency to a separate yaml file
+ in the output directory. By default, all dependencies will be written
+ out. To choose a smaller subset of dependencies to be written, pass a
+ list of package names in the dependencies parameter. If the format of the
+ incoming spec is not json, that can be specified with the spec_format
+ parameter. This can be used to convert from yaml specfiles to the
+ json format."""
+ if spec_format == "json":
root_spec = Spec.from_json(root_spec_info)
- elif spec_format == 'yaml':
+ elif spec_format == "yaml":
root_spec = Spec.from_yaml(root_spec_info)
else:
- raise SpecParseError('Unrecognized spec format {0}.'.format(
- spec_format))
+ raise SpecParseError("Unrecognized spec format {0}.".format(spec_format))
dep_list = dependencies
if not dep_list:
@@ -5399,18 +5337,18 @@ def save_dependency_specfiles(
for dep_name in dep_list:
if dep_name not in root_spec:
- msg = 'Dependency {0} does not exist in root spec {1}'.format(
- dep_name, root_spec.name)
+ msg = "Dependency {0} does not exist in root spec {1}".format(dep_name, root_spec.name)
raise SpecDependencyNotFoundError(msg)
dep_spec = root_spec[dep_name]
- json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))
+ json_path = os.path.join(output_directory, "{0}.json".format(dep_name))
- with open(json_path, 'w') as fd:
+ with open(json_path, "w") as fd:
fd.write(dep_spec.to_json(hash=ht.dag_hash))
class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs."""
+
def __init__(self, parse_error):
super(SpecParseError, self).__init__(parse_error.message)
self.string = parse_error.string
@@ -5441,9 +5379,11 @@ class DuplicateCompilerSpecError(spack.error.SpecError):
class UnsupportedCompilerError(spack.error.SpecError):
"""Raised when the user asks for a compiler spack doesn't know about."""
+
def __init__(self, compiler_name):
super(UnsupportedCompilerError, self).__init__(
- "The '%s' compiler is not yet supported." % compiler_name)
+ "The '%s' compiler is not yet supported." % compiler_name
+ )
class DuplicateArchitectureError(spack.error.SpecError):
@@ -5452,96 +5392,105 @@ class DuplicateArchitectureError(spack.error.SpecError):
class InconsistentSpecError(spack.error.SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
- constraints."""
+ constraints."""
class InvalidDependencyError(spack.error.SpecError):
"""Raised when a dependency in a spec is not actually a dependency
- of the package."""
+ of the package."""
+
def __init__(self, pkg, deps):
self.invalid_deps = deps
super(InvalidDependencyError, self).__init__(
- 'Package {0} does not depend on {1}'.format(
- pkg, spack.util.string.comma_or(deps)))
+ "Package {0} does not depend on {1}".format(pkg, spack.util.string.comma_or(deps))
+ )
class NoProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
- virtual dependency.
+ virtual dependency.
"""
+
def __init__(self, vpkg):
super(NoProviderError, self).__init__(
- "No providers found for virtual package: '%s'" % vpkg)
+ "No providers found for virtual package: '%s'" % vpkg
+ )
self.vpkg = vpkg
class MultipleProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
- virtual dependency.
+ virtual dependency.
"""
+
def __init__(self, vpkg, providers):
"""Takes the name of the vpkg"""
super(MultipleProviderError, self).__init__(
- "Multiple providers found for '%s': %s"
- % (vpkg, [str(s) for s in providers]))
+ "Multiple providers found for '%s': %s" % (vpkg, [str(s) for s in providers])
+ )
self.vpkg = vpkg
self.providers = providers
class UnsatisfiableSpecNameError(spack.error.UnsatisfiableSpecError):
"""Raised when two specs aren't even for the same package."""
+
def __init__(self, provided, required):
- super(UnsatisfiableSpecNameError, self).__init__(
- provided, required, "name")
+ super(UnsatisfiableSpecNameError, self).__init__(provided, required, "name")
class UnsatisfiableVersionSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec version conflicts with package constraints."""
+
def __init__(self, provided, required):
- super(UnsatisfiableVersionSpecError, self).__init__(
- provided, required, "version")
+ super(UnsatisfiableVersionSpecError, self).__init__(provided, required, "version")
class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec comiler conflicts with package constraints."""
+
def __init__(self, provided, required):
- super(UnsatisfiableCompilerSpecError, self).__init__(
- provided, required, "compiler")
+ super(UnsatisfiableCompilerSpecError, self).__init__(provided, required, "compiler")
class UnsatisfiableCompilerFlagSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
+
def __init__(self, provided, required):
super(UnsatisfiableCompilerFlagSpecError, self).__init__(
- provided, required, "compiler_flags")
+ provided, required, "compiler_flags"
+ )
class UnsatisfiableArchitectureSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
+
def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(
- provided, required, "architecture")
+ provided, required, "architecture"
+ )
class UnsatisfiableProviderSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a provider is supplied but constraints don't match
- a vpkg requirement"""
+ a vpkg requirement"""
+
def __init__(self, provided, required):
- super(UnsatisfiableProviderSpecError, self).__init__(
- provided, required, "provider")
+ super(UnsatisfiableProviderSpecError, self).__init__(provided, required, "provider")
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(spack.error.UnsatisfiableSpecError):
"""Raised when some dependency of constrained specs are incompatible"""
+
def __init__(self, provided, required):
- super(UnsatisfiableDependencySpecError, self).__init__(
- provided, required, "dependency")
+ super(UnsatisfiableDependencySpecError, self).__init__(provided, required, "dependency")
class UnconstrainableDependencySpecError(spack.error.SpecError):
"""Raised when attempting to constrain by an anonymous dependency spec"""
+
def __init__(self, spec):
msg = "Cannot constrain by spec '%s'. Cannot constrain by a" % spec
msg += " spec containing anonymous dependencies"
@@ -5550,25 +5499,22 @@ class UnconstrainableDependencySpecError(spack.error.SpecError):
class AmbiguousHashError(spack.error.SpecError):
def __init__(self, msg, *specs):
- spec_fmt = '{namespace}.{name}{@version}{%compiler}{compiler_flags}'
- spec_fmt += '{variants}{arch=architecture}{/hash:7}'
- specs_str = '\n ' + '\n '.join(spec.format(spec_fmt)
- for spec in specs)
+ spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
+ spec_fmt += "{variants}{arch=architecture}{/hash:7}"
+ specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
super(AmbiguousHashError, self).__init__(msg + specs_str)
class InvalidHashError(spack.error.SpecError):
def __init__(self, spec, hash):
super(InvalidHashError, self).__init__(
- "The spec specified by %s does not match provided spec %s"
- % (hash, spec))
+ "The spec specified by %s does not match provided spec %s" % (hash, spec)
+ )
class NoSuchHashError(spack.error.SpecError):
def __init__(self, hash):
- super(NoSuchHashError, self).__init__(
- "No installed spec matches the hash: '%s'"
- % hash)
+ super(NoSuchHashError, self).__init__("No installed spec matches the hash: '%s'" % hash)
class SpecFilenameError(spack.error.SpecError):
@@ -5583,8 +5529,8 @@ class RedundantSpecError(spack.error.SpecError):
def __init__(self, spec, addition):
super(RedundantSpecError, self).__init__(
"Attempting to add %s to spec %s which is already concrete."
- " This is likely the result of adding to a spec specified by hash."
- % (addition, spec))
+ " This is likely the result of adding to a spec specified by hash." % (addition, spec)
+ )
class SpecFormatStringError(spack.error.SpecError):
@@ -5593,21 +5539,20 @@ class SpecFormatStringError(spack.error.SpecError):
class SpecFormatSigilError(SpecFormatStringError):
"""Called for mismatched sigils and attributes in format strings"""
+
def __init__(self, sigil, requirement, used):
- msg = 'The sigil %s may only be used for %s.' % (sigil, requirement)
- msg += ' It was used with the attribute %s.' % used
+ msg = "The sigil %s may only be used for %s." % (sigil, requirement)
+ msg += " It was used with the attribute %s." % used
super(SpecFormatSigilError, self).__init__(msg)
class ConflictsInSpecError(spack.error.SpecError, RuntimeError):
def __init__(self, spec, matches):
- message = 'Conflicts in concretized spec "{0}"\n'.format(
- spec.short_spec
- )
+ message = 'Conflicts in concretized spec "{0}"\n'.format(spec.short_spec)
visited = set()
- long_message = ''
+ long_message = ""
match_fmt_default = '{0}. "{1}" conflicts with "{2}"\n'
match_fmt_custom = '{0}. "{1}" conflicts with "{2}" [{3}]\n'
@@ -5616,8 +5561,8 @@ class ConflictsInSpecError(spack.error.SpecError, RuntimeError):
if s not in visited:
visited.add(s)
- long_message += 'List of matching conflicts for spec:\n\n'
- long_message += s.tree(indent=4) + '\n'
+ long_message += "List of matching conflicts for spec:\n\n"
+ long_message += s.tree(indent=4) + "\n"
if msg is None:
long_message += match_fmt_default.format(idx + 1, c, w)