author     Todd Gamblin <tgamblin@llnl.gov>  2013-12-07 13:51:46 -0800
committer  Todd Gamblin <tgamblin@llnl.gov>  2013-12-07 13:51:46 -0800
commit     87fedc7e1e132e52405ee4e0105bb8267ae57c1b (patch)
tree       7567e3612ae445fc1b7de149435ff720da21be68 /lib
parent     344e902b155a62a911b637f2feb71bbeb56c4a95 (diff)
Added support for virtual dependencies ("provides")
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/cmd/spec.py                        4
-rw-r--r--  lib/spack/spack/concretize.py                     13
-rw-r--r--  lib/spack/spack/package.py                        48
-rw-r--r--  lib/spack/spack/packages/__init__.py              98
-rw-r--r--  lib/spack/spack/relations.py                      88
-rw-r--r--  lib/spack/spack/spec.py                          273
-rw-r--r--  lib/spack/spack/test/concretize.py                30
-rw-r--r--  lib/spack/spack/test/mock_packages/callpath.py     2
-rw-r--r--  lib/spack/spack/test/mock_packages/mpileaks.py     2
-rw-r--r--  lib/spack/spack/test/spec_dag.py                 130
10 files changed, 533 insertions, 155 deletions
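
For orientation, here is a minimal sketch of the two sides of the new feature: a provider package declares provides("mpi"), and consumers depend on the virtual name "mpi" instead of a specific implementation, so either provider satisfies the dependency (e.g. "spack install mpileaks ^mvapich" or "spack install mpileaks ^mpich", as the relations.py docstring below puts it). The class name, URL, and the "from spack import *" preamble are illustrative assumptions, not part of this commit; in real use each class lives in its own package file.

    from spack import *          # typical package-file preamble (assumed)

    class Mvapich(Package):      # hypothetical provider package
        """Installing this package satisfies the virtual 'mpi' dependency."""
        homepage = "http://mvapich.cse.ohio-state.edu"

        provides("mpi")

        def install(self, prefix):
            configure("--prefix=%s" % prefix)
            make()
            make("install")

    # A consumer simply writes depends_on("mpi"); see the mock packages
    # further down, which switch from depends_on("mpich") to depends_on("mpi").
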
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index d8146405c9..bce93d45e1 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -19,7 +19,3 @@ def spec(parser, args):
spec.concretize()
print spec.tree(color=True)
-
- pkg = spec.package
- wc = url.wildcard_version(pkg.url)
- print wc
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 2d33a544e0..e4674b8959 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -11,6 +11,7 @@ TODO: make this customizable and allow users to configure
"""
import spack.arch
import spack.compilers
+import spack.packages
from spack.version import *
from spack.spec import *
@@ -84,3 +85,15 @@ class DefaultConcretizer(object):
raise spack.spec.UnknownCompilerError(str(spec.compiler))
else:
spec.compiler = spack.compilers.default_compiler()
+
+
+ def choose_provider(self, spec, providers):
+ """This is invoked for virtual specs. Given a spec with a virtual name,
+ say "mpi", and a list of specs of possible providers of that spec,
+ select a provider and return it.
+
+ Default implementation just chooses the last provider in sorted order.
+ """
+ assert(spec.virtual)
+ assert(providers)
+ return sorted(providers)[-1]
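
The default policy above just takes the last provider in sorted order. A site that wants a different policy could subclass the concretizer and override the new hook; a minimal sketch (the subclass name and the mpich preference are made up, and wiring the subclass in as spack.concretizer is outside this diff):

    from spack.concretize import DefaultConcretizer

    class PreferMpich(DefaultConcretizer):
        def choose_provider(self, spec, providers):
            """Prefer providers whose name starts with 'mpich'; otherwise
            fall back to the default policy (last provider, sorted order)."""
            preferred = [p for p in providers if p.name.startswith('mpich')]
            if preferred:
                return sorted(preferred)[-1]
            return super(PreferMpich, self).choose_provider(spec, providers)
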
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index d9f76b411e..5f98380bf5 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -391,8 +391,10 @@ class Package(object):
return tuple(self._dependents)
- def preorder_traversal(self, visited=None):
+ def preorder_traversal(self, visited=None, **kwargs):
"""This does a preorder traversal of the package's dependence DAG."""
+ virtual = kwargs.get("virtual", False)
+
if visited is None:
visited = set()
@@ -400,16 +402,41 @@ class Package(object):
return
visited.add(self.name)
- yield self
+ if not virtual:
+ yield self
+
for name in sorted(self.dependencies.keys()):
spec = self.dependencies[name]
- for pkg in packages.get(name).preorder_traversal(visited):
+
+ # currently, we do not descend into virtual dependencies, as this
+ # makes doing a sensible traversal much harder. We just assume that
+ # ANY of the virtual deps will work, which might not be true (due to
+ # conflicts or unsatisfiable specs). For now this is ok but we might
+ # want to reinvestigate if we start using a lot of complicated virtual
+ # dependencies
+ # TODO: reinvestigate this.
+ if spec.virtual:
+ if virtual:
+ yield spec
+ continue
+
+ for pkg in packages.get(name).preorder_traversal(visited, **kwargs):
yield pkg
def validate_dependencies(self):
"""Ensure that this package and its dependencies all have consistent
constraints on them.
+
+ NOTE that this will NOT find sanity problems through a virtual
+ dependency. Virtual deps complicate the problem because we
+ don't know in advance which ones conflict with others in the
+ dependency DAG. If there's more than one virtual dependency,
+ it's a full-on SAT problem, so hold off on this for now.
+ The vdeps are actually skipped in preorder_traversal, so see
+ that for details.
+
+ TODO: investigate validating virtual dependencies.
"""
# This algorithm just attempts to merge all the constraints on the same
# package together, loses information about the source of the conflict.
@@ -432,13 +459,14 @@ class Package(object):
% (self.name, e.message))
- @property
- @memoized
- def all_dependencies(self):
- """Dict(str -> Package) of all transitive dependencies of this package."""
- all_deps = {name : dep for dep in self.preorder_traversal}
- del all_deps[self.name]
- return all_deps
+ def provides(self, vpkg_name):
+ """True if this package provides a virtual package with the specified name."""
+ return vpkg_name in self.provided
+
+
+ def virtual_dependencies(self, visited=None):
+ for spec in sorted(set(self.preorder_traversal(virtual=True))):
+ yield spec
@property
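
A quick usage sketch for the new traversal option and the virtual_dependencies() helper; 'mpileaks' is the demo package used in the tests below, and the call assumes that name exists in the active package repository:

    import spack.packages as packages

    pkg = packages.get('mpileaks')       # any package name known to the repo

    # preorder_traversal(virtual=True) yields only the virtual dependency
    # specs; virtual_dependencies() de-duplicates and sorts that stream.
    for vspec in pkg.virtual_dependencies():
        print vspec                      # e.g. mpi
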
diff --git a/lib/spack/spack/packages/__init__.py b/lib/spack/spack/packages/__init__.py
index ecdd380e0c..d4d1d0b786 100644
--- a/lib/spack/spack/packages/__init__.py
+++ b/lib/spack/spack/packages/__init__.py
@@ -8,6 +8,7 @@ import glob
import spack
import spack.error
import spack.spec
+import spack.tty as tty
from spack.util.filesystem import new_path
from spack.util.lang import list_modules
import spack.arch as arch
@@ -19,7 +20,60 @@ valid_package_re = r'^\w[\w-]*$'
invalid_package_re = r'[_-][_-]+'
instances = {}
-providers = {}
+
+
+class ProviderIndex(object):
+ """This is a dict of dicts used for finding providers of particular
+ virtual dependencies. The dict of dicts looks like:
+
+ { vpkg name :
+ { full vpkg spec : package providing spec } }
+
+ Callers can use this to first find which packages provide a vpkg,
+ then find a matching full spec. e.g., in this scenario:
+
+ { 'mpi' :
+ { mpi@:1.1 : mpich,
+ mpi@:2.3 : mpich2@1.9: } }
+
+ Calling find_provider(spec) will find a package that provides a
+ matching implementation of MPI.
+ """
+ def __init__(self, providers):
+ """Takes a list of provider packagse and build an index of the virtual
+ packages they provide."""
+ self.providers = {}
+ self.add(*providers)
+
+
+ def add(self, *providers):
+ """Look at the provided map on the provider packages, invert it and
+ add it to this provider index."""
+ for pkg in providers:
+ for provided_spec, provider_spec in pkg.provided.iteritems():
+ provided_name = provided_spec.name
+ if provided_name not in self.providers:
+ self.providers[provided_name] = {}
+ self.providers[provided_name][provided_spec] = provider_spec
+
+
+ def providers_for(self, *vpkg_specs):
+ """Gives names of all packages that provide virtual packages
+ with the supplied names."""
+ packages = set()
+ for vspec in vpkg_specs:
+ # Allow string names to be passed as input, as well as specs
+ if type(vspec) == str:
+ vspec = spack.spec.Spec(vspec)
+
+ # Add all the packages that satisfy the vpkg spec.
+ if vspec.name in self.providers:
+ for provider_spec, pkg in self.providers[vspec.name].items():
+ if provider_spec.satisfies(vspec):
+ packages.add(pkg)
+
+ # Return packages in order
+ return sorted(packages)
def get(pkg_name):
@@ -30,22 +84,15 @@ def get(pkg_name):
return instances[pkg_name]
-def get_providers(vpkg_name):
- if not providers:
- compute_providers()
-
- if not vpkg_name in providers:
- raise UnknownPackageError("No such virtual package: %s" % vpkg_name)
-
- return providers[vpkg_name]
-
+def providers_for(vpkg_spec):
+ if providers_for.index is None:
+ providers_for.index = ProviderIndex(all_packages())
-def compute_providers():
- for pkg in all_packages():
- for vpkg in pkg.provided:
- if vpkg not in providers:
- providers[vpkg] = []
- providers[vpkg].append(pkg)
+ providers = providers_for.index.providers_for(vpkg_spec)
+ if not providers:
+ raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
+ return providers
+providers_for.index = None
def valid_package_name(pkg_name):
@@ -99,6 +146,13 @@ def exists(pkg_name):
return os.path.exists(filename_for_package_name(pkg_name))
+def packages_module():
+ # TODO: replace this with a proper package DB class, instead of this hackiness.
+ packages_path = re.sub(spack.module_path + '\/+', 'spack.', spack.packages_path)
+ packages_module = re.sub(r'\/', '.', packages_path)
+ return packages_module
+
+
def get_class_for_package_name(pkg_name):
file_name = filename_for_package_name(pkg_name)
@@ -115,22 +169,18 @@ def get_class_for_package_name(pkg_name):
if not re.match(r'%s' % spack.module_path, spack.packages_path):
raise RuntimeError("Packages path is not a submodule of spack.")
- # TODO: replace this with a proper package DB class, instead of this hackiness.
- packages_path = re.sub(spack.module_path + '\/+', 'spack.', spack.packages_path)
- packages_module = re.sub(r'\/', '.', packages_path)
-
class_name = pkg_name.capitalize()
try:
- module_name = "%s.%s" % (packages_module, pkg_name)
+ module_name = "%s.%s" % (packages_module(), pkg_name)
module = __import__(module_name, fromlist=[class_name])
except ImportError, e:
tty.die("Error while importing %s.%s:\n%s" % (pkg_name, class_name, e.message))
- klass = getattr(module, class_name)
- if not inspect.isclass(klass):
+ cls = getattr(module, class_name)
+ if not inspect.isclass(cls):
tty.die("%s.%s is not a class" % (pkg_name, class_name))
- return klass
+ return cls
def compute_dependents():
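
Example queries against the new index. The package names follow the ProviderIndex docstring above (mpich, mpich2) and are assumptions about what the package repository actually contains:

    import spack.packages as packages
    from spack.spec import Spec

    # Module-level helper: lazily builds one index over all packages and
    # caches it on the function object; raises UnknownPackageError when
    # nothing provides the requested virtual spec.
    for provider in packages.providers_for('mpi@2:'):
        print provider

    # An index can also be built over an explicit list of packages.
    index = packages.ProviderIndex([packages.get('mpich'), packages.get('mpich2')])
    print index.providers_for(Spec('mpi'))
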
diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py
index 323c9db2ac..2c8395d408 100644
--- a/lib/spack/spack/relations.py
+++ b/lib/spack/spack/relations.py
@@ -44,9 +44,15 @@ provides
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
-import sys
+import re
import inspect
+import importlib
+
+import spack
import spack.spec
+from spack.spec import Spec
+import spack.error
+from spack.packages import packages_module
def _caller_locals():
@@ -62,8 +68,61 @@ def _caller_locals():
del stack
+def _ensure_caller_is_spack_package():
+ """Make sure that the caller is a spack package. If it's not,
+ raise ScopeError. If it is, return its name."""
+ stack = inspect.stack()
+ try:
+ # get calling function name (the relation)
+ relation = stack[1][3]
+
+ # Make sure locals contain __module__
+ caller_locals = stack[2][0].f_locals
+ finally:
+ del stack
+
+ if not '__module__' in caller_locals:
+ raise ScopeError(relation)
+
+ module_name = caller_locals['__module__']
+ if not module_name.startswith(packages_module()):
+ raise ScopeError(relation)
+
+ base_name = module_name.split('.')[-1]
+ return base_name
+
+
+def _parse_local_spec(spec_like, pkg_name):
+ """Allow the user to omit the package name part of a spec in relations.
+ e.g., provides('mpi@2.1', when='@1.9:') says that this package provides
+ MPI 2.1 when its version is higher than 1.9.
+ """
+ if type(spec_like) not in (str, Spec):
+ raise TypeError('spec must be Spec or spec string. Found %s'
+ % type(spec_like))
+
+ if type(spec_like) == str:
+ try:
+ local_spec = Spec(spec_like)
+ except ParseError:
+ local_spec = Spec(pkg_name + spec_like)
+ if local_spec.name != pkg_name: raise ValueError(
+ "Invalid spec for package %s: %s" % (pkg_name, spec_like))
+ else:
+ local_spec = spec_like
+
+ if local_spec.name != pkg_name:
+ raise ValueError("Spec name '%s' must match package name '%s'"
+ % (spec_like.name, pkg_name))
+
+ return local_spec
+
+
+
+
def _make_relation(map_name):
def relation_fun(*specs):
+ _ensure_caller_is_spack_package()
package_map = _caller_locals().setdefault(map_name, {})
for string in specs:
for spec in spack.spec.parse(string):
@@ -76,14 +135,31 @@ def _make_relation(map_name):
depends_on = _make_relation("dependencies")
-"""Allows packages to provide a virtual dependency. If a package provides
- 'mpi', other packages can declare that they depend on "mpi", and spack
- can use the providing package to satisfy the dependency.
-"""
-provides = _make_relation("provided")
+def provides(*specs, **kwargs):
+ """Allows packages to provide a virtual dependency. If a package provides
+ 'mpi', other packages can declare that they depend on "mpi", and spack
+ can use the providing package to satisfy the dependency.
+ """
+ pkg = _ensure_caller_is_spack_package()
+ spec_string = kwargs.get('when', pkg)
+ provider_spec = _parse_local_spec(spec_string, pkg)
+
+ provided = _caller_locals().setdefault("provided", {})
+ for string in specs:
+ for provided_spec in spack.spec.parse(string):
+ provided[provided_spec] = provider_spec
"""Packages can declare conflicts with other packages.
This can be as specific as you like: use regular spec syntax.
"""
conflicts = _make_relation("conflicted")
+
+
+
+class ScopeError(spack.error.SpackError):
+ """This is raised when a relation is called from outside a spack package."""
+ def __init__(self, relation):
+ super(ScopeError, self).__init__(
+ "Cannot inovke '%s' from outside of a Spack package!" % relation)
+ self.relation = relation
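
The keyword form of provides() lets a package provide different virtual versions depending on its own version, as described in the _parse_local_spec docstring above. A hypothetical package file (versions, URL, and the "from spack import *" preamble are made up; note that the new scope check means these relations only work inside a real package module under the spack packages directory):

    from spack import *          # assumed package-file preamble

    class Mpich2(Package):
        homepage = "http://www.mcs.anl.gov/research/projects/mpich2/"

        # Every version of this package provides some MPI (up to 2.0 here).
        provides("mpi@:2.0")

        # Versions 1.9 and up additionally provide MPI 2.1 (the example
        # used in _parse_local_spec's docstring).
        provides("mpi@2.1", when="@1.9:")

        def install(self, prefix):
            configure("--prefix=%s" % prefix)
            make()
            make("install")
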
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index e6d508c074..a7c3488cd5 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -247,12 +247,13 @@ class Spec(object):
if len(spec_list) < 1:
raise ValueError("String contains no specs: " + spec_like)
- # Take all the attributes from the first parsed spec without copying
- # This is a little bit nasty, but it's nastier to make the parser
- # write directly into this Spec object.
+ # Take all the attributes from the first parsed spec without copying.
+ # This is safe b/c we throw out the parsed spec. It's a bit nasty,
+ # but it's nastier to implement the constructor so that the parser
+ # writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
- self.parent = other.parent
+ self.dependents = other.dependents
self.versions = other.versions
self.variants = other.variants
self.architecture = other.architecture
@@ -263,11 +264,8 @@ class Spec(object):
# Note that given two specs a and b, Spec(a) copies a, but
# Spec(a, b) will copy a but just add b as a dep.
for dep in dep_like:
- if type(dep) == str:
- dep_spec = Spec(dep)
- self.dependencies[dep_spec.name] = dep_spec
- elif type(dep) == Spec:
- self.dependencies[dep.name] = dep
+ spec = dep if type(dep) == Spec else Spec(dep)
+ self._add_dependency(spec)
#
@@ -299,21 +297,31 @@ class Spec(object):
self.architecture = architecture
- def _add_dependency(self, dep):
+ def _add_dependency(self, spec):
"""Called by the parser to add another spec as a dependency."""
- if dep.name in self.dependencies:
- raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep)
- self.dependencies[dep.name] = dep
- dep.parent = self
+ if spec.name in self.dependencies:
+ raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
+ self.dependencies[spec.name] = spec
+ spec.dependents[self.name] = self
@property
def root(self):
- """Follow parent links and find the root of this spec's DAG."""
- root = self
- while root.parent is not None:
- root = root.parent
- return root
+ """Follow dependent links and find the root of this spec's DAG.
+ In spack specs, there should be a single root (the package being
+ installed). This will throw an assertion error if that is not
+ the case.
+ """
+ if not self.dependents:
+ return self
+ else:
+ # If the spec has multiple dependents, ensure that they all
+ # lead to the same place. Spack shouldn't deal with any DAGs
+ # with multiple roots, so something's wrong if we find one.
+ depiter = iter(self.dependents.values())
+ first_root = next(depiter).root
+ assert(all(first_root is d.root for d in depiter))
+ return first_root
@property
@@ -353,10 +361,10 @@ class Spec(object):
This will yield each node in the spec. Options:
unique [=True]
- When True (default) every node in the DAG is yielded only once.
- When False, the traversal will yield already visited nodes but
- not their children. This lets you see that a node ponts to
- an already-visited subgraph without descending into it.
+ When True (default), every node in the DAG is yielded only once.
+ When False, the traversal will yield already visited
+ nodes but not their children. This lets you see that a node
+ points to an already-visited subgraph without descending into it.
depth [=False]
Defaults to False. When True, yields not just nodes in the
@@ -388,31 +396,67 @@ class Spec(object):
yield (d, self) if depth else self
for key in sorted(self.dependencies.keys()):
- for spec in self.dependencies[key].preorder_traversal(
+ for result in self.dependencies[key].preorder_traversal(
visited, d+1, **kwargs):
- yield spec
+ yield result
- def _concretize_helper(self, presets):
+ def _concretize_helper(self, presets=None, visited=None):
"""Recursive helper function for concretize().
This concretizes everything bottom-up. As things are
concretized, they're added to the presets, and ancestors
will prefer the settings of their children.
"""
+ if presets is None: presets = {}
+ if visited is None: visited = set()
+
+ if self.name in visited:
+ return
+
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self.dependencies.keys()):
- self.dependencies[name]._concretize_helper(presets)
+ self.dependencies[name]._concretize_helper(presets, visited)
if self.name in presets:
self.constrain(presets[self.name])
else:
- spack.concretizer.concretize_architecture(self)
- spack.concretizer.concretize_compiler(self)
- spack.concretizer.concretize_version(self)
+ # Concretize virtual dependencies last. Because they're added
+ # to presets below, their constraints will all be merged, but we'll
+ # still need to select a concrete package later.
+ if not self.virtual:
+ spack.concretizer.concretize_architecture(self)
+ spack.concretizer.concretize_compiler(self)
+ spack.concretizer.concretize_version(self)
presets[self.name] = self
+ visited.add(self.name)
+
+
+ def _expand_virtual_packages(self):
+ """Find virtual packages in this spec, replace them with providers,
+ and normalize again to include the provider's (potentially virtual)
+ dependencies. Repeat until there are no virtual deps.
+ """
+ while True:
+ virtuals =[v for v in self.preorder_traversal() if v.virtual]
+ if not virtuals:
+ return
- def concretize(self, *presets):
+ for spec in virtuals:
+ providers = packages.providers_for(spec)
+ concrete = spack.concretizer.choose_provider(spec, providers)
+ concrete = concrete.copy()
+
+ for name, dependent in spec.dependents.items():
+ del dependent.dependencies[spec.name]
+ dependent._add_dependency(concrete)
+
+ # If there are duplicate providers or duplicate provider deps, this
+ # consolidates them and merges constraints.
+ self.normalize()
+
+
+ def concretize(self):
"""A spec is concrete if it describes one build of a package uniquely.
This will ensure that this spec is concrete.
@@ -424,48 +468,32 @@ class Spec(object):
with requirements of its packages. See flatten() and normalize() for
more details on this.
"""
- # Build specs out of user-provided presets
- specs = [Spec(p) for p in presets]
-
- # Concretize the presets first. They could be partial specs, like just
- # a particular version that the caller wants.
- for spec in specs:
- if not spec.concrete:
- try:
- spec.concretize()
- except UnsatisfiableSpecError, e:
- e.message = ("Unsatisfiable preset in concretize: %s."
- % e.message)
- raise e
-
- # build preset specs into a map
- preset_dict = {spec.name : spec for spec in specs}
-
- # Concretize bottom up, passing in presets to force concretization
- # for certain specs.
self.normalize()
- self._concretize_helper(preset_dict)
+ self._expand_virtual_packages()
+ self._concretize_helper()
- def concretized(self, *presets):
+ def concretized(self):
"""This is a non-destructive version of concretize(). First clones,
then returns a concrete version of this package without modifying
this package. """
clone = self.copy()
- clone.concretize(*presets)
+ clone.concretize()
return clone
def flat_dependencies(self):
- """Return a DependencyMap containing all dependencies with their
- constraints merged. If there are any conflicts, throw an exception.
+ """Return a DependencyMap containing all of this spec's dependencies
+ with their constraints merged. If there are any conflicts, throw
+ an exception.
This will work even on specs that are not normalized; i.e. specs
that have two instances of the same dependency in the DAG.
This is used as the first step of normalization.
"""
# This ensures that the package descriptions themselves are consistent
- self.package.validate_dependencies()
+ if not self.virtual:
+ self.package.validate_dependencies()
# Once that is guaranteed, we know any constraint violations are due
# to the spec -- so they're the user's fault, not Spack's.
@@ -497,22 +525,54 @@ class Spec(object):
self.dependencies = self.flat_dependencies()
- def _normalize_helper(self, visited, spec_deps):
+ def _normalize_helper(self, visited, spec_deps, provider_index):
"""Recursive helper function for _normalize."""
if self.name in visited:
return
visited.add(self.name)
+ # if we descend into a virtual spec, there's nothing more
+ # to normalize. Concretize will finish resolving it later.
+ if self.virtual:
+ return
+
# Combine constraints from package dependencies with
- # information in this spec's dependencies.
+ # constraints on the spec's dependencies.
pkg = packages.get(self.name)
- for name, pkg_dep in self.package.dependencies.iteritems():
+ for name, pkg_dep in self.package.dependencies.items():
+ # If it's a virtual dependency, try to find a provider
+ if pkg_dep.virtual:
+ providers = provider_index.providers_for(pkg_dep)
+
+ # If there is a provider for the vpkg, then use that instead of
+ # the virtual package. If there isn't a provider, just merge
+ # constraints on the virtual package.
+ if providers:
+ # Can't have multiple providers for the same thing in one spec.
+ if len(providers) > 1:
+ raise MultipleProviderError(pkg_dep, providers)
+
+ pkg_dep = providers[0]
+ name = pkg_dep.name
+
+ else:
+ # The user might have required something insufficient for
+ # pkg_dep -- so we'll get a conflict. e.g., user asked for
+ # mpi@:1.1 but some package required mpi@2.1:.
+ providers = provider_index.providers_for(name)
+ if len(providers) > 1:
+ raise MultipleProviderError(pkg_dep, providers)
+ if providers:
+ raise UnsatisfiableProviderSpecError(providers[0], pkg_dep)
+
+
if name not in spec_deps:
- # Clone the spec from the package
+ # If the spec doesn't reference a dependency that this package
+ # needs, then clone it from the package description.
spec_deps[name] = pkg_dep.copy()
try:
- # intersect package information with spec info
+ # Constrain package information with spec info
spec_deps[name].constrain(pkg_dep)
except UnsatisfiableSpecError, e:
@@ -523,35 +583,61 @@ class Spec(object):
raise e
# Add merged spec to my deps and recurse
- self._add_dependency(spec_deps[name])
- self.dependencies[name]._normalize_helper(visited, spec_deps)
+ dependency = spec_deps[name]
+ self._add_dependency(dependency)
+ dependency._normalize_helper(visited, spec_deps, provider_index)
def normalize(self):
- # Ensure first that all packages exist.
+ """When specs are parsed, any dependencies specified are hanging off
+ the root, and ONLY the ones that were explicitly provided are there.
+ Normalization turns a partial flat spec into a DAG, where:
+ 1) ALL dependencies of the root package are in the DAG.
+ 2) Each node's dependencies dict only contains its direct deps.
+ 3) There is only ONE unique spec for each package in the DAG.
+ - This includes virtual packages. If there is a non-virtual
+ package that provides a virtual package that is in the spec,
+ then we replace the virtual package with the non-virtual one.
+ 4) The spec DAG matches package DAG.
+ """
+ # Ensure first that all packages in the DAG exist.
self.validate_package_names()
- # Then ensure that the packages mentioned are sane, that the
+ # Then ensure that the packages referenced are sane, that the
# provided spec is sane, and that all dependency specs are in the
# root node of the spec. flat_dependencies will do this for us.
spec_deps = self.flat_dependencies()
self.dependencies.clear()
+ # Figure out which of the user-provided deps provide virtual deps.
+ # Remove virtual deps that are already provided by something in the spec
+ spec_packages = [d.package for d in spec_deps.values() if not d.virtual]
+
+ index = packages.ProviderIndex(spec_packages)
visited = set()
- self._normalize_helper(visited, spec_deps)
+ self._normalize_helper(visited, spec_deps, index)
# If there are deps specified but not visited, they're not
# actually deps of this package. Raise an error.
extra = set(spec_deps.viewkeys()).difference(visited)
+
+ # Also subtract out all the packages that provide a needed vpkg
+ vdeps = [v for v in self.package.virtual_dependencies()]
+
+ vpkg_providers = index.providers_for(*vdeps)
+ extra.difference_update(p.name for p in vpkg_providers)
+
+ # Anything left over is not a valid part of the spec.
if extra:
raise InvalidDependencyException(
self.name + " does not depend on " + comma_or(extra))
def validate_package_names(self):
- packages.get(self.name)
- for name, dep in self.dependencies.iteritems():
- dep.validate_package_names()
+ for spec in self.preorder_traversal():
+ # Don't get a package for a virtual name.
+ if not spec.virtual:
+ packages.get(spec.name)
def constrain(self, other):
@@ -593,14 +679,19 @@ class Spec(object):
def _dup(self, other, **kwargs):
- """Copy the spec other into self. This is a
- first-party, overwriting copy. This does not copy
- parent; if the other spec has a parent, this one will not.
- To duplicate an entire DAG, Duplicate the root of the DAG.
+ """Copy the spec other into self. This is an overwriting
+ copy. It does not copy any dependents (parents), but by default
+ copies dependencies.
+
+ To duplicate an entire DAG, call _dup() on the root of the DAG.
+
+ Options:
+ dependencies[=True]
+ Whether deps should be copied too. Set to false to copy a
+ spec but not its dependencies.
"""
# TODO: this needs to handle DAGs.
self.name = other.name
- self.parent = None
self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture
@@ -608,6 +699,7 @@ class Spec(object):
if other.compiler:
self.compiler = other.compiler.copy()
+ self.dependents = DependencyMap()
copy_deps = kwargs.get('dependencies', True)
if copy_deps:
self.dependencies = other.dependencies.copy()
@@ -744,11 +836,11 @@ class SpecParser(spack.parse.Parser):
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
spec.name = self.token.value
- spec.parent = None
spec.versions = VersionList()
spec.variants = VariantMap()
spec.architecture = None
spec.compiler = None
+ spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
# record this so that we know whether version is
@@ -903,6 +995,29 @@ class InvalidDependencyException(SpecError):
super(InvalidDependencyException, self).__init__(message)
+class NoProviderError(SpecError):
+ """Raised when there is no package that provides a particular
+ virtual dependency.
+ """
+ def __init__(self, vpkg):
+ super(NoProviderError, self).__init__(
+ "No providers found for virtual package: '%s'" % vpkg)
+ self.vpkg = vpkg
+
+
+class MultipleProviderError(SpecError):
+ """Raised when there is no package that provides a particular
+ virtual dependency.
+ """
+ def __init__(self, vpkg, providers):
+ """Takes the name of the vpkg"""
+ super(MultipleProviderError, self).__init__(
+ "Multiple providers found for vpkg '%s': %s"
+ % (vpkg, [str(s) for s in providers]))
+ self.vpkg = vpkg
+ self.providers = providers
+
+
class UnsatisfiableSpecError(SpecError):
"""Raised when a spec conflicts with package constraints.
Provide the requirement that was violated when raising."""
@@ -940,3 +1055,11 @@ class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(
provided, required, "architecture")
+
+
+class UnsatisfiableProviderSpecError(UnsatisfiableSpecError):
+ """Raised when a provider is supplied but constraints don't match
+ a vpkg requirement"""
+ def __init__(self, provided, required):
+ super(UnsatisfiableProviderSpecError, self).__init__(
+ provided, required, "provider")
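
Putting the spec.py changes together: concretizing a spec that mentions a virtual package now runs normalize(), replaces virtual nodes with a chosen provider in _expand_virtual_packages(), and then fills in the rest bottom-up. A sketch of the user-visible flow ('mpileaks' is the demo/mock package used in the tests, and the final step assumes at least one package in the repository declares provides('mpi')):

    from spack.spec import Spec

    spec = Spec('mpileaks ^mpi@:2')

    # normalize() expands the full DAG; the 'mpi' node stays virtual.
    spec.normalize()

    # concretize() re-normalizes, swaps each virtual node for a provider
    # chosen via spack.concretizer.choose_provider(), then concretizes
    # versions, compilers, and architectures bottom-up.
    spec.concretize()
    print spec.tree(color=True)
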
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index c95db03609..d1a82a58b1 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -18,9 +18,9 @@ class ConcretizeTest(MockPackagesTest):
self.assertEqual(abstract.architecture, concrete.architecture)
- def check_concretize(self, abstract_spec, *presets):
+ def check_concretize(self, abstract_spec):
abstract = Spec(abstract_spec)
- concrete = abstract.concretized(*presets)
+ concrete = abstract.concretized()
self.assertFalse(abstract.concrete)
self.assertTrue(concrete.concrete)
@@ -29,32 +29,14 @@ class ConcretizeTest(MockPackagesTest):
return concrete
- def check_presets(self, abstract, *presets):
- abstract = Spec(abstract)
- concrete = self.check_concretize(abstract, *presets)
-
- flat_deps = concrete.flat_dependencies()
- for preset in presets:
- preset_spec = Spec(preset)
- name = preset_spec.name
-
- self.assertTrue(name in flat_deps)
- self.check_spec(preset_spec, flat_deps[name])
-
- return concrete
-
-
def test_concretize_no_deps(self):
self.check_concretize('libelf')
self.check_concretize('libelf@0.8.13')
def test_concretize_dag(self):
- self.check_concretize('mpileaks')
- self.check_concretize('callpath')
-
+ spec = Spec('mpileaks')
+ spec.normalize()
- def test_concretize_with_presets(self):
- self.check_presets('mpileaks', 'callpath@0.8')
- self.check_presets('mpileaks', 'callpath@0.9', 'dyninst@8.0+debug')
- self.check_concretize('callpath', 'libelf@0.8.13+debug~foo', 'mpich@1.0')
+ self.check_concretize('callpath')
+ self.check_concretize('mpileaks')
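
check_concretize above boils down to the following; 'libelf' is one of the mock packages, and the concrete result depends on the active package repository and concretizer defaults:

    from spack.spec import Spec

    abstract = Spec('libelf@0.8.13')
    concrete = abstract.concretized()   # non-destructive: copies, then concretizes

    assert not abstract.concrete        # the original spec stays abstract
    assert concrete.concrete            # the copy has version/compiler/arch pinned
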
diff --git a/lib/spack/spack/test/mock_packages/callpath.py b/lib/spack/spack/test/mock_packages/callpath.py
index edc0833de4..ec59172d82 100644
--- a/lib/spack/spack/test/mock_packages/callpath.py
+++ b/lib/spack/spack/test/mock_packages/callpath.py
@@ -10,7 +10,7 @@ class Callpath(Package):
1.0 : 'bf03b33375afa66fe0efa46ce3f4b17a' }
depends_on("dyninst")
- depends_on("mpich")
+ depends_on("mpi")
def install(self, prefix):
configure("--prefix=%s" % prefix)
diff --git a/lib/spack/spack/test/mock_packages/mpileaks.py b/lib/spack/spack/test/mock_packages/mpileaks.py
index c355bb226f..e99251a85c 100644
--- a/lib/spack/spack/test/mock_packages/mpileaks.py
+++ b/lib/spack/spack/test/mock_packages/mpileaks.py
@@ -10,7 +10,7 @@ class Mpileaks(Package):
2.2 : None,
2.3 : None }
- depends_on("mpich")
+ depends_on("mpi")
depends_on("callpath")
def install(self, prefix):
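
With both mock packages now depending on the virtual name, normalization leaves an unresolved 'mpi' node in the DAG until concretization picks a provider. A quick check mirroring test_concretize_dag in the test changes above (the mock package repository is assumed to be active):

    from spack.spec import Spec

    spec = Spec('mpileaks')
    spec.normalize()

    mpi = spec.dependencies['mpi']   # direct dependency of the root after normalize
    assert mpi.virtual               # still virtual; no provider chosen yet
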
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 452884e539..914cd967fa 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -28,6 +28,44 @@ class ValidationTest(MockPackagesTest):
spec.package.validate_dependencies)
+ def test_preorder_traversal(self):
+ dag = Spec('mpileaks',
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpich')),
+ Spec('mpich'))
+ dag.normalize()
+
+ unique_names = [
+ 'mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'mpich']
+ unique_depths = [0,1,2,3,4,2]
+
+ non_unique_names = [
+ 'mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'libelf',
+ 'mpich', 'mpich']
+ non_unique_depths = [0,1,2,3,4,3,2,1]
+
+ self.assertListEqual(
+ [x.name for x in dag.preorder_traversal()],
+ unique_names)
+
+ self.assertListEqual(
+ [(x, y.name) for x,y in dag.preorder_traversal(depth=True)],
+ zip(unique_depths, unique_names))
+
+ self.assertListEqual(
+ [x.name for x in dag.preorder_traversal(unique=False)],
+ non_unique_names)
+
+ self.assertListEqual(
+ [(x, y.name) for x,y in dag.preorder_traversal(unique=False, depth=True)],
+ zip(non_unique_depths, non_unique_names))
+
+
+
def test_conflicting_spec_constraints(self):
mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
try:
@@ -63,6 +101,56 @@ class ValidationTest(MockPackagesTest):
spec.normalize()
+ def test_normalize_with_virtual_spec(self):
+ dag = Spec('mpileaks',
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpi')),
+ Spec('mpi'))
+ dag.normalize()
+
+ # make sure nothing with the same name occurs twice
+ counts = {}
+ for spec in dag.preorder_traversal(keyfun=id):
+ if not spec.name in counts:
+ counts[spec.name] = 0
+ counts[spec.name] += 1
+
+ for name in counts:
+ self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name)
+
+
+ def check_links(self, spec_to_check):
+ for spec in spec_to_check.preorder_traversal():
+ for dependent in spec.dependents.values():
+ self.assertIn(
+ spec.name, dependent.dependencies,
+ "%s not in dependencies of %s" % (spec.name, dependent.name))
+
+ for dependency in spec.dependencies.values():
+ self.assertIn(
+ spec.name, dependency.dependents,
+ "%s not in dependents of %s" % (spec.name, dependency.name))
+
+
+ def test_dependents_and_dependencies_are_correct(self):
+ spec = Spec('mpileaks',
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpi')),
+ Spec('mpi'))
+
+ self.check_links(spec)
+ spec.normalize()
+ self.check_links(spec)
+
+
def test_unsatisfiable_version(self):
set_pkg_dep('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
@@ -134,29 +222,51 @@ class ValidationTest(MockPackagesTest):
expected_normalized = Spec(
'mpileaks',
Spec('callpath',
- Spec('dyninst', Spec('libdwarf', libelf),
+ Spec('dyninst',
+ Spec('libdwarf',
+ libelf),
libelf),
- mpich), mpich)
+ mpich),
+ mpich)
- expected_non_dag = Spec(
+ expected_non_unique_nodes = Spec(
'mpileaks',
Spec('callpath',
- Spec('dyninst', Spec('libdwarf', Spec('libelf@1.8.11')),
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf@1.8.11')),
Spec('libelf@1.8.11')),
- mpich), Spec('mpich'))
+ mpich),
+ Spec('mpich'))
- self.assertEqual(expected_normalized, expected_non_dag)
+ self.assertEqual(expected_normalized, expected_non_unique_nodes)
- self.assertEqual(str(expected_normalized), str(expected_non_dag))
- self.assertEqual(str(spec), str(expected_non_dag))
+ self.assertEqual(str(expected_normalized), str(expected_non_unique_nodes))
+ self.assertEqual(str(spec), str(expected_non_unique_nodes))
self.assertEqual(str(expected_normalized), str(spec))
self.assertEqual(spec, expected_flat)
self.assertNotEqual(spec, expected_normalized)
- self.assertNotEqual(spec, expected_non_dag)
+ self.assertNotEqual(spec, expected_non_unique_nodes)
spec.normalize()
self.assertNotEqual(spec, expected_flat)
self.assertEqual(spec, expected_normalized)
- self.assertEqual(spec, expected_non_dag)
+ self.assertEqual(spec, expected_non_unique_nodes)
+
+
+ def test_normalize_with_virtual_package(self):
+ spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
+ spec.normalize()
+
+ expected_normalized = Spec(
+ 'mpileaks',
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf@1.8.11')),
+ Spec('libelf@1.8.11')),
+ Spec('mpi')), Spec('mpi'))
+
+ self.assertEqual(str(spec), str(expected_normalized))
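
Finally, a small illustration of the bidirectional links that _add_dependency() now maintains and that check_links() verifies above; this is pure spec construction and needs no package repository:

    from spack.spec import Spec

    root = Spec('mpileaks', Spec('mpi'))
    child = root.dependencies['mpi']

    assert not root.dependents                   # nothing depends on the root
    assert child.dependents['mpileaks'] is root  # the child links back to its dependent
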