Diffstat (limited to 'lib/spack/spack/spec.py')
-rw-r--r-- | lib/spack/spack/spec.py | 331 |
1 file changed, 230 insertions, 101 deletions
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 1e2da10dcc..4838fd9946 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -94,6 +94,7 @@
 import sys
 import itertools
 import hashlib
 from StringIO import StringIO
+from operator import attrgetter

 import llnl.util.tty as tty
 from llnl.util.lang import *
@@ -309,9 +310,8 @@ class DependencyMap(HashableMap):
     def __str__(self):
-        sorted_dep_names = sorted(self.keys())
         return ''.join(
-            ["^" + str(self[name]) for name in sorted_dep_names])
+            ["^" + str(self[name]) for name in sorted(self.keys())])


 @key_ordering
@@ -345,6 +345,13 @@ class Spec(object):
             self.compiler = other.compiler
             self.dependencies = other.dependencies

+        # Specs are by default not assumed to be normal, but in some
+        # cases we've read them from a file and want to assume they are
+        # normal.  This allows us to manipulate specs that Spack doesn't
+        # have package.py files for.
+        self._normal = kwargs.get('normal', False)
+        self._concrete = kwargs.get('concrete', False)
+
         # This allows users to construct a spec DAG with literals.
         # Note that given two specs a and b, Spec(a) copies a, but
         # Spec(a, b) will copy a but just add b as a dep.
@@ -432,17 +439,30 @@ class Spec(object):
            If any of the name, version, architecture, compiler, or depdenencies
            are ambiguous,then it is not concrete.
         """
-        return bool(not self.virtual
-                    and self.versions.concrete
-                    and self.architecture
-                    and self.compiler and self.compiler.concrete
-                    and self.dependencies.concrete)
+        if self._concrete:
+            return True
+
+        self._concrete = bool(not self.virtual
+                              and self.versions.concrete
+                              and self.architecture
+                              and self.compiler and self.compiler.concrete
+                              and self.dependencies.concrete)
+        return self._concrete


-    def preorder_traversal(self, visited=None, d=0, **kwargs):
-        """Generic preorder traversal of the DAG represented by this spec.
+    def traverse(self, visited=None, d=0, **kwargs):
+        """Generic traversal of the DAG represented by this spec.
            This will yield each node in the spec.  Options:

+           order    [=pre|post]
+               Order to traverse spec nodes.  Defaults to preorder traversal.
+               Options are:
+
+               'pre':  Pre-order traversal; each node is yielded before its
+                       children in the dependency DAG.
+               'post': Post-order traversal; each node is yielded after its
+                       children in the dependency DAG.
+
            cover    [=nodes|edges|paths]
               Determines how extensively to cover the dag.  Possible vlaues:

@@ -460,7 +480,7 @@ class Spec(object):
              spec, but also their depth from the root in a (depth, node)
              tuple.

-           keyfun   [=id]
+           key   [=id]
              Allow a custom key function to track the identity of nodes
              in the traversal.
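For review purposes, a rough sketch of how the options documented above combine. The package names ('mpileaks', 'callpath') are hypothetical, the snippet assumes a Python 2 session in a Spack source tree where spack.spec is importable and those packages exist in the repository, and the Spec(a, b) literal form follows the constructor comment quoted earlier in this diff:

    from spack.spec import Spec

    # Spec(a, b) builds a and adds b as a dependency of it.
    spec = Spec('mpileaks', 'callpath')    # hypothetical packages

    # Default traversal: pre-order over unique nodes, root included.
    print [s.name for s in spec.traverse()]

    # Post-order, nodes identified by package name instead of object id,
    # yielding (depth, node) tuples and skipping the root.
    for depth, node in spec.traverse(order='post', key='name',
                                     depth=True, root=False):
        print '%d %s' % (depth, node.name)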
@@ -472,44 +492,57 @@ class Spec(object):
              'parents', traverses upwards in the DAG towards the root.

        """
+        # get initial values for kwargs
        depth = kwargs.get('depth', False)
        key_fun = kwargs.get('key', id)
+        if isinstance(key_fun, basestring):
+            key_fun = attrgetter(key_fun)
        yield_root = kwargs.get('root', True)
        cover = kwargs.get('cover', 'nodes')
        direction = kwargs.get('direction', 'children')
+        order = kwargs.get('order', 'pre')

-        cover_values = ('nodes', 'edges', 'paths')
-        if cover not in cover_values:
-            raise ValueError("Invalid value for cover: %s. Choices are %s"
-                             % (cover, ",".join(cover_values)))
-
-        direction_values = ('children', 'parents')
-        if direction not in direction_values:
-            raise ValueError("Invalid value for direction: %s. Choices are %s"
-                             % (direction, ",".join(direction_values)))
+        # Make sure kwargs have legal values; raise ValueError if not.
+        def validate(name, val, allowed_values):
+            if val not in allowed_values:
+                raise ValueError("Invalid value for %s: %s. Choices are %s"
+                                 % (name, val, ",".join(allowed_values)))
+        validate('cover', cover, ('nodes', 'edges', 'paths'))
+        validate('direction', direction, ('children', 'parents'))
+        validate('order', order, ('pre', 'post'))

        if visited is None:
            visited = set()

+        key = key_fun(self)
+
+        # Node traversal does not yield visited nodes.
+        if key in visited and cover == 'nodes':
+            return
+        # Determine whether and what to yield for this node.
+        yield_me = yield_root or d > 0
        result = (d, self) if depth else self
-        key = key_fun(self)

-        if key in visited:
-            if cover == 'nodes':    return
-            if yield_root or d > 0: yield result
-            if cover == 'edges':    return
-        else:
-            if yield_root or d > 0: yield result
+        # Preorder traversal yields before successors
+        if yield_me and order == 'pre':
+            yield result

-        successors = self.dependencies
-        if direction == 'parents':
-            successors = self.dependents
+        # Edge traversal yields but skips children of visited nodes
+        if not (key in visited and cover == 'edges'):
+            # This code determines direction and yields the children/parents
+            successors = self.dependencies
+            if direction == 'parents':
+                successors = self.dependents

-        visited.add(key)
-        for name in sorted(successors):
-            child = successors[name]
-            for elt in child.preorder_traversal(visited, d+1, **kwargs):
-                yield elt
+            visited.add(key)
+            for name in sorted(successors):
+                child = successors[name]
+                for elt in child.traverse(visited, d+1, **kwargs):
+                    yield elt
+
+        # Postorder traversal yields after successors
+        if yield_me and order == 'post':
+            yield result


     @property
@@ -525,13 +558,14 @@ class Spec(object):


     def dep_hash(self, length=None):
-        """Return a hash representing the dependencies of this spec
-           This will always normalize first so that the hash is consistent.
-        """
-        self.normalize()
+        """Return a hash representing all dependencies of this spec
+           (direct and indirect).
+
+           If you want this hash to be consistent, you should
+           concretize the spec first so that it is not ambiguous.
+        """
         sha = hashlib.sha1()
-        sha.update(str(self.dependencies))
+        sha.update(self.dep_string())
         full_hash = sha.hexdigest()

         return full_hash[:length]
@@ -594,7 +628,7 @@ class Spec(object):
            a problem.
         """
         while True:
-            virtuals =[v for v in self.preorder_traversal() if v.virtual]
+            virtuals =[v for v in self.traverse() if v.virtual]
             if not virtuals:
                 return
@@ -605,8 +639,8 @@ class Spec(object):
             spec._replace_with(concrete)

         # If there are duplicate providers or duplicate provider deps, this
         # consolidates them and merges constraints.
-        self.normalize()
+        self.normalize(force=True)


     def concretize(self):
@@ -621,9 +655,13 @@ class Spec(object):
            with requirements of its pacakges.  See flatten() and normalize()
            for more details on this.
         """
+        if self._concrete:
+            return
+
         self.normalize()
         self._expand_virtual_packages()
         self._concretize_helper()
+        self._concrete = True


     def concretized(self):
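Since concretize() now caches its work in _concrete and dep_hash() no longer normalizes implicitly, the intended call order looks roughly like this sketch ('mpileaks' is a stand-in for a real package with a package.py):

    from spack.spec import Spec

    spec = Spec('mpileaks')   # hypothetical package
    spec.concretize()         # normalize, expand virtuals, set _concrete
    spec.concretize()         # second call returns immediately

    # dep_hash() hashes dep_string() without normalizing first, so take
    # the hash after concretization if it needs to be stable.
    print spec.dep_hash(8)    # first 8 characters of the sha1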
@@ -635,47 +673,51 @@
         return clone


-    def flat_dependencies(self):
-        """Return a DependencyMap containing all of this spec's dependencies
-           with their constraints merged.  If there are any conflicts, throw
-           an exception.
+    def flat_dependencies(self, **kwargs):
+        """Return a DependencyMap containing all of this spec's
+           dependencies with their constraints merged.

-           This will work even on specs that are not normalized; i.e. specs
-           that have two instances of the same dependency in the DAG.
-           This is used as the first step of normalization.
+           If copy is True, returns merged copies of its dependencies
+           without modifying the spec it's called on.
+
+           If copy is False, clears this spec's dependencies and
+           returns them.
         """
-        # This ensures that the package descriptions themselves are consistent
-        if not self.virtual:
-            self.package.validate_dependencies()
+        copy = kwargs.get('copy', True)

-        # Once that is guaranteed, we know any constraint violations are due
-        # to the spec -- so they're the user's fault, not Spack's.
         flat_deps = DependencyMap()
         try:
-            for spec in self.preorder_traversal():
+            for spec in self.traverse(root=False):
                 if spec.name not in flat_deps:
-                    new_spec = spec.copy(dependencies=False)
-                    flat_deps[spec.name] = new_spec
-
+                    if copy:
+                        flat_deps[spec.name] = spec.copy(deps=False)
+                    else:
+                        flat_deps[spec.name] = spec
                 else:
                     flat_deps[spec.name].constrain(spec)

+            if not copy:
+                for dep in flat_deps.values():
+                    dep.dependencies.clear()
+                    dep.dependents.clear()
+                self.dependencies.clear()
+
+            return flat_deps
+
         except UnsatisfiableSpecError, e:
-            # This REALLY shouldn't happen unless something is wrong in spack.
-            # It means we got a spec DAG with two instances of the same package
-            # that had inconsistent constraints.  There's no way for a user to
-            # produce a spec like this (the parser adds all deps to the root),
-            # so this means OUR code is not sane!
+            # Here, the DAG contains two instances of the same package
+            # with inconsistent constraints.  Users cannot produce
+            # inconsistent specs like this on the command line: the
+            # parser doesn't allow it.  Spack must be broken!
             raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)

-        return flat_deps
-

     def flatten(self):
         """Pull all dependencies up to the root (this spec).
           Merge constraints for dependencies with the same name, and if they
           conflict, throw an exception. """
-        self.dependencies = self.flat_dependencies()
+        for dep in self.flat_dependencies(copy=False):
+            self._add_dependency(dep)


     def _normalize_helper(self, visited, spec_deps, provider_index):
@@ -754,7 +796,7 @@ class Spec(object):
             dependency._normalize_helper(visited, spec_deps, provider_index)


-    def normalize(self):
+    def normalize(self, **kwargs):
         """When specs are parsed, any dependencies specified are hanging off
            the root, and ONLY the ones that were explicitly provided are there.
            Normalization turns a partial flat spec into a DAG, where:
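A sketch of the two flat_dependencies() modes introduced here, again with made-up package names:

    from spack.spec import Spec

    spec = Spec('mpileaks', 'callpath')   # hypothetical packages

    # copy=True (the default) leaves `spec` untouched and returns merged
    # copies of its dependencies, keyed by name.
    flat = spec.flat_dependencies()
    print sorted(flat.keys())

    # copy=False strips the dependencies off `spec` and returns the
    # originals; flatten() and normalize() build on this mode.
    stolen = spec.flat_dependencies(copy=False)
    print len(spec.dependencies)          # 0: the root no longer owns them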
@@ -772,14 +814,18 @@
            TODO: normalize should probably implement some form of cycle
            detection, to ensure that the spec is actually a DAG.
         """
+        if self._normal and not kwargs.get('force', False):
+            return
+
         # Ensure first that all packages & compilers in the DAG exist.
         self.validate_names()

-        # Then ensure that the packages referenced are sane, that the
-        # provided spec is sane, and that all dependency specs are in the
-        # root node of the spec.  flat_dependencies will do this for us.
-        spec_deps = self.flat_dependencies()
-        self.dependencies.clear()
+        # Ensure that the package & dep descriptions are consistent & sane
+        if not self.virtual:
+            self.package.validate_dependencies()
+
+        # Get all the dependencies into one DependencyMap
+        spec_deps = self.flat_dependencies(copy=False)

         # Figure out which of the user-provided deps provide virtual deps.
         # Remove virtual deps that are already provided by something in the spec
@@ -792,7 +838,7 @@

         # If there are deps specified but not visited, they're not
         # actually deps of this package.  Raise an error.
-        extra = set(spec_deps.viewkeys()).difference(visited)
+        extra = set(spec_deps.keys()).difference(visited)

         # Also subtract out all the packags that provide a needed vpkg
         vdeps = [v for v in self.package.virtual_dependencies()]
@@ -805,6 +851,9 @@
             raise InvalidDependencyException(
                 self.name + " does not depend on " + comma_or(extra))

+        # Mark the spec as normal once done.
+        self._normal = True
+

     def normalized(self):
         """Return a normalized copy of this spec without modifying this spec."""
@@ -818,7 +867,7 @@
            If they're not, it will raise either UnknownPackageError or
            UnsupportedCompilerError.
         """
-        for spec in self.preorder_traversal():
+        for spec in self.traverse():
             # Don't get a package for a virtual name.
             if not spec.virtual:
                 spack.db.get(spec.name)
@@ -886,17 +935,17 @@
     def common_dependencies(self, other):
         """Return names of dependencies that self an other have in common."""
         common = set(
-            s.name for s in self.preorder_traversal(root=False))
+            s.name for s in self.traverse(root=False))
         common.intersection_update(
-            s.name for s in other.preorder_traversal(root=False))
+            s.name for s in other.traverse(root=False))
         return common


     def dep_difference(self, other):
         """Returns dependencies in self that are not in other."""
-        mine = set(s.name for s in self.preorder_traversal(root=False))
+        mine = set(s.name for s in self.traverse(root=False))
         mine.difference_update(
-            s.name for s in other.preorder_traversal(root=False))
+            s.name for s in other.traverse(root=False))
         return mine
@@ -955,8 +1004,8 @@
             return False

         # For virtual dependencies, we need to dig a little deeper.
-        self_index = ProviderIndex(self.preorder_traversal(), restrict=True)
-        other_index = ProviderIndex(other.preorder_traversal(), restrict=True)
+        self_index = ProviderIndex(self.traverse(), restrict=True)
+        other_index = ProviderIndex(other.traverse(), restrict=True)

         # This handles cases where there are already providers for both vpkgs
         if not self_index.satisfies(other_index):
@@ -978,7 +1027,7 @@

     def virtual_dependencies(self):
         """Return list of any virtual deps in this spec."""
-        return [spec for spec in self.preorder_traversal() if spec.virtual]
+        return [spec for spec in self.traverse() if spec.virtual]


     def _dup(self, other, **kwargs):
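The effect of the new _normal flag, roughly. This assumes 'mpileaks' and 'callpath' are hypothetical packages whose package.py files exist and declare the dependency, so validate_names() and validate_dependencies() pass:

    from spack.spec import Spec

    spec = Spec('mpileaks', 'callpath')   # hypothetical packages

    spec.normalize()            # builds the DAG and sets spec._normal
    spec.normalize()            # no-op: _normal is set, force defaults to False
    spec.normalize(force=True)  # re-runs anyway, as _expand_virtual_packages()
                                # does after swapping providers in for virtuals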
@@ -993,21 +1042,31 @@
            Whether deps should be copied too.  Set to false to copy a
            spec but not its dependencies.
         """
-        # TODO: this needs to handle DAGs.
+        # Local node attributes get copied first.
         self.name = other.name
         self.versions = other.versions.copy()
         self.variants = other.variants.copy()
         self.architecture = other.architecture
-        self.compiler = None
-        if other.compiler:
-            self.compiler = other.compiler.copy()
-
+        self.compiler = other.compiler.copy() if other.compiler else None
         self.dependents = DependencyMap()
-        copy_deps = kwargs.get('dependencies', True)
-        if copy_deps:
-            self.dependencies = other.dependencies.copy()
-        else:
-            self.dependencies = DependencyMap()
+        self.dependencies = DependencyMap()
+
+        # If we copy dependencies, preserve DAG structure in the new spec
+        if kwargs.get('deps', True):
+            # This copies the deps from other using _dup(deps=False)
+            new_nodes = other.flat_dependencies()
+            new_nodes[self.name] = self
+
+            # Hook everything up properly here by traversing.
+            for spec in other.traverse(cover='nodes'):
+                parent = new_nodes[spec.name]
+                for child in spec.dependencies:
+                    if child not in parent.dependencies:
+                        parent._add_dependency(new_nodes[child])
+
+        # Since we preserved structure, we can copy _normal safely.
+        self._normal = other._normal
+        self._concrete = other._concrete


     def copy(self, **kwargs):
@@ -1029,7 +1088,7 @@

     def __getitem__(self, name):
         """TODO: reconcile __getitem__, _add_dependency, __contains__"""
-        for spec in self.preorder_traversal():
+        for spec in self.traverse():
             if spec.name == name:
                 return spec
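A quick sketch of the copy semantics after this change (hypothetical packages again):

    from spack.spec import Spec

    spec = Spec('mpileaks', 'callpath')   # hypothetical packages

    full = spec.copy()             # deps=True by default: the dependency DAG
                                   # is rebuilt with structure preserved, and
                                   # _normal/_concrete carry over
    alone = spec.copy(deps=False)  # just this node, with no dependencies

    print len(full.dependencies), len(alone.dependencies)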
@@ -1040,15 +1099,82 @@
         """True if this spec has any dependency that satisfies the
            supplied spec."""
         spec = self._autospec(spec)
-        for s in self.preorder_traversal():
+        for s in self.traverse():
             if s.satisfies(spec):
                 return True
         return False


-    def _cmp_key(self):
+    def sorted_deps(self):
+        """Return a list of all dependencies sorted by name."""
+        deps = self.flat_dependencies()
+        return tuple(deps[name] for name in sorted(deps))
+
+
+    def _eq_dag(self, other, vs, vo):
+        """Recursive helper for eq_dag and ne_dag.  Does the actual DAG
+           traversal."""
+        vs.add(id(self))
+        vo.add(id(other))
+
+        if self.ne_node(other):
+            return False
+
+        if len(self.dependencies) != len(other.dependencies):
+            return False
+
+        ssorted = [self.dependencies[name] for name in sorted(self.dependencies)]
+        osorted = [other.dependencies[name] for name in sorted(other.dependencies)]
+
+        for s, o in zip(ssorted, osorted):
+            visited_s = id(s) in vs
+            visited_o = id(o) in vo
+
+            # Check for duplicate or non-equal dependencies
+            if visited_s != visited_o: return False
+
+            # Skip visited nodes
+            if visited_s or visited_o: continue
+
+            # Recursive check for equality
+            if not s._eq_dag(o, vs, vo):
+                return False
+
+        return True
+
+
+    def eq_dag(self, other):
+        """True if the full dependency DAGs of specs are equal"""
+        return self._eq_dag(other, set(), set())
+
+
+    def ne_dag(self, other):
+        """True if the full dependency DAGs of specs are not equal"""
+        return not self.eq_dag(other)
+
+
+    def _cmp_node(self):
+        """Comparison key for just *this node* and not its deps."""
         return (self.name, self.versions, self.variants,
-                self.architecture, self.compiler, self.dependencies)
+                self.architecture, self.compiler)
+
+
+    def eq_node(self, other):
+        """Equality with another spec, not including dependencies."""
+        return self._cmp_node() == other._cmp_node()
+
+
+    def ne_node(self, other):
+        """Inequality with another spec, not including dependencies."""
+        return self._cmp_node() != other._cmp_node()
+
+
+    def _cmp_key(self):
+        """Comparison key for this node and all dependencies *without*
+           considering structure.  This is the default, as
+           normalization will restore structure.
+        """
+        return self._cmp_node() + (self.sorted_deps(),)


     def colorized(self):
@@ -1151,12 +1277,12 @@
         return result


+    def dep_string(self):
+        return ''.join("^" + dep.format() for dep in self.sorted_deps())
+
+
     def __str__(self):
-        by_name = lambda d: d.name
-        deps = self.preorder_traversal(key=by_name, root=False)
-        sorted_deps = sorted(deps, key=by_name)
-        dep_string = ''.join("^" + dep.format() for dep in sorted_deps)
-        return self.format() + dep_string
+        return self.format() + self.dep_string()


     def tree(self, **kwargs):
@@ -1172,7 +1298,7 @@
         out = ""
         cur_id = 0
         ids = {}
-        for d, node in self.preorder_traversal(cover=cover, depth=True):
+        for d, node in self.traverse(order='pre', cover=cover, depth=True):
             out += " " * indent
             if depth:
                 out += "%-4d" % d
@@ -1261,6 +1387,9 @@ class SpecParser(spack.parse.Parser):
         spec.dependents = DependencyMap()
         spec.dependencies = DependencyMap()

+        spec._normal = False
+        spec._concrete = False
+
         # record this so that we know whether version is
         # unspecified or not.
         added_version = False
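To summarize the three levels of spec equality this change introduces, a hedged sketch (package names invented; results depend on what each package.py actually declares):

    from spack.spec import Spec

    a = Spec('mpileaks', 'callpath')   # hypothetical packages
    b = Spec('mpileaks', 'callpath')

    a.eq_node(b)   # compares only name/versions/variants/architecture/compiler
    a == b         # _cmp_key(): node plus sorted_deps(), ignoring DAG structure
    a.eq_dag(b)    # walks both DAGs and requires identical structure

Two specs can therefore compare equal with == while differing under eq_dag(); per the _cmp_key docstring above, that is acceptable because normalization restores a canonical structure.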