author     Todd Gamblin <tgamblin@llnl.gov>    2013-10-17 14:46:00 -0700
committer  Todd Gamblin <tgamblin@llnl.gov>    2013-10-17 14:46:00 -0700
commit     558cf7e40689fc18eca47bc80d34da555b5d27d5 (patch)
tree       b1f09913630f0a6d925856ca7df925467a503160 /lib
parent     db07c7f611da4ddc6127dde28d48e5624e4f1172 (diff)
spec flatten, normalize, validate; package validate
New operations for manipulating spec and package DAGs.

For specs:
    flatten:    gather all deps to the root
    normalize:  merge constraints and make the spec match the package DAG

For packages:
    validate_dependencies:  make sure spec constraints in the package DAG are sane

Added tests for the above methods.  Also added the beginnings of
concretization logic, to turn an abstract spec into a concrete one.
Still need proper tests for normalize().
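For orientation, a minimal sketch of how the new operations fit together, using only the APIs added in this commit (the mpileaks/mpich/callpath names come from the mock packages in the new test suite, so this assumes spack.packages_path points at them):

    from spack.spec import Spec

    # Parse an abstract spec; the parser hangs every dependency off the root.
    spec = Spec('mpileaks ^mpich@1.0 ^callpath')

    # validate_dependencies() checks that the depends_on() constraints
    # declared by the packages themselves are mutually consistent.
    spec.package.validate_dependencies()

    # normalize() flattens deps to the root, merges in constraints from the
    # package DAG, and raises an UnsatisfiableSpecError subclass on conflict.
    spec.normalize()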
Diffstat (limited to 'lib')
-rw-r--r--   lib/spack/spack/cmd/test.py                      |   5
-rw-r--r--   lib/spack/spack/compilers/__init__.py            |   6
-rw-r--r--   lib/spack/spack/concretize.py                    |  80
-rw-r--r--   lib/spack/spack/package.py                       |  77
-rw-r--r--   lib/spack/spack/packages/__init__.py             |  20
-rw-r--r--   lib/spack/spack/packages/libdwarf.py             |   2
-rw-r--r--   lib/spack/spack/parse.py                         |   6
-rw-r--r--   lib/spack/spack/relations.py                     |   4
-rw-r--r--   lib/spack/spack/spec.py                          | 292
-rw-r--r--   lib/spack/spack/test/concretize.py               |   4
-rw-r--r--   lib/spack/spack/test/mock_packages/__init__.py   |   0
-rw-r--r--   lib/spack/spack/test/mock_packages/callpath.py   |  14
-rw-r--r--   lib/spack/spack/test/mock_packages/dyninst.py    |  14
-rw-r--r--   lib/spack/spack/test/mock_packages/libdwarf.py   |  55
-rw-r--r--   lib/spack/spack/test/mock_packages/libelf.py     |  16
-rw-r--r--   lib/spack/spack/test/mock_packages/mpich.py      |  11
-rw-r--r--   lib/spack/spack/test/mock_packages/mpileaks.py   |  14
-rw-r--r--   lib/spack/spack/test/spec_dag.py                 | 117
-rw-r--r--   lib/spack/spack/test/spec_syntax.py (renamed from lib/spack/spack/test/specs.py) |  58
-rw-r--r--   lib/spack/spack/util/executable.py               |   1
-rw-r--r--   lib/spack/spack/util/lang.py                     |  12
21 files changed, 641 insertions(+), 167 deletions(-)
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index c6ac064001..80eeff34ac 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -19,14 +19,13 @@ def setup_parser(subparser):
def test(parser, args):
if args.all:
- for name in list_modules(spack.test_path):
+ for name in list_modules(spack.test_path, directories=False):
print "Running Tests: %s" % name
spack.test.run(name, verbose=args.verbose)
elif not args.names:
print "Available tests:"
- colify(list_modules(spack.test_path))
-
+ colify(list_modules(spack.test_path, directories=False))
else:
for name in args.names:
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 24a99a1bae..ad2260d58a 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -11,5 +11,7 @@ def supported_compilers():
return [c for c in list_modules(spack.compilers_path)]
-def get_compiler():
- return Compiler('gcc', spack.compilers.gcc.get_version())
+@memoized
+def default_compiler():
+ from spack.spec import Compiler
+ return Compiler('gcc', gcc.get_version())
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
new file mode 100644
index 0000000000..ac1d4421da
--- /dev/null
+++ b/lib/spack/spack/concretize.py
@@ -0,0 +1,80 @@
+"""
+Functions here are used to take abstract specs and make them concrete.
+For example, if a spec asks for a version between 1.8 and 1.9, these
+functions might take the most recent 1.9 version of the
+package available. Or, if the user didn't specify a compiler for a
+spec, then this will assign a compiler to the spec based on defaults
+or user preferences.
+
+TODO: make this customizable and allow users to configure
+ concretization policies.
+"""
+import spack.arch
+import spack.compilers
+from spack.version import *
+from spack.spec import *
+
+
+def concretize_version(spec):
+ """If the spec is already concrete, return. Otherwise take
+ the most recent available version, and default to the package's
+ version if there are no avaialble versions.
+ """
+ # return if already concrete.
+ if spec.versions.concrete:
+ return
+
+ pkg = speck.package
+ available = pkg.available_versions
+
+ # If there are known avaialble versions, return the most recent
+ if versions:
+ spec.versions = ver([avaialble[-1]])
+ else:
+ spec.versions = ver([pkg.version])
+
+
+def concretize_architecture(spec):
+ """If the spec already had an architecture, return. Otherwise if
+ the root of the DAG has an architecture, then use that.
+ Otherwise take the system's default architecture.
+
+ Intuition: Architectures won't be set a lot, and generally you
+ want the host system's architecture. When architectures are
+ mised in a spec, it is likely because the tool requries a
+ cross-compiled component, e.g. for tools that run on BlueGene
+ or Cray machines. These constraints will likely come directly
+ from packages, so require the user to be explicit if they want
+ to mess with the architecture, and revert to the default when
+ they're not explicit.
+ """
+ if spec.architecture is not None:
+ return
+
+ if spec.root.architecture:
+ spec.architecture = spec.root.architecture
+ else:
+ spec.architecture = spack.arch.sys_type()
+
+
+def concretize_compiler(spec):
+ """Currently just sets the compiler to gcc or throws an exception
+ if the compiler is set to something else.
+
+ TODO: implement below description.
+
+ If the spec already has a compiler, we're done. If not, then
+ take the compiler used for the nearest ancestor with a concrete
+ compiler, or use the system default if there is no ancestor
+ with a compiler.
+
+ Intuition: Use the system default if no package that depends on
+ this one has a strict compiler requirement. Otherwise, try to
+ build with the compiler that will be used by libraries that
+ link to this one, to maximize compatibility.
+ """
+ if spec.compiler.concrete:
+ if spec.compiler != spack.compilers.default_compiler():
+ raise spack.spec.UnknownCompilerError(str(spec.compiler))
+ else:
+ spec.compiler = spack.compilers.default_compiler()
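A standalone sketch of the selection rule implemented by concretize_version() above; plain lists stand in for Spack's VersionList/ver helpers, and pick_version is an illustrative name, not a Spack API:

    def pick_version(spec_versions, available, default):
        """Keep an already-concrete version; otherwise prefer the newest
           version found for the package; otherwise fall back to the
           version declared in the package file."""
        if len(spec_versions) == 1:      # spec is already concrete
            return spec_versions[0]
        if available:                    # newest known version wins
            return available[-1]
        return default

    print(pick_version([], ['1.8.12', '1.8.13', '1.9.1'], '1.9.1'))  # 1.9.1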
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 9eece8afcf..cd87adb990 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -18,13 +18,13 @@ import shutil
from spack import *
import spack.spec
+import spack.error
import packages
import tty
import attr
import validate
import url
-from spec import Compiler
from version import *
from multi_function import platform
from stage import Stage
@@ -249,7 +249,7 @@ class Package(object):
# These variables are per-package metadata and will be defined by subclasses.
#
"""By default a package has no dependencies."""
- dependencies = []
+ dependencies = {}
#
# These are default values for instance variables.
@@ -371,21 +371,51 @@ class Package(object):
return tuple(self._dependents)
- def sanity_check(self):
- """Ensure that this package and its dependencies don't have conflicting
- requirements."""
- deps = sorted(self.all_dependencies, key=lambda d: d.name)
+ def preorder_traversal(self, visited=None):
+ if visited is None:
+ visited = set()
+
+ if self.name in visited:
+ return
+ visited.add(self.name)
+
+ yield self
+ for name, spec in self.dependencies.iteritems():
+ for pkg in packages.get(name).preorder_traversal(visited):
+ yield pkg
+ def validate_dependencies(self):
+ """Ensure that this package and its dependencies all have consistent
+ constraints on them.
+ """
+ # This algorithm just attempts to merge all the constraints on the same
+ # package together, loses information about the source of the conflict.
+ # What we'd really like to know is exactly which two constraints
+ # conflict, but that algorithm is more expensive, so we'll do it
+ # the simple, less informative way for now.
+ merged = spack.spec.DependencyMap()
+
+ try:
+ for pkg in self.preorder_traversal():
+ for name, spec in pkg.dependencies.iteritems():
+ if name not in merged:
+ merged[name] = spec.copy()
+ else:
+ merged[name].constrain(spec)
+
+ except spack.spec.UnsatisfiableSpecError, e:
+ raise InvalidPackageDependencyError(
+ "Package %s has inconsistent dependency constraints: %s"
+ % (self.name, e.message))
+
@property
@memoized
def all_dependencies(self):
"""Dict(str -> Package) of all transitive dependencies of this package."""
- all_deps = set(self.dependencies)
- for dep in self.dependencies:
- dep_pkg = packages.get(dep.name)
- all_deps = all_deps.union(dep_pkg.all_dependencies)
+ all_deps = {dep.name : dep for dep in self.preorder_traversal()}
+ del all_deps[self.name]
return all_deps
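The constraint-merging idea in validate_dependencies(), reduced to a self-contained toy; dicts and plain string comparison stand in for DependencyMap and Spec.constrain():

    def merge_constraints(pkgs):
        merged = {}
        for pkg in pkgs:
            for name, constraint in pkg['dependencies'].items():
                if name not in merged:
                    merged[name] = constraint
                elif merged[name] != constraint:   # real code calls constrain()
                    raise ValueError("inconsistent constraints on %s: %s vs %s"
                                     % (name, merged[name], constraint))
        return merged

    try:
        merge_constraints([{'dependencies': {'mpich': '@1.0'}},
                           {'dependencies': {'mpich': '@2.0'}}])
    except ValueError as e:
        print(e)   # inconsistent constraints on mpich: @1.0 vs @2.0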
@@ -533,7 +563,7 @@ class Package(object):
# Pass along prefixes of dependencies here
path_set(SPACK_DEPENDENCIES,
- [dep.package.prefix for dep in self.dependencies])
+ [dep.package.prefix for dep in self.dependencies.values()])
# Install location
os.environ[SPACK_PREFIX] = self.prefix
@@ -544,7 +574,7 @@ class Package(object):
def do_install_dependencies(self):
# Pass along paths of dependencies here
- for dep in self.dependencies:
+ for dep in self.dependencies.values():
dep.package.do_install()
@@ -607,7 +637,7 @@ class Package(object):
@property
def available_versions(self):
if not self._available_versions:
- self._available_versions = VersionList()
+ self._available_versions = ver([self.version])
try:
# Run curl but grab the mime type from the http headers
listing = spack.curl('-s', '-L', self.list_url, return_output=True)
@@ -617,18 +647,18 @@ class Package(object):
for s in strings:
match = re.search(wildcard, s)
if match:
- self._available_versions.add(ver(match.group(0)))
+ self._available_versions.add(Version(match.group(0)))
+
+ if not self._available_versions:
+ tty.warn("Found no versions for %s" % self.name,
+ "Packate.available_versions may require adding the list_url attribute",
+ "to the package to tell Spack where to look for versions.")
- except CalledProcessError:
+ except subprocess.CalledProcessError:
tty.warn("Fetching %s failed." % self.list_url,
"Package.available_versions requires an internet connection.",
"Version list may be incomplete.")
- if not self._available_versions:
- tty.warn("Found no versions for %s" % self.name,
- "Packate.available_versions may require adding the list_url attribute",
- "to the package to tell Spack where to look for versions.")
- self._available_versions = [self.version]
return self._available_versions
@@ -654,3 +684,10 @@ class MakeExecutable(Executable):
args = (jobs,) + args
super(MakeExecutable, self).__call__(*args, **kwargs)
+
+
+class InvalidPackageDependencyError(spack.error.SpackError):
+ """Raised when package specification is inconsistent with requirements of
+ its dependencies."""
+ def __init__(self, message):
+ super(InvalidPackageDependencyError, self).__init__(message)
diff --git a/lib/spack/spack/packages/__init__.py b/lib/spack/spack/packages/__init__.py
index 6d515274b6..8692dde5a8 100644
--- a/lib/spack/spack/packages/__init__.py
+++ b/lib/spack/spack/packages/__init__.py
@@ -20,6 +20,7 @@ invalid_package_re = r'[_-][_-]+'
instances = {}
+
def get(pkg_name):
if not pkg_name in instances:
package_class = get_class_for_package_name(pkg_name)
@@ -85,9 +86,18 @@ def get_class_for_package_name(pkg_name):
else:
raise UnknownPackageError(pkg_name)
+ # Figure out the packages module from spack.packages_path
+ # This allows us to change the module path.
+ if not re.match(r'%s' % spack.module_path, spack.packages_path):
+ raise RuntimeError("Packages path is not a submodule of spack.")
+
+ # TODO: replace this with a proper package DB class, instead of this hackiness.
+ packages_path = re.sub(spack.module_path + '\/+', 'spack.', spack.packages_path)
+ packages_module = re.sub(r'\/', '.', packages_path)
+
class_name = pkg_name.capitalize()
try:
- module_name = "%s.%s" % (__name__, pkg_name)
+ module_name = "%s.%s" % (packages_module, pkg_name)
module = __import__(module_name, fromlist=[class_name])
except ImportError, e:
tty.die("Error while importing %s.%s:\n%s" % (pkg_name, class_name, e.message))
@@ -107,8 +117,8 @@ def compute_dependents():
if pkg._dependents is None:
pkg._dependents = []
- for dep in pkg.dependencies:
- dpkg = get(dep.name)
+ for name, dep in pkg.dependencies.iteritems():
+ dpkg = get(name)
if dpkg._dependents is None:
dpkg._dependents = []
dpkg._dependents.append(pkg.name)
@@ -130,8 +140,8 @@ def graph_dependencies(out=sys.stdout):
deps = []
for pkg in all_packages():
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
- for dep in pkg.dependencies:
- deps.append((pkg.name, dep.name))
+ for dep_name, dep in pkg.dependencies.iteritems():
+ deps.append((pkg.name, dep_name))
out.write('\n')
for pair in deps:
diff --git a/lib/spack/spack/packages/libdwarf.py b/lib/spack/spack/packages/libdwarf.py
index edaba6a216..bae701b38b 100644
--- a/lib/spack/spack/packages/libdwarf.py
+++ b/lib/spack/spack/packages/libdwarf.py
@@ -11,7 +11,7 @@ class Libdwarf(Package):
list_url = "http://reality.sgiweb.org/davea/dwarf.html"
- depends_on("libelf@0:1")
+ depends_on("libelf")
def clean(self):
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index 5431aa454d..f2381b1078 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -91,12 +91,16 @@ class Parser(object):
self.next_token_error("Unexpected end of input")
sys.exit(1)
- def parse(self, text):
+ def setup(self, text):
self.text = text
self.push_tokens(self.lexer.lex(text))
+
+ def parse(self, text):
+ self.setup(text)
return self.do_parse()
+
class ParseError(spack.error.SpackError):
"""Raised when we don't hit an error while parsing."""
def __init__(self, message, string, pos):
diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py
index 8eb4e2d1a2..cc690ffc0f 100644
--- a/lib/spack/spack/relations.py
+++ b/lib/spack/spack/relations.py
@@ -54,10 +54,10 @@ def depends_on(*specs):
"""
# Get the enclosing package's scope and add deps to it.
locals = sys._getframe(1).f_locals
- dependencies = locals.setdefault("dependencies", [])
+ dependencies = locals.setdefault("dependencies", {})
for string in specs:
for spec in spack.spec.parse(string):
- dependencies.append(spec)
+ dependencies[spec.name] = spec
def provides(*args):
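With dependencies now stored in a dict keyed by package name, a package class ends up with entries like the following (a hypothetical example mirroring the mock packages added below):

    from spack import *

    class Mpileaks(Package):
        homepage = "http://www.llnl.gov"
        url      = "http://www.llnl.gov/mpileaks-1.0.tar.gz"

        depends_on("mpich")      # -> dependencies['mpich']    = Spec('mpich')
        depends_on("callpath")   # -> dependencies['callpath'] = Spec('callpath')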
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index edc97c7c3b..2731c344ab 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -67,6 +67,7 @@ from StringIO import StringIO
import tty
import spack.parse
import spack.error
+import spack.concretize
import spack.compilers
import spack.compilers.gcc
import spack.packages as packages
@@ -137,9 +138,8 @@ class Compiler(object):
def constrain(self, other):
- if not self.satisfies(other.compiler):
- raise UnsatisfiableCompilerSpecError(
- "%s does not satisfy %s" % (self.compiler, other.compiler))
+ if not self.satisfies(other):
+ raise UnsatisfiableCompilerSpecError(self, other)
self.versions.intersect(other.versions)
@@ -149,23 +149,6 @@ class Compiler(object):
return self.versions.concrete
- def _concretize(self):
- """If this spec could describe more than one version, variant, or build
- of a package, this will resolve it to be concrete.
- """
- # TODO: support compilers other than GCC.
- if self.concrete:
- return
- gcc_version = spack.compilers.gcc.get_version()
- self.versions = VersionList([gcc_version])
-
-
- def concretized(self):
- clone = self.copy()
- clone._concretize()
- return clone
-
-
@property
def version(self):
if not self.concrete:
@@ -243,13 +226,34 @@ class DependencyMap(HashableMap):
@key_ordering
class Spec(object):
- def __init__(self, name):
- self.name = name
- self.versions = VersionList()
- self.variants = VariantMap()
- self.architecture = None
- self.compiler = None
- self.dependencies = DependencyMap()
+ def __init__(self, spec_like):
+ # Copy if spec_like is a Spec.
+ if type(spec_like) == Spec:
+ self._dup(spec_like)
+ return
+
+ # Parse if the spec_like is a string.
+ if type(spec_like) != str:
+ raise TypeError("Can't make spec out of %s" % type(spec_like))
+
+ spec_list = SpecParser().parse(spec_like)
+ if len(spec_list) > 1:
+ raise ValueError("More than one spec in string: " + spec_like)
+ if len(spec_list) < 1:
+ raise ValueError("String contains no specs: " + spec_like)
+
+ # Take all the attributes from the first parsed spec without copying
+ # This is a little bit nasty, but it's nastier to make the parser
+ # write directly into this Spec object.
+ other = spec_list[0]
+ self.name = other.name
+ self.parent = other.parent
+ self.versions = other.versions
+ self.variants = other.variants
+ self.architecture = other.architecture
+ self.compiler = other.compiler
+ self.dependencies = other.dependencies
+
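In practice the new constructor gives Spec both string-parsing and copy semantics; a short usage sketch based on the checks above:

    from spack.spec import Spec

    s = Spec('mpileaks ^mpich@1.0')   # parses exactly one spec from a string
    t = Spec(s)                       # copy-constructs from an existing Spec

    # Spec('mpileaks mpich') raises ValueError (more than one spec in the
    # string), and anything that is neither a str nor a Spec raises TypeError.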
#
# Private routines here are called by the parser when building a spec.
@@ -285,6 +289,21 @@ class Spec(object):
if dep.name in self.dependencies:
raise DuplicateDependencyError("Cannot depend on '%s' twice" % dep)
self.dependencies[dep.name] = dep
+ dep.parent = self
+
+
+ @property
+ def root(self):
+ """Follow parent links and find the root of this spec's DAG."""
+ root = self
+ while root.parent is not None:
+ root = root.parent
+ return root
+
+
+ @property
+ def package(self):
+ return packages.get(self.name)
@property
@@ -296,6 +315,20 @@ class Spec(object):
and self.dependencies.concrete)
+ def preorder_traversal(self, visited=None):
+ if visited is None:
+ visited = set()
+
+ if id(self) in visited:
+ return
+ visited.add(id(self))
+
+ yield self
+ for dep in self.dependencies.itervalues():
+ for spec in dep.preorder_traversal(visited):
+ yield spec
+
+
def _concretize(self):
"""A spec is concrete if it describes one build of a package uniquely.
This will ensure that this spec is concrete.
@@ -327,30 +360,40 @@ class Spec(object):
# TODO: handle variants.
- pkg = packages.get(self.name)
-
# Take the highest version in a range
if not self.versions.concrete:
- preferred = self.versions.highest() or pkg.version
+ preferred = self.versions.highest() or self.package.version
self.versions = VersionList([preferred])
# Ensure dependencies have right versions
- @property
- def traverse_deps(self, visited=None):
- """Yields dependencies in depth-first order"""
- if not visited:
- visited = set()
+ def flatten(self):
+ """Pull all dependencies up to the root (this spec).
+ Merge constraints for dependencies with the same name, and if they
+ conflict, throw an exception. """
+ # This ensures that the package descriptions themselves are consistent
+ self.package.validate_dependencies()
+
+ # Once that is guaranteed, we know any constraint violations are due
+ # to the spec -- so they're the user's fault, not Spack's.
+ flat_deps = DependencyMap()
+ try:
+ for spec in self.preorder_traversal():
+ if spec.name not in flat_deps:
+ flat_deps[spec.name] = spec
+ else:
+ flat_deps[spec.name].constrain(spec)
- for name in sorted(self.dependencies.keys()):
- dep = dependencies[name]
- if dep in visited:
- continue
+ except UnsatisfiableSpecError, e:
+ # This REALLY shouldn't happen unless something is wrong in spack.
+ # It means we got a spec DAG with two instances of the same package
+ # that had inconsistent constraints. There's no way for a user to
+ # produce a spec like this (the parser adds all deps to the root),
+ # so this means OUR code is not sane!
+ raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
- for d in dep.traverse_deps(seen):
- yield d
- yield dep
+ self.dependencies = flat_deps
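A toy version of flatten() on nested dicts, to show the shape of the transformation (constrain() is elided; duplicate names are simply merged by key here):

    def flatten(spec):
        flat, stack = {}, [spec]
        while stack:
            node = stack.pop()
            for name, dep in node['dependencies'].items():
                flat.setdefault(name, dep)   # real code constrains duplicates
                stack.append(dep)
        spec['dependencies'] = flat

    root = {'name': 'mpileaks', 'dependencies': {
                'callpath': {'name': 'callpath', 'dependencies': {
                    'mpich': {'name': 'mpich', 'dependencies': {}}}}}}
    flatten(root)
    print(sorted(root['dependencies']))   # ['callpath', 'mpich']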
def _normalize_helper(self, visited, spec_deps):
@@ -362,9 +405,7 @@ class Spec(object):
# Combine constraints from package dependencies with
# information in this spec's dependencies.
pkg = packages.get(self.name)
- for pkg_dep in pkg.dependencies:
- name = pkg_dep.name
-
+ for name, pkg_dep in self.package.dependencies.iteritems():
if name not in spec_deps:
# Clone the spec from the package
spec_deps[name] = pkg_dep.copy()
@@ -372,23 +413,29 @@ class Spec(object):
try:
# intersect package information with spec info
spec_deps[name].constrain(pkg_dep)
+
except UnsatisfiableSpecError, e:
- error_type = type(e)
- raise error_type(
- "Violated depends_on constraint from package %s: %s"
- % (self.name, e.message))
+ e.message = "Invalid spec: '%s'. "
+ e.message += "Package %s requires %s %s, but spec asked for %s"
+ e.message %= (spec_deps[name], name, e.constraint_type,
+ e.required, e.provided)
+ raise e
# Add merged spec to my deps and recurse
- self.dependencies[name] = spec_deps[name]
+ self._add_dependency(spec_deps[name])
self.dependencies[name]._normalize_helper(visited, spec_deps)
def normalize(self):
- if any(dep.dependencies for dep in self.dependencies.values()):
- raise SpecError("Spec has already been normalized.")
-
+ # Ensure first that all packages exist.
self.validate_package_names()
+ # Then ensure that the packages mentioned are sane, that the
+ # provided spec is sane, and that all dependency specs are in the
+ # root node of the spec. Flatten will do this for us.
+ self.flatten()
+
+ # Now that we're flat we can get all our dependencies at once.
spec_deps = self.dependencies
self.dependencies = DependencyMap()
@@ -404,29 +451,25 @@ class Spec(object):
def validate_package_names(self):
- for name in self.dependencies:
- packages.get(name)
+ packages.get(self.name)
+ for name, dep in self.dependencies.iteritems():
+ dep.validate_package_names()
def constrain(self, other):
if not self.versions.overlaps(other.versions):
- raise UnsatisfiableVersionSpecError(
- "%s does not satisfy %s" % (self.versions, other.versions))
+ raise UnsatisfiableVersionSpecError(self.versions, other.versions)
- conflicting_variants = [
- v for v in other.variants if v in self.variants and
- self.variants[v].enabled != other.variants[v].enabled]
-
- if conflicting_variants:
- raise UnsatisfiableVariantSpecError(comma_and(
- "%s does not satisfy %s" % (self.variants[v], other.variants[v])
- for v in conflicting_variants))
+ for v in other.variants:
+ if (v in self.variants and
+ self.variants[v].enabled != other.variants[v].enabled):
+ raise UnsatisfiableVariantSpecError(self.variants[v],
+ other.variants[v])
if self.architecture is not None and other.architecture is not None:
if self.architecture != other.architecture:
- raise UnsatisfiableArchitectureSpecError(
- "Asked for architecture %s, but required %s"
- % (self.architecture, other.architecture))
+ raise UnsatisfiableArchitectureSpecError(self.architecture,
+ other.architecture)
if self.compiler is not None and other.compiler is not None:
self.compiler.constrain(other.compiler)
@@ -457,16 +500,23 @@ class Spec(object):
return clone
+ def _dup(self, other):
+ """Copy the spec other into self. This is a
+ first-party, overwriting copy."""
+ # TODO: this needs to handle DAGs.
+ self.name = other.name
+ self.versions = other.versions.copy()
+ self.variants = other.variants.copy()
+ self.architecture = other.architecture
+ self.compiler = None
+ if other.compiler:
+ self.compiler = other.compiler.copy()
+ self.dependencies = other.dependencies.copy()
+
+
def copy(self):
- clone = Spec(self.name)
- clone.versions = self.versions.copy()
- clone.variants = self.variants.copy()
- clone.architecture = self.architecture
- clone.compiler = None
- if self.compiler:
- clone.compiler = self.compiler.copy()
- clone.dependencies = self.dependencies.copy()
- return clone
+ """Return a deep copy of this spec."""
+ return Spec(self)
@property
@@ -478,7 +528,7 @@ class Spec(object):
def _cmp_key(self):
return (self.name, self.versions, self.variants,
- self.architecture, self.compiler)
+ self.architecture, self.compiler, self.dependencies)
def colorized(self):
@@ -505,7 +555,7 @@ class Spec(object):
def tree(self, indent=""):
"""Prints out this spec and its dependencies, tree-formatted
- with indentation."""
+ with indentation. Each node also has an id."""
out = indent + self.str_without_deps()
for dep in sorted(self.dependencies.keys()):
out += "\n" + self.dependencies[dep].tree(indent + " ")
@@ -566,8 +616,22 @@ class SpecParser(spack.parse.Parser):
def spec(self):
+ """Parse a spec out of the input. If a spec is supplied, then initialize
+ and return it instead of creating a new one."""
self.check_identifier()
- spec = Spec(self.token.value)
+
+ # This will init the spec without calling __init__.
+ spec = Spec.__new__(Spec)
+ spec.name = self.token.value
+ spec.parent = None
+ spec.versions = VersionList()
+ spec.variants = VariantMap()
+ spec.architecture = None
+ spec.compiler = None
+ spec.dependencies = DependencyMap()
+
+ # record this so that we know whether version is
+ # unspecified or not.
added_version = False
while self.next:
@@ -661,34 +725,10 @@ class SpecParser(spack.parse.Parser):
def parse(string):
- """Returns a list of specs from an input string."""
- return SpecParser().parse(string)
-
-
-def parse_one(string):
- """Parses a string containing only one spec, then returns that
- spec. If more than one spec is found, raises a ValueError.
+ """Returns a list of specs from an input string.
+ For creating one spec, see Spec() constructor.
"""
- spec_list = parse(string)
- if len(spec_list) > 1:
- raise ValueError("string contains more than one spec!")
- elif len(spec_list) < 1:
- raise ValueError("string contains no specs!")
- return spec_list[0]
-
-
-def make_spec(spec_like):
- if type(spec_like) == str:
- specs = parse(spec_like)
- if len(specs) != 1:
- raise ValueError("String contains multiple specs: '%s'" % spec_like)
- return specs[0]
-
- elif type(spec_like) == Spec:
- return spec_like
-
- else:
- raise TypeError("Can't make spec out of %s" % type(spec_like))
+ return SpecParser().parse(string)
class SpecError(spack.error.SpackError):
@@ -728,6 +768,13 @@ class DuplicateArchitectureError(SpecError):
super(DuplicateArchitectureError, self).__init__(message)
+class InconsistentSpecError(SpecError):
+ """Raised when two nodes in the same spec DAG have inconsistent
+ constraints."""
+ def __init__(self, message):
+ super(InconsistentSpecError, self).__init__(message)
+
+
class InvalidDependencyException(SpecError):
"""Raised when a dependency in a spec is not actually a dependency
of the package."""
@@ -736,30 +783,39 @@ class InvalidDependencyException(SpecError):
class UnsatisfiableSpecError(SpecError):
- """Raised when a spec conflicts with package constraints."""
- def __init__(self, message):
- super(UnsatisfiableSpecError, self).__init__(message)
+ """Raised when a spec conflicts with package constraints.
+ Provide the requirement that was violated when raising."""
+ def __init__(self, provided, required, constraint_type):
+ super(UnsatisfiableSpecError, self).__init__(
+ "%s does not satisfy %s" % (provided, required))
+ self.provided = provided
+ self.required = required
+ self.constraint_type = constraint_type
class UnsatisfiableVersionSpecError(UnsatisfiableSpecError):
"""Raised when a spec version conflicts with package constraints."""
- def __init__(self, message):
- super(UnsatisfiableVersionSpecError, self).__init__(message)
+ def __init__(self, provided, required):
+ super(UnsatisfiableVersionSpecError, self).__init__(
+ provided, required, "version")
class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError):
"""Raised when a spec comiler conflicts with package constraints."""
- def __init__(self, message):
- super(UnsatisfiableCompilerSpecError, self).__init__(message)
+ def __init__(self, provided, required):
+ super(UnsatisfiableCompilerSpecError, self).__init__(
+ provided, required, "compiler")
class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
- def __init__(self, message):
- super(UnsatisfiableVariantSpecError, self).__init__(message)
+ def __init__(self, provided, required):
+ super(UnsatisfiableVariantSpecError, self).__init__(
+ provided, required, "variant")
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
- def __init__(self, message):
- super(UnsatisfiableArchitectureSpecError, self).__init__(message)
+ def __init__(self, provided, required):
+ super(UnsatisfiableArchitectureSpecError, self).__init__(
+ provided, required, "architecture")
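The payoff of the structured errors is that callers can reformat them, as _normalize_helper now does. For example, assuming the classes behave as defined above:

    from spack.spec import Spec, UnsatisfiableVersionSpecError

    try:
        Spec('libelf@0:2.0').constrain(Spec('libelf@2.1:3'))
    except UnsatisfiableVersionSpecError as e:
        # e.provided and e.required are the two conflicting version lists,
        # and e.constraint_type is the string "version".
        print(e.constraint_type)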
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 3a528f1b16..f010d09c0e 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -1,11 +1,11 @@
import unittest
-import spack.spec
+from spack.spec import Spec
class ConcretizeTest(unittest.TestCase):
def check_concretize(self, abstract_spec):
- abstract = spack.spec.parse_one(abstract_spec)
+ abstract = Spec(abstract_spec)
print abstract
print abstract.concretized()
print abstract.concretized().concrete
diff --git a/lib/spack/spack/test/mock_packages/__init__.py b/lib/spack/spack/test/mock_packages/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/__init__.py
diff --git a/lib/spack/spack/test/mock_packages/callpath.py b/lib/spack/spack/test/mock_packages/callpath.py
new file mode 100644
index 0000000000..958960e0ab
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/callpath.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Callpath(Package):
+ homepage = "https://github.com/tgamblin/callpath"
+ url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
+ md5 = "foobarbaz"
+
+ depends_on("dyninst")
+ depends_on("mpich")
+
+ def install(self, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/lib/spack/spack/test/mock_packages/dyninst.py b/lib/spack/spack/test/mock_packages/dyninst.py
new file mode 100644
index 0000000000..f550cde54f
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/dyninst.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Dyninst(Package):
+ homepage = "https://paradyn.org"
+ url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
+ md5 = "bf03b33375afa66fe0efa46ce3f4b17a"
+
+ depends_on("libelf")
+ depends_on("libdwarf")
+
+ def install(self, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/lib/spack/spack/test/mock_packages/libdwarf.py b/lib/spack/spack/test/mock_packages/libdwarf.py
new file mode 100644
index 0000000000..bae701b38b
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/libdwarf.py
@@ -0,0 +1,55 @@
+from spack import *
+import os
+
+# Only build certain parts of dwarf because the other ones break.
+dwarf_dirs = ['libdwarf', 'dwarfdump2']
+
+class Libdwarf(Package):
+ homepage = "http://reality.sgiweb.org/davea/dwarf.html"
+ url = "http://reality.sgiweb.org/davea/libdwarf-20130207.tar.gz"
+ md5 = "64b42692e947d5180e162e46c689dfbf"
+
+ list_url = "http://reality.sgiweb.org/davea/dwarf.html"
+
+ depends_on("libelf")
+
+
+ def clean(self):
+ for dir in dwarf_dirs:
+ with working_dir(dir):
+ if os.path.exists('Makefile'):
+ make('clean')
+
+
+ def install(self, prefix):
+ # dwarf build does not set arguments for ar properly
+ make.add_default_arg('ARFLAGS=rcs')
+
+ # Dwarf doesn't provide an install, so we have to do it.
+ mkdirp(bin, include, lib, man1)
+
+ with working_dir('libdwarf'):
+ configure("--prefix=%s" % prefix, '--enable-shared')
+ make()
+
+ install('libdwarf.a', lib)
+ install('libdwarf.so', lib)
+ install('libdwarf.h', include)
+ install('dwarf.h', include)
+
+ with working_dir('dwarfdump2'):
+ configure("--prefix=%s" % prefix)
+
+ # This makefile has strings of copy commands that
+ # cause a race in parallel
+ make(parallel=False)
+
+ install('dwarfdump', bin)
+ install('dwarfdump.conf', lib)
+ install('dwarfdump.1', man1)
+
+
+ @platform('macosx_10.8_x86_64')
+ def install(self, prefix):
+ raise UnsupportedPlatformError(
+ "libdwarf doesn't currently build on Mac OS X.")
diff --git a/lib/spack/spack/test/mock_packages/libelf.py b/lib/spack/spack/test/mock_packages/libelf.py
new file mode 100644
index 0000000000..7e3046b174
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/libelf.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class Libelf(Package):
+ homepage = "http://www.mr511.de/software/english.html"
+ url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
+ md5 = "4136d7b4c04df68b686570afa26988ac"
+
+ def install(self, prefix):
+ configure("--prefix=%s" % prefix,
+ "--enable-shared",
+ "--disable-dependency-tracking",
+ "--disable-debug")
+ make()
+
+ # The mkdir commands in libelf's install can fail in parallel
+ make("install", parallel=False)
diff --git a/lib/spack/spack/test/mock_packages/mpich.py b/lib/spack/spack/test/mock_packages/mpich.py
new file mode 100644
index 0000000000..d8cd67d528
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/mpich.py
@@ -0,0 +1,11 @@
+from spack import *
+
+class Mpich(Package):
+ homepage = "http://www.mpich.org"
+ url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
+ md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
+
+ def install(self, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/lib/spack/spack/test/mock_packages/mpileaks.py b/lib/spack/spack/test/mock_packages/mpileaks.py
new file mode 100644
index 0000000000..224557cc52
--- /dev/null
+++ b/lib/spack/spack/test/mock_packages/mpileaks.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Mpileaks(Package):
+ homepage = "http://www.llnl.gov"
+ url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
+ md5 = "foobarbaz"
+
+ depends_on("mpich")
+ depends_on("callpath")
+
+ def install(self, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
new file mode 100644
index 0000000000..a9dd0ec41c
--- /dev/null
+++ b/lib/spack/spack/test/spec_dag.py
@@ -0,0 +1,117 @@
+"""
+These tests check validation of dummy packages. You can find the dummy
+packages directories that these tests use in:
+
+ spack/lib/spack/spack/test/mock_packages
+
+Each test validates conditions with the packages in those directories.
+"""
+import unittest
+
+import spack
+import spack.package
+import spack.packages as packages
+
+from spack.util.lang import new_path, list_modules
+from spack.spec import Spec
+
+mock_packages_path = new_path(spack.module_path, 'test', 'mock_packages')
+
+
+def set_pkg_dep(pkg, spec):
+ """Alters dependence information for a pacakge.
+ Use this to mock up constraints.
+ """
+ spec = Spec(spec)
+ packages.get(pkg).dependencies[spec.name] = spec
+
+
+class ValidationTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ # Use a different packages directory for these tests. We want to use
+ # mocked up packages that don't interfere with the real ones.
+ cls.real_packages_path = spack.packages_path
+ spack.packages_path = mock_packages_path
+
+ # First time through, record the original relationships between packages
+ cls.original_deps = {}
+ for name in list_modules(mock_packages_path):
+ pkg = packages.get(name)
+ cls.original_deps[name] = [
+ spec for spec in pkg.dependencies.values()]
+
+
+ @classmethod
+ def restore(cls):
+ # each time through restore original dependencies & constraints
+ for pkg_name, deps in cls.original_deps.iteritems():
+ packages.get(pkg_name).dependencies.clear()
+ for dep in deps:
+ set_pkg_dep(pkg_name, dep)
+
+ @classmethod
+ def tearDownClass(cls):
+ """Restore the real packages path after any test."""
+ cls.restore()
+ spack.packages_path = cls.real_packages_path
+
+
+ def setUp(self):
+ """Before each test, restore deps between packages to original state."""
+ ValidationTest.restore()
+
+
+ def test_conflicting_package_constraints(self):
+ set_pkg_dep('mpileaks', 'mpich@1.0')
+ set_pkg_dep('callpath', 'mpich@2.0')
+
+ spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.package.InvalidPackageDependencyError,
+ spec.package.validate_dependencies)
+
+
+ def test_conflicting_spec_constraints(self):
+ mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
+ try:
+ mpileaks.package.validate_dependencies()
+ except spack.package.InvalidPackageDependencyError, e:
+ self.fail("validate_dependencies raised an exception: %s", e.message)
+
+ # Normalize then add conflicting constraints to the DAG (this is an
+ # extremely unlikely scenario, but we test for it anyway)
+ mpileaks.normalize()
+ mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
+ mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
+
+ self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
+
+
+ def test_unsatisfiable_version(self):
+ set_pkg_dep('mpileaks', 'mpich@1.0')
+ spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
+
+
+ def test_unsatisfiable_compiler(self):
+ set_pkg_dep('mpileaks', 'mpich%gcc')
+ spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
+
+
+ def test_unsatisfiable_compiler_version(self):
+ set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
+ spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
+
+
+ def test_unsatisfiable_variant(self):
+ set_pkg_dep('mpileaks', 'mpich+debug')
+ spec = Spec('mpileaks ^mpich~debug ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableVariantSpecError, spec.normalize)
+
+
+ def test_unsatisfiable_architecture(self):
+ set_pkg_dep('mpileaks', 'mpich=bgqos_0')
+ spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
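These tests plug into the test-command change at the top of this diff; to run just this module from Python (a sketch using the same call cmd/test.py makes, with the module name assumed to resolve the same way):

    import spack.test
    spack.test.run('spec_dag', verbose=True)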
diff --git a/lib/spack/spack/test/specs.py b/lib/spack/spack/test/spec_syntax.py
index cb8bf79ff8..33534a4c1d 100644
--- a/lib/spack/spack/test/specs.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -61,23 +61,40 @@ class SpecTest(unittest.TestCase):
def check_satisfies(self, lspec, rspec):
- l = spack.spec.parse_one(lspec)
- r = spack.spec.parse_one(rspec)
- self.assertTrue(l.satisfies(r) and r.satisfies(l))
+ l, r = Spec(lspec), Spec(rspec)
+ self.assertTrue(l.satisfies(r))
+ self.assertTrue(r.satisfies(l))
- # These should not raise
- l.constrain(r)
- r.constrain(l)
+ try:
+ l.constrain(r)
+ r.constrain(l)
+ except SpecError, e:
+ self.fail("Got a SpecError in constrain!", e.message)
+
+
+    def assert_unsatisfiable(self, lspec, rspec):
+        l, r = Spec(lspec), Spec(rspec)
+        self.assertFalse(l.satisfies(r))
+        self.assertFalse(r.satisfies(l))
+
+        self.assertRaises(UnsatisfiableSpecError, l.constrain, r)
+        self.assertRaises(UnsatisfiableSpecError, r.constrain, l)
def check_constrain(self, expected, constrained, constraint):
- exp = spack.spec.parse_one(expected)
- constrained = spack.spec.parse_one(constrained)
- constraint = spack.spec.parse_one(constraint)
+ exp = Spec(expected)
+ constrained = Spec(constrained)
+ constraint = Spec(constraint)
constrained.constrain(constraint)
self.assertEqual(exp, constrained)
+ def check_invalid_constraint(self, constrained, constraint):
+ constrained = Spec(constrained)
+ constraint = Spec(constraint)
+ self.assertRaises(UnsatisfiableSpecError, constrained.constrain, constraint)
+
+
# ================================================================================
# Parse checks
# ===============================================================================
@@ -145,7 +162,28 @@ class SpecTest(unittest.TestCase):
def test_constrain(self):
- self.check_constrain('libelf@0:1', 'libelf', 'libelf@0:1')
+ self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
+ self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6',
+ 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7')
+
+ self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
+ self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')
+
+ self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
+ self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
+
+ self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
+ self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
+
+
+ def test_invalid_constraint(self):
+ self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
+ self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
+
+ self.check_invalid_constraint('libelf+debug', 'libelf~debug')
+ self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
+
+ self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
# ================================================================================
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 99b52ea299..d44420ab72 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -1,4 +1,5 @@
import os
+import re
import subprocess
import spack.tty as tty
diff --git a/lib/spack/spack/util/lang.py b/lib/spack/spack/util/lang.py
index 92532c109f..90546c3f2f 100644
--- a/lib/spack/spack/util/lang.py
+++ b/lib/spack/spack/util/lang.py
@@ -5,6 +5,9 @@ import functools
import inspect
from spack.util.filesystem import new_path
+# Ignore emacs backups when listing modules
+ignore_modules = [r'^\.#', '~$']
+
def has_method(cls, name):
for base in inspect.getmro(cls):
@@ -27,21 +30,24 @@ def memoized(obj):
return memoizer
-def list_modules(directory):
+def list_modules(directory, **kwargs):
"""Lists all of the modules, excluding __init__.py, in
a particular directory."""
+ list_directories = kwargs.setdefault('directories', True)
+
for name in os.listdir(directory):
if name == '__init__.py':
continue
path = new_path(directory, name)
- if os.path.isdir(path):
+ if list_directories and os.path.isdir(path):
init_py = new_path(path, '__init__.py')
if os.path.isfile(init_py):
yield name
elif name.endswith('.py'):
- yield re.sub('.py$', '', name)
+ if not any(re.search(pattern, name) for pattern in ignore_modules):
+ yield re.sub('.py$', '', name)
def key_ordering(cls):
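Usage of the new directories keyword and ignore list, matching the call sites in cmd/test.py above (a small sketch):

    import spack
    from spack.util.lang import list_modules

    # Emacs backup/lock files are skipped, and directories=False keeps
    # subdirectories out of the listing -- this is what `spack test` now uses.
    for name in list_modules(spack.test_path, directories=False):
        print(name)   # e.g. concretize, spec_dag, spec_syntax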