From 3b1898b8e479fc1e7d9b71a57f625f36485b1ac0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 26 Apr 2015 13:12:02 -0700 Subject: Fix SPACK-40: Finish adding variant directive. - Variants are now declarable in packages using the variant() directive. - Variants are checked - you can't just ask for a random variant, it has to be declared. - conditional logic (@when, if, '+debug' in spec, etc.) still required in package to implement variant. --- var/spack/mock_packages/mpich/package.py | 3 +++ 1 file changed, 3 insertions(+) (limited to 'var') diff --git a/var/spack/mock_packages/mpich/package.py b/var/spack/mock_packages/mpich/package.py index 75a939a892..f77d3efc5d 100644 --- a/var/spack/mock_packages/mpich/package.py +++ b/var/spack/mock_packages/mpich/package.py @@ -30,6 +30,9 @@ class Mpich(Package): list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 + variant('debug', default=False, + description="Compile MPICH with debug flags.") + version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') version('3.0.3', 'foobarbaz') version('3.0.2', 'foobarbaz') -- cgit v1.2.3-70-g09d2 From 43e546559285621e439d30df974fe9b8d49c5381 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 10 May 2015 17:56:27 -0700 Subject: Fix bug in directory layout hidden files() --- lib/spack/spack/directory_layout.py | 2 +- lib/spack/spack/package.py | 5 ++++- var/spack/packages/python/package.py | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) (limited to 'var') diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index c2e2ea4deb..fe02fff3b0 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -182,7 +182,7 @@ class YamlDirectoryLayout(DirectoryLayout): @property def hidden_file_paths(self): - return (self.metadata_dir) + return (self.metadata_dir,) def relative_path_for_spec(self, spec): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b0bb1fb7bc..9ddd55f5c0 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -984,8 +984,10 @@ class Package(object): self._sanity_check_extension() force = kwargs.get('force', False) - spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec) + spack.install_layout.check_extension_conflict( + self.extendee_spec, self.spec) + # Activate any package dependencies that are also extensions. if not force: for spec in self.spec.traverse(root=False): if spec.package.extends(self.extendee_spec): @@ -1016,6 +1018,7 @@ class Package(object): conflict = tree.find_conflict(self.prefix, ignore=ignore) if conflict: raise ExtensionConflictError(conflict) + tree.merge(self.prefix, ignore=ignore) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 31a12ea653..797900527d 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -139,7 +139,9 @@ class Python(Package): def activate(self, ext_pkg, **args): - args.update(ignore=self.python_ignore(ext_pkg, args)) + ignore=self.python_ignore(ext_pkg, args) + args.update(ignore=ignore) + super(Python, self).activate(ext_pkg, **args) exts = spack.install_layout.extension_map(self.spec) -- cgit v1.2.3-70-g09d2 From cd5fa128c5d65bc169cec68b4333a67e70dacc4b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 12 May 2015 09:56:59 -0700 Subject: Work on SPACK-41: Optional dependencies work for simple conditions. 
- Can depend conditionally based on variant, compiler, arch, deps, etc - normalize() is not iterative yet: no chaining depends_ons - really need a SAT solver, but iterative will at least handle simple cases. - Added "strict" option to Spec.satisfies() - strict checks that ALL of other's constraints are met (not just the ones self shares) - Consider splitting these out into two methods: could_satisfy() and satisfies() - didn't do this yet as it would require changing code that uses satisfies() - Changed semantics of __contains__ to use strict satisfaction (SPACK-56) - Added tests for optional dependencies. - The constrain() method on Specs, compilers, versions, etc. now returns whether the spec changed as a result of the call. --- lib/spack/spack/directives.py | 67 ++++--- lib/spack/spack/package.py | 43 ---- lib/spack/spack/spec.py | 218 ++++++++++++++------- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/mock_packages_test.py | 2 +- lib/spack/spack/test/optional_deps.py | 86 ++++++++ lib/spack/spack/test/spec_dag.py | 12 +- lib/spack/spack/version.py | 30 ++- var/spack/mock_packages/a/package.py | 12 ++ var/spack/mock_packages/b/package.py | 12 ++ var/spack/mock_packages/c/package.py | 12 ++ var/spack/mock_packages/e/package.py | 12 ++ .../mock_packages/optional-dep-test-2/package.py | 18 ++ .../mock_packages/optional-dep-test/package.py | 29 +++ 14 files changed, 397 insertions(+), 159 deletions(-) create mode 100644 lib/spack/spack/test/optional_deps.py create mode 100644 var/spack/mock_packages/a/package.py create mode 100644 var/spack/mock_packages/b/package.py create mode 100644 var/spack/mock_packages/c/package.py create mode 100644 var/spack/mock_packages/e/package.py create mode 100644 var/spack/mock_packages/optional-dep-test-2/package.py create mode 100644 var/spack/mock_packages/optional-dep-test/package.py (limited to 'var') diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 5c17fe4044..9297d6dac3 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -115,10 +115,7 @@ class directive(object): """ - def __init__(self, **kwargs): - # dict argument allows directives to have storage on the package. - dicts = kwargs.get('dicts', None) - + def __init__(self, dicts=None): if isinstance(dicts, basestring): dicts = (dicts,) elif type(dicts) not in (list, tuple): @@ -154,13 +151,14 @@ class directive(object): return wrapped -@directive(dicts='versions') +@directive('versions') def version(pkg, ver, checksum=None, **kwargs): """Adds a version and metadata describing how to fetch it. Metadata is just stored as a dict in the package's versions dictionary. Package must turn it into a valid fetch strategy later. """ + # TODO: checksum vs md5 distinction is confusing -- fix this. # special case checksum for backward compatibility if checksum: kwargs['md5'] = checksum @@ -169,18 +167,29 @@ def version(pkg, ver, checksum=None, **kwargs): pkg.versions[Version(ver)] = kwargs -@directive(dicts='dependencies') -def depends_on(pkg, *specs): - """Adds a dependencies local variable in the locals of - the calling class, based on args. 
""" - for string in specs: - for spec in spack.spec.parse(string): - if pkg.name == spec.name: - raise CircularReferenceError('depends_on', pkg.name) - pkg.dependencies[spec.name] = spec +def _depends_on(pkg, spec, when=None): + if when is None: + when = pkg.name + when_spec = parse_anonymous_spec(when, pkg.name) + + dep_spec = Spec(spec) + if pkg.name == dep_spec.name: + raise CircularReferenceError('depends_on', pkg.name) + conditions = pkg.dependencies.setdefault(dep_spec.name, {}) + if when_spec in conditions: + conditions[when_spec].constrain(dep_spec, deps=False) + else: + conditions[when_spec] = dep_spec -@directive(dicts=('extendees', 'dependencies')) + +@directive('dependencies') +def depends_on(pkg, spec, when=None): + """Creates a dict of deps with specs defining when they apply.""" + _depends_on(pkg, spec, when=when) + + +@directive(('extendees', 'dependencies')) def extends(pkg, spec, **kwargs): """Same as depends_on, but dependency is symlinked into parent prefix. @@ -198,14 +207,12 @@ def extends(pkg, spec, **kwargs): if pkg.extendees: raise DirectiveError("Packages can extend at most one other package.") - spec = Spec(spec) - if pkg.name == spec.name: - raise CircularReferenceError('extends', pkg.name) - pkg.dependencies[spec.name] = spec - pkg.extendees[spec.name] = (spec, kwargs) + when = kwargs.pop('when', pkg.name) + _depends_on(pkg, spec, when=when) + pkg.extendees[spec] = (Spec(spec), kwargs) -@directive(dicts='provided') +@directive('provided') def provides(pkg, *specs, **kwargs): """Allows packages to provide a virtual dependency. If a package provides 'mpi', other packages can declare that they depend on "mpi", and spack @@ -221,17 +228,17 @@ def provides(pkg, *specs, **kwargs): pkg.provided[provided_spec] = provider_spec -@directive(dicts='patches') -def patch(pkg, url_or_filename, **kwargs): +@directive('patches') +def patch(pkg, url_or_filename, level=1, when=None): """Packages can declare patches to apply to source. You can optionally provide a when spec to indicate that a particular patch should only be applied when the package's spec meets certain conditions (e.g. a particular version). """ - level = kwargs.get('level', 1) - when = kwargs.get('when', pkg.name) - + if when is None: + when = pkg.name when_spec = parse_anonymous_spec(when, pkg.name) + if when_spec not in pkg.patches: pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)] else: @@ -240,13 +247,13 @@ def patch(pkg, url_or_filename, **kwargs): pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level)) -@directive(dicts='variants') -def variant(pkg, name, **kwargs): +@directive('variants') +def variant(pkg, name, default=False, description=""): """Define a variant for the package. 
Packager can specify a default value (on or off) as well as a text description.""" - default = bool(kwargs.get('default', False)) - description = str(kwargs.get('description', "")).strip() + default = bool(default) + description = str(description).strip() if not re.match(spack.spec.identifier_re, name): raise DirectiveError("Invalid variant name in %s: '%s'" % (pkg.name, name)) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index ea3b46088a..452544be49 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -50,7 +50,6 @@ from llnl.util.filesystem import * from llnl.util.lang import * import spack -import spack.spec import spack.error import spack.compilers import spack.mirror @@ -540,41 +539,6 @@ class Package(object): yield pkg - def validate_dependencies(self): - """Ensure that this package and its dependencies all have consistent - constraints on them. - - NOTE that this will NOT find sanity problems through a virtual - dependency. Virtual deps complicate the problem because we - don't know in advance which ones conflict with others in the - dependency DAG. If there's more than one virtual dependency, - it's a full-on SAT problem, so hold off on this for now. - The vdeps are actually skipped in preorder_traversal, so see - that for details. - - TODO: investigate validating virtual dependencies. - """ - # This algorithm just attempts to merge all the constraints on the same - # package together, loses information about the source of the conflict. - # What we'd really like to know is exactly which two constraints - # conflict, but that algorithm is more expensive, so we'll do it - # the simple, less informative way for now. - merged = spack.spec.DependencyMap() - - try: - for pkg in self.preorder_traversal(): - for name, spec in pkg.dependencies.iteritems(): - if name not in merged: - merged[name] = spec.copy() - else: - merged[name].constrain(spec) - - except spack.spec.UnsatisfiableSpecError, e: - raise InvalidPackageDependencyError( - "Package %s has inconsistent dependency constraints: %s" - % (self.name, e.message)) - - def provides(self, vpkg_name): """True if this package provides a virtual package with the specified name.""" return vpkg_name in self.provided @@ -1198,13 +1162,6 @@ class PackageError(spack.error.SpackError): super(PackageError, self).__init__(message, long_msg) -class InvalidPackageDependencyError(PackageError): - """Raised when package specification is inconsistent with requirements of - its dependencies.""" - def __init__(self, message): - super(InvalidPackageDependencyError, self).__init__(message) - - class PackageVersionError(PackageError): """Raised when a version URL cannot automatically be determined.""" def __init__(self, version): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index f2625ae596..69b0a70445 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -222,20 +222,24 @@ class CompilerSpec(object): return CompilerSpec(compiler_spec_like) - def satisfies(self, other): + def satisfies(self, other, strict=False): other = self._autospec(other) return (self.name == other.name and - self.versions.satisfies(other.versions)) + self.versions.satisfies(other.versions, strict=strict)) def constrain(self, other): + """Intersect self's versions with other. + + Return whether the CompilerSpec changed. + """ other = self._autospec(other) # ensure that other will actually constrain this spec. 
if not other.satisfies(self): raise UnsatisfiableCompilerSpecError(other, self) - self.versions.intersect(other.versions) + return self.versions.intersect(other.versions) @property @@ -316,8 +320,8 @@ class VariantMap(HashableMap): self.spec = spec - def satisfies(self, other): - if self.spec._concrete: + def satisfies(self, other, strict=False): + if strict or self.spec._concrete: return all(k in self and self[k].enabled == other[k].enabled for k in other) else: @@ -326,17 +330,25 @@ class VariantMap(HashableMap): def constrain(self, other): + """Add all variants in other that aren't in self to self. + + Raises an error if any common variants don't match. + Return whether the spec changed. + """ if other.spec._concrete: for k in self: if k not in other: raise UnsatisfiableVariantSpecError(self[k], '') + changed = False for k in other: if k in self: if self[k].enabled != other[k].enabled: raise UnsatisfiableVariantSpecError(self[k], other[k]) else: self[k] = other[k].copy() + changed =True + return changed @property def concrete(self): @@ -867,6 +879,59 @@ class Spec(object): self._add_dependency(dep) + def _evaluate_dependency_conditions(self, name): + """Evaluate all the conditions on a dependency with this name. + + If the package depends on in this configuration, return + the dependency. If no conditions are True (and we don't + depend on it), return None. + """ + pkg = spack.db.get(self.name) + conditions = pkg.dependencies[name] + + # evaluate when specs to figure out constraints on the dependency. + dep = None + for when_spec, dep_spec in conditions.items(): + sat = self.satisfies(when_spec, strict=True) +# print self, "satisfies", when_spec, ":", sat + if sat: + if dep is None: + dep = Spec(name) + try: + dep.constrain(dep_spec) + except UnsatisfiableSpecError, e: + e.message = ("Conflicting conditional dependencies on package " + "%s for spec %s" % (self.name, self)) + raise e + return dep + + + def _find_provider(self, vdep, provider_index): + """Find provider for a virtual spec in the provider index. + Raise an exception if there is a conflicting virtual + dependency already in this spec. + """ + assert(vdep.virtual) + providers = provider_index.providers_for(vdep) + + # If there is a provider for the vpkg, then use that instead of + # the virtual package. + if providers: + # Can't have multiple providers for the same thing in one spec. + if len(providers) > 1: + raise MultipleProviderError(vdep, providers) + return providers[0] + else: + # The user might have required something insufficient for + # pkg_dep -- so we'll get a conflict. e.g., user asked for + # mpi@:1.1 but some package required mpi@2.1:. + required = provider_index.providers_for(vdep.name) + if len(required) > 1: + raise MultipleProviderError(vdep, required) + elif required: + raise UnsatisfiableProviderSpecError(required[0], vdep) + + def _normalize_helper(self, visited, spec_deps, provider_index): """Recursive helper function for _normalize.""" if self.name in visited: @@ -881,34 +946,22 @@ class Spec(object): # Combine constraints from package dependencies with # constraints on the spec's dependencies. pkg = spack.db.get(self.name) - for name, pkg_dep in self.package.dependencies.items(): + for name in pkg.dependencies: + # If pkg_dep is None, no conditions matched and we don't depend on this. 
+ pkg_dep = self._evaluate_dependency_conditions(name) + if not pkg_dep: + continue + # If it's a virtual dependency, try to find a provider if pkg_dep.virtual: - providers = provider_index.providers_for(pkg_dep) - - # If there is a provider for the vpkg, then use that instead of - # the virtual package. - if providers: - # Can't have multiple providers for the same thing in one spec. - if len(providers) > 1: - raise MultipleProviderError(pkg_dep, providers) - - pkg_dep = providers[0] - name = pkg_dep.name - - else: - # The user might have required something insufficient for - # pkg_dep -- so we'll get a conflict. e.g., user asked for - # mpi@:1.1 but some package required mpi@2.1:. - required = provider_index.providers_for(name) - if len(required) > 1: - raise MultipleProviderError(pkg_dep, required) - elif required: - raise UnsatisfiableProviderSpecError( - required[0], pkg_dep) + visited.add(pkg_dep.name) + provider = self._find_provider(pkg_dep, provider_index) + if provider: + pkg_dep = provider + name = provider.name else: - # if it's a real dependency, check whether it provides something - # already required in the spec. + # if it's a real dependency, check whether it provides + # something already required in the spec. index = ProviderIndex([pkg_dep], restrict=True) for vspec in (v for v in spec_deps.values() if v.virtual): if index.providers_for(vspec): @@ -966,19 +1019,14 @@ class Spec(object): # Ensure first that all packages & compilers in the DAG exist. self.validate_names() - # Ensure that the package & dep descriptions are consistent & sane - if not self.virtual: - self.package.validate_dependencies() - # Get all the dependencies into one DependencyMap spec_deps = self.flat_dependencies(copy=False) - # Figure out which of the user-provided deps provide virtual deps. - # Remove virtual deps that are already provided by something in the spec - spec_packages = [d.package for d in spec_deps.values() if not d.virtual] - + # Initialize index of virtual dependency providers index = ProviderIndex(spec_deps.values(), restrict=True) + # traverse the package DAG and fill out dependencies according + # to package files & their 'when' specs visited = set() self._normalize_helper(visited, spec_deps, index) @@ -986,12 +1034,6 @@ class Spec(object): # actually deps of this package. Raise an error. extra = set(spec_deps.keys()).difference(visited) - # Also subtract out all the packags that provide a needed vpkg - vdeps = [v for v in self.package.virtual_dependencies()] - - vpkg_providers = index.providers_for(*vdeps) - extra.difference_update(p.name for p in vpkg_providers) - # Anything left over is not a valid part of the spec. if extra: raise InvalidDependencyException( @@ -1030,6 +1072,10 @@ class Spec(object): def constrain(self, other, **kwargs): + """Merge the constraints of other with self. + + Returns True if the spec changed as a result, False if not. 
+ """ other = self._autospec(other) constrain_deps = kwargs.get('deps', True) @@ -1055,18 +1101,22 @@ class Spec(object): elif self.compiler is None: self.compiler = other.compiler - self.versions.intersect(other.versions) - self.variants.constrain(other.variants) + changed = False + changed |= self.versions.intersect(other.versions) + changed |= self.variants.constrain(other.variants) + changed |= bool(self.architecture) self.architecture = self.architecture or other.architecture if constrain_deps: - self._constrain_dependencies(other) + changed |= self._constrain_dependencies(other) + + return changed def _constrain_dependencies(self, other): """Apply constraints of other spec's dependencies to this spec.""" if not self.dependencies or not other.dependencies: - return + return False # TODO: might want more detail than this, e.g. specific deps # in violation. if this becomes a priority get rid of this @@ -1075,12 +1125,17 @@ class Spec(object): raise UnsatisfiableDependencySpecError(other, self) # Handle common first-order constraints directly + changed = False for name in self.common_dependencies(other): - self[name].constrain(other[name], deps=False) + changed |= self[name].constrain(other[name], deps=False) + # Update with additional constraints from other spec for name in other.dep_difference(self): self._add_dependency(other[name].copy()) + changed = True + + return changed def common_dependencies(self, other): @@ -1114,46 +1169,72 @@ class Spec(object): return parse_anonymous_spec(spec_like, self.name) - def satisfies(self, other, **kwargs): + def satisfies(self, other, deps=True, strict=False): + """Determine if this spec satisfies all constraints of another. + + There are two senses for satisfies: + + * `loose` (default): the absence of a constraint in self + implies that it *could* be satisfied by other, so we only + check that there are no conflicts with other for + constraints that this spec actually has. + + * `strict`: strict means that we *must* meet all the + constraints specified on other. + """ other = self._autospec(other) - satisfy_deps = kwargs.get('deps', True) # First thing we care about is whether the name matches if self.name != other.name: return False - # All these attrs have satisfies criteria of their own, - # but can be None to indicate no constraints. - for s, o in ((self.versions, other.versions), - (self.compiler, other.compiler)): - if s and o and not s.satisfies(o): + if self.versions and other.versions: + if not self.versions.satisfies(other.versions, strict=strict): return False + elif strict and (self.versions or other.versions): + return False - if not self.variants.satisfies(other.variants): + # None indicates no constraints when not strict. + if self.compiler and other.compiler: + if not self.compiler.satisfies(other.compiler, strict=strict): + return False + elif strict and (other.compiler and not self.compiler): + return False + + if not self.variants.satisfies(other.variants, strict=strict): return False # Architecture satisfaction is currently just string equality. - # Can be None for unconstrained, though. - if (self.architecture and other.architecture and - self.architecture != other.architecture): + # If not strict, None means unconstrained. + if self.architecture and other.architecture: + if self.architecture != other.architecture: + return False + elif strict and (other.architecture and not self.architecture): return False # If we need to descend into dependencies, do it, otherwise we're done. 
- if satisfy_deps: - return self.satisfies_dependencies(other) + if deps: + return self.satisfies_dependencies(other, strict=strict) else: return True - def satisfies_dependencies(self, other): + def satisfies_dependencies(self, other, strict=False): """This checks constraints on common dependencies against each other.""" - # if either spec doesn't restrict dependencies then both are compatible. - if not self.dependencies or not other.dependencies: + if strict: + if other.dependencies and not self.dependencies: + return False + + if not all(dep in self.dependencies for dep in other.dependencies): + return False + + elif not self.dependencies or not other.dependencies: + # if either spec doesn't restrict dependencies then both are compatible. return True # Handle first-order constraints directly for name in self.common_dependencies(other): - if not self[name].satisfies(other[name]): + if not self[name].satisfies(other[name], deps=False): return False # For virtual dependencies, we need to dig a little deeper. @@ -1255,7 +1336,7 @@ class Spec(object): """ spec = self._autospec(spec) for s in self.traverse(): - if s.satisfies(spec): + if s.satisfies(spec, strict=True): return True return False @@ -1411,7 +1492,8 @@ class Spec(object): elif compiler: if c == '@': - if self.compiler and self.compiler.versions: + if (self.compiler and self.compiler.versions and + self.compiler.versions != _any_version): write(c + str(self.compiler.versions), '%') elif c == '$': escape = True diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 77c8bd3191..7ff512c370 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -53,7 +53,8 @@ test_names = ['versions', 'url_extrapolate', 'cc', 'link_tree', - 'spec_yaml'] + 'spec_yaml', + 'optional_deps'] def list_tests(): diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index e948376039..09fb9ebe30 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -35,7 +35,7 @@ def set_pkg_dep(pkg, spec): Use this to mock up constraints. """ spec = Spec(spec) - spack.db.get(pkg).dependencies[spec.name] = spec + spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec } class MockPackagesTest(unittest.TestCase): diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py new file mode 100644 index 0000000000..4d8f86a33e --- /dev/null +++ b/lib/spack/spack/test/optional_deps.py @@ -0,0 +1,86 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import unittest
+
+import spack
+from spack.spec import Spec, CompilerSpec
+from spack.test.mock_packages_test import *
+
+class ConcretizeTest(MockPackagesTest):
+
+    def check_normalize(self, spec_string, expected):
+        spec = Spec(spec_string)
+        spec.normalize()
+        self.assertEqual(spec, expected)
+        self.assertTrue(spec.eq_dag(expected))
+
+
+    def test_normalize_simple_conditionals(self):
+        self.check_normalize('optional-dep-test', Spec('optional-dep-test'))
+        self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a'))
+
+        self.check_normalize('optional-dep-test+a',
+                             Spec('optional-dep-test+a', Spec('a')))
+
+        self.check_normalize('optional-dep-test@1.1',
+                             Spec('optional-dep-test@1.1', Spec('b')))
+
+        self.check_normalize('optional-dep-test%intel',
+                             Spec('optional-dep-test%intel', Spec('c')))
+
+        self.check_normalize('optional-dep-test%intel@64.1',
+                             Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
+
+        self.check_normalize('optional-dep-test%intel@64.1.2',
+                             Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
+
+        self.check_normalize('optional-dep-test%clang@35',
+                             Spec('optional-dep-test%clang@35', Spec('e')))
+
+
+    def test_multiple_conditionals(self):
+        self.check_normalize('optional-dep-test+a@1.1',
+                             Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
+
+        self.check_normalize('optional-dep-test+a%intel',
+                             Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
+
+        self.check_normalize('optional-dep-test@1.1%intel',
+                             Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
+
+        self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a',
+                             Spec('optional-dep-test@1.1%intel@64.1.2+a',
+                                  Spec('b'), Spec('a'), Spec('c'), Spec('d')))
+
+        self.check_normalize('optional-dep-test@1.1%clang@36.5+a',
+                             Spec('optional-dep-test@1.1%clang@36.5+a',
+                                  Spec('b'), Spec('a'), Spec('e')))
+
+
+    def test_chained_mpi(self):
+        self.check_normalize('optional-dep-test-2+mpi',
+                             Spec('optional-dep-test-2+mpi',
+                                  Spec('optional-dep-test+mpi',
+                                       Spec('mpi'))))
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index ecbc46981c..549f829d3e 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -44,8 +44,11 @@ class SpecDagTest(MockPackagesTest):
         set_pkg_dep('callpath', 'mpich@2.0')
         spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
 
-        self.assertRaises(spack.package.InvalidPackageDependencyError,
-                          spec.package.validate_dependencies)
+
+        # TODO: try to do something to show that the issue was with
+        # TODO: the user's input or with package inconsistencies.
+ self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, + spec.normalize) def test_preorder_node_traversal(self): @@ -140,11 +143,6 @@ class SpecDagTest(MockPackagesTest): def test_conflicting_spec_constraints(self): mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf') - try: - mpileaks.package.validate_dependencies() - except spack.package.InvalidPackageDependencyError, e: - self.fail("validate_dependencies raised an exception: %s" - % e.message) # Normalize then add conflicting constraints to the DAG (this is an # extremely unlikely scenario, but we test for it anyway) diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index 61b1e328ce..35db05e018 100644 --- a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -93,12 +93,12 @@ def coerce_versions(a, b): def coerced(method): """Decorator that ensures that argument types of a method are coerced.""" @wraps(method) - def coercing_method(a, b): + def coercing_method(a, b, *args, **kwargs): if type(a) == type(b) or a is None or b is None: - return method(a, b) + return method(a, b, *args, **kwargs) else: ca, cb = coerce_versions(a, b) - return getattr(ca, method.__name__)(cb) + return getattr(ca, method.__name__)(cb, *args, **kwargs) return coercing_method @@ -607,15 +607,22 @@ class VersionList(object): @coerced - def satisfies(self, other): - """A VersionList satisfies another if some version in the list would - would satisfy some version in the other list. This uses essentially - the same algorithm as overlaps() does for VersionList, but it calls - satisfies() on member Versions and VersionRanges. + def satisfies(self, other, strict=False): + """A VersionList satisfies another if some version in the list + would satisfy some version in the other list. This uses + essentially the same algorithm as overlaps() does for + VersionList, but it calls satisfies() on member Versions + and VersionRanges. + + If strict is specified, this version list must lie entirely + *within* the other in order to satisfy it. """ if not other or not self: return False + if strict: + return self in other + s = o = 0 while s < len(self) and o < len(other): if self[s].satisfies(other[o]): @@ -652,9 +659,14 @@ class VersionList(object): @coerced def intersect(self, other): + """Intersect this spec's list with other. 
+ + Return True if the spec changed as a result; False otherwise + """ isection = self.intersection(other) + changed = (isection.versions != self.versions) self.versions = isection.versions - + return changed @coerced def __contains__(self, other): diff --git a/var/spack/mock_packages/a/package.py b/var/spack/mock_packages/a/package.py new file mode 100644 index 0000000000..fa63c08df0 --- /dev/null +++ b/var/spack/mock_packages/a/package.py @@ -0,0 +1,12 @@ +from spack import * + +class A(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/b/package.py b/var/spack/mock_packages/b/package.py new file mode 100644 index 0000000000..cb88aa2157 --- /dev/null +++ b/var/spack/mock_packages/b/package.py @@ -0,0 +1,12 @@ +from spack import * + +class B(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/b-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/c/package.py b/var/spack/mock_packages/c/package.py new file mode 100644 index 0000000000..f51b913fa9 --- /dev/null +++ b/var/spack/mock_packages/c/package.py @@ -0,0 +1,12 @@ +from spack import * + +class C(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/e/package.py b/var/spack/mock_packages/e/package.py new file mode 100644 index 0000000000..76c6b64c7f --- /dev/null +++ b/var/spack/mock_packages/e/package.py @@ -0,0 +1,12 @@ +from spack import * + +class E(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/e-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/optional-dep-test-2/package.py b/var/spack/mock_packages/optional-dep-test-2/package.py new file mode 100644 index 0000000000..ef0587588e --- /dev/null +++ b/var/spack/mock_packages/optional-dep-test-2/package.py @@ -0,0 +1,18 @@ +from spack import * + +class OptionalDepTest2(Package): + """Depends on the optional-dep-test package""" + + homepage = "http://www.example.com" + url = "http://www.example.com/optional-dep-test-2-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + variant('odt', default=False) + variant('mpi', default=False) + + depends_on('optional-dep-test', when='+odt') + depends_on('optional-dep-test+mpi', when='+mpi') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/optional-dep-test/package.py b/var/spack/mock_packages/optional-dep-test/package.py new file mode 100644 index 0000000000..bb57576ca9 --- /dev/null +++ b/var/spack/mock_packages/optional-dep-test/package.py @@ -0,0 +1,29 @@ +from spack import * + +class OptionalDepTest(Package): + """Description""" + + homepage = "http://www.example.com" + url = "http://www.example.com/optional_dep_test-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + version('1.1', '0123456789abcdef0123456789abcdef') + + variant('a', default=False) + variant('f', default=False) + variant('mpi', 
default=False) + + depends_on('a', when='+a') + depends_on('b', when='@1.1') + depends_on('c', when='%intel') + depends_on('d', when='%intel@64.1') + depends_on('e', when='%clang@34:40') + + depends_on('f', when='+f') + depends_on('g', when='^f') + depends_on('mpi', when='^g') + + depends_on('mpi', when='+mpi') + + def install(self, spec, prefix): + pass -- cgit v1.2.3-70-g09d2 From 46b91ddf57beb54f05fc6a3cc70283d4b17d1bd3 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 18 May 2015 15:19:20 -0700 Subject: YAML config files for compilers and mirrors --- .gitignore | 1 + lib/spack/spack/cmd/compiler.py | 2 +- lib/spack/spack/cmd/config.py | 31 +- lib/spack/spack/cmd/mirror.py | 18 +- lib/spack/spack/compilers/__init__.py | 43 +- lib/spack/spack/config.py | 719 +++++++++------------ lib/spack/spack/stage.py | 8 +- lib/spack/spack/test/config.py | 63 +- lib/spack/spack/test/mock_packages_test.py | 27 +- var/spack/mock_configs/site_spackconfig | 12 - .../mock_configs/site_spackconfig/compilers.yaml | 12 + var/spack/mock_configs/user_spackconfig | 0 12 files changed, 395 insertions(+), 541 deletions(-) delete mode 100644 var/spack/mock_configs/site_spackconfig create mode 100644 var/spack/mock_configs/site_spackconfig/compilers.yaml delete mode 100644 var/spack/mock_configs/user_spackconfig (limited to 'var') diff --git a/.gitignore b/.gitignore index 828fb04e7d..1c6ca4c99e 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ *~ .DS_Store .idea +/etc/spack/* /etc/spackconfig /share/spack/dotkit /share/spack/modules diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index e37f44b3b7..2a64dc914e 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -68,7 +68,7 @@ def compiler_add(args): spack.compilers.add_compilers_to_config('user', *compilers) n = len(compilers) tty.msg("Added %d new compiler%s to %s" % ( - n, 's' if n > 1 else '', spack.config.get_filename('user'))) + n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers'))) colify(reversed(sorted(c.spec for c in compilers)), indent=4) else: tty.msg("Found no new compilers") diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index 283bfc19b9..8c18f88b64 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -43,42 +43,27 @@ def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command') - set_parser = sp.add_parser('set', help='Set configuration values.') - set_parser.add_argument('key', help="Key to set value for.") - set_parser.add_argument('value', nargs='?', default=None, - help="Value to associate with key") - - get_parser = sp.add_parser('get', help='Get configuration values.') - get_parser.add_argument('key', help="Key to get value for.") + get_parser = sp.add_parser('get', help='Print configuration values.') + get_parser.add_argument('category', help="Configuration category to print.") edit_parser = sp.add_parser('edit', help='Edit configuration file.') - - -def config_set(args): - # default scope for writing is 'user' - if not args.scope: - args.scope = 'user' - - config = spack.config.get_config(args.scope) - config.set_value(args.key, args.value) - config.write() + edit_parser.add_argument('category', help="Configuration category to edit") def config_get(args): - config = spack.config.get_config(args.scope) - print config.get_value(args.key) + spack.config.print_category(args.category) def config_edit(args): if not args.scope: args.scope = 
'user' - config_file = spack.config.get_filename(args.scope) + if not args.category: + args.category = None + config_file = spack.config.get_config_scope_filename(args.scope, args.category) spack.editor(config_file) def config(parser, args): - action = { 'set' : config_set, - 'get' : config_get, + action = { 'get' : config_get, 'edit' : config_edit } action[args.config_command](args) - diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 22838e1344..02a1467ee6 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -75,27 +75,22 @@ def mirror_add(args): if url.startswith('/'): url = 'file://' + url - config = spack.config.get_config('user') - config.set_value('mirror', args.name, 'url', url) - config.write() + mirror_dict = { args.name : url } + spack.config.add_to_mirror_config({ args.name : url }) def mirror_remove(args): """Remove a mirror by name.""" - config = spack.config.get_config('user') name = args.name - if not config.has_named_section('mirror', name): + rmd_something = spack.config.remove_from_config('mirrors', name) + if not rmd_something: tty.die("No such mirror: %s" % name) - config.remove_named_section('mirror', name) - config.write() def mirror_list(args): """Print out available mirrors to the console.""" - config = spack.config.get_config() - sec_names = config.get_section_names('mirror') - + sec_names = spack.config.get_mirror_config() if not sec_names: tty.msg("No mirrors configured.") return @@ -103,8 +98,7 @@ def mirror_list(args): max_len = max(len(s) for s in sec_names) fmt = "%%-%ds%%s" % (max_len + 4) - for name in sec_names: - val = config.get_value('mirror', name, 'url') + for name, val in sec_names.iteritems(): print fmt % (name, val) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 8cb11c3208..b7b021a1ac 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -60,24 +60,25 @@ def _get_config(): first.""" # If any configuration file has compilers, just stick with the # ones already configured. - config = spack.config.get_config() + config = spack.config.get_compilers_config() existing = [spack.spec.CompilerSpec(s) - for s in config.get_section_names('compiler')] + for s in config] if existing: return config compilers = find_compilers(*get_path('PATH')) - new_compilers = [ - c for c in compilers if c.spec not in existing] - add_compilers_to_config('user', *new_compilers) + add_compilers_to_config('user', *compilers) # After writing compilers to the user config, return a full config # from all files. - return spack.config.get_config(refresh=True) + return spack.config.get_compilers_config() -@memoized +_cached_default_compiler = None def default_compiler(): + global _cached_default_compiler + if _cached_default_compiler: + return _cached_default_compiler versions = [] for name in _default_order: # TODO: customize order. 
versions = find(name) @@ -86,7 +87,8 @@ def default_compiler(): if not versions: raise NoCompilersError() - return sorted(versions)[-1] + _cached_default_compiler = sorted(versions)[-1] + return _cached_default_compiler def find_compilers(*path): @@ -122,19 +124,17 @@ def find_compilers(*path): def add_compilers_to_config(scope, *compilers): - config = spack.config.get_config(scope) + compiler_config_tree = {} for compiler in compilers: - add_compiler(config, compiler) - config.write() - - -def add_compiler(config, compiler): - def setup_field(cspec, name, exe): - path = exe if exe else "None" - config.set_value('compiler', cspec, name, path) + compiler_entry = {} + for c in _required_instance_vars: + val = getattr(compiler, c) + if not val: + val = "None" + compiler_entry[c] = val + compiler_config_tree[str(compiler.spec)] = compiler_entry + spack.config.add_to_compiler_config(compiler_config_tree, scope) - for c in _required_instance_vars: - setup_field(compiler.spec, c, getattr(compiler, c)) def supported_compilers(): @@ -157,8 +157,7 @@ def all_compilers(): available to build with. These are instances of CompilerSpec. """ configuration = _get_config() - return [spack.spec.CompilerSpec(s) - for s in configuration.get_section_names('compiler')] + return [spack.spec.CompilerSpec(s) for s in configuration] @_auto_compiler_spec @@ -176,7 +175,7 @@ def compilers_for_spec(compiler_spec): config = _get_config() def get_compiler(cspec): - items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec)) + items = config[str(cspec)] if not all(n in items for n in _required_instance_vars): raise InvalidCompilerConfigurationError(cspec) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 85ee16a1c2..34dee86473 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -28,452 +28,315 @@ Configuration file scopes =============================== When Spack runs, it pulls configuration data from several config -files, much like bash shells. In Spack, there are two configuration -scopes: +directories, each of which contains configuration files. In Spack, +there are two configuration scopes: 1. ``site``: Spack loads site-wide configuration options from - ``$(prefix)/etc/spackconfig``. + ``$(prefix)/etc/spack/``. 2. ``user``: Spack next loads per-user configuration options from - ~/.spackconfig. - -If user options have the same names as site options, the user options -take precedence. + ~/.spack/. +Spack may read configuration files from both of these locations. When +configurations conflict, the user config options take precedence over +the site configurations. Each configuration directory may contain +several configuration files, such as compilers.yaml or mirrors.yaml. Configuration file format =============================== -Configuration files are formatted using .gitconfig syntax, which is -much like Windows .INI format. This format is implemented by Python's -ConfigParser class, and it's easy to read and versatile. - -The file is divided into sections, like this ``compiler`` section:: - - [compiler] - cc = /usr/bin/gcc - -In each section there are options (cc), and each option has a value -(/usr/bin/gcc). - -Borrowing from git, we also allow named sections, e.g.: - - [compiler "gcc@4.7.3"] - cc = /usr/bin/gcc - -This is a compiler section, but it's for the specific compiler, -``gcc@4.7.3``. ``gcc@4.7.3`` is the name. - - -Keys -=============================== - -Together, the section, name, and option, separated by periods, are -called a ``key``. 
Keys can be used on the command line to set -configuration options explicitly (this is also borrowed from git). - -For example, to change the C compiler used by gcc@4.7.3, you could do -this: - - spack config compiler.gcc@4.7.3.cc /usr/local/bin/gcc - -That will create a named compiler section in the user's .spackconfig -like the one shown above. +Configuration files are formatted using YAML syntax. +This format is implemented by Python's +yaml class, and it's easy to read and versatile. + +The config files are structured as trees, like this ``compiler`` section:: + + compilers: + chaos_5_x86_64_ib: + gcc@4.4.7: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + bgqos_0: + xlc@12.1: + cc: /usr/local/bin/mpixlc + ... + +In this example, entries like ''compilers'' and ''xlc@12.1'' are used to +categorize entries beneath them in the tree. At the root of the tree, +entries like ''cc'' and ''cxx'' are specified as name/value pairs. + +Spack returns these trees as nested dicts. The dict for the above example +would looks like: + + { 'compilers' : + { 'chaos_5_x86_64_ib' : + { 'gcc@4.4.7' : + { 'cc' : '/usr/bin/gcc', + 'cxx' : '/usr/bin/g++' + 'f77' : '/usr/bin/gfortran' + 'fc' : '/usr/bin/gfortran' } + } + { 'bgqos_0' : + { 'cc' : '/usr/local/bin/mpixlc' } + } + } + +Some routines, like get_mirrors_config and get_compilers_config may strip +off the top-levels of the tree and return subtrees. """ import os -import re -import inspect -import ConfigParser as cp +import exceptions +import sys from external.ordereddict import OrderedDict from llnl.util.lang import memoized import spack.error -__all__ = [ - 'SpackConfigParser', 'get_config', 'SpackConfigurationError', - 'InvalidConfigurationScopeError', 'InvalidSectionNameError', - 'ReadOnlySpackConfigError', 'ConfigParserError', 'NoOptionError', - 'NoSectionError'] - -_named_section_re = r'([^ ]+) "([^"]+)"' +from contextlib import closing +from external import yaml +from external.yaml.error import MarkedYAMLError +import llnl.util.tty as tty +from llnl.util.filesystem import mkdirp + +_config_sections = {} +class _ConfigCategory: + name = None + filename = None + merge = True + def __init__(self, n, f, m): + self.name = n + self.filename = f + self.merge = m + self.files_read_from = [] + self.result_dict = {} + _config_sections[n] = self + +_ConfigCategory('compilers', 'compilers.yaml', True) +_ConfigCategory('mirrors', 'mirrors.yaml', True) +_ConfigCategory('view', 'views.yaml', True) +_ConfigCategory('order', 'orders.yaml', True) """Names of scopes and their corresponding configuration files.""" -_scopes = OrderedDict({ - 'site' : os.path.join(spack.etc_path, 'spackconfig'), - 'user' : os.path.expanduser('~/.spackconfig') -}) - -_field_regex = r'^([\w-]*)' \ - r'(?:\.(.*(?=.)))?' \ - r'(?:\.([\w-]+))?$' - -_section_regex = r'^([\w-]*)\s*' \ - r'\"([^"]*\)\"$' - - -# Cache of configs -- we memoize this for performance. -_config = {} - -def get_config(scope=None, **kwargs): - """Get a Spack configuration object, which can be used to set options. - - With no arguments, this returns a SpackConfigParser with config - options loaded from all config files. This is how client code - should read Spack configuration options. - - Optionally, a scope parameter can be provided. Valid scopes - are ``site`` and ``user``. If a scope is provided, only the - options from that scope's configuration file are loaded. 
The - caller can set or unset options, then call ``write()`` on the - config object to write it back out to the original config file. - - By default, this will cache configurations and return the last - read version of the config file. If the config file is - modified and you need to refresh, call get_config with the - refresh=True keyword argument. This will force all files to be - re-read. - """ - refresh = kwargs.get('refresh', False) - if refresh: - _config.clear() - - if scope not in _config: - if scope is None: - _config[scope] = SpackConfigParser([path for path in _scopes.values()]) - elif scope not in _scopes: - raise UnknownConfigurationScopeError(scope) +config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), + ('user', os.path.expanduser('~/.spack'))] + +_compiler_by_arch = {} +_read_config_file_result = {} +def _read_config_file(filename): + """Read a given YAML configuration file""" + global _read_config_file_result + if filename in _read_config_file_result: + return _read_config_file_result[filename] + + try: + with open(filename) as f: + ydict = yaml.load(f) + except MarkedYAMLError, e: + tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) + except exceptions.IOError, e: + _read_config_file_result[filename] = None + return None + _read_config_file_result[filename] = ydict + return ydict + + +def clear_config_caches(): + """Clears the caches for configuration files, which will cause them + to be re-read upon the next request""" + for key,s in _config_sections.iteritems(): + s.files_read_from = [] + s.result_dict = {} + spack.config._read_config_file_result = {} + spack.config._compiler_by_arch = {} + spack.compilers._cached_default_compiler = None + + +def _merge_dicts(d1, d2): + """Recursively merges two configuration trees, with entries + in d2 taking precedence over d1""" + if not d1: + return d2.copy() + if not d2: + return d1 + + for key2, val2 in d2.iteritems(): + if not key2 in d1: + d1[key2] = val2 + continue + val1 = d1[key2] + if isinstance(val1, dict) and isinstance(val2, dict): + d1[key2] = _merge_dicts(val1, val2) + continue + if isinstance(val1, list) and isinstance(val2, list): + val1.extend(val2) + seen = set() + d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ] + continue + d1[key2] = val2 + return d1 + + +def get_config(category_name): + """Get the confguration tree for the names category. Strips off the + top-level category entry from the dict""" + global config_scopes + category = _config_sections[category_name] + if category.result_dict: + return category.result_dict + + category.result_dict = {} + for scope, scope_path in config_scopes: + path = os.path.join(scope_path, category.filename) + result = _read_config_file(path) + if not result: + continue + if not category_name in result: + continue + category.files_read_from.insert(0, path) + result = result[category_name] + if category.merge: + category.result_dict = _merge_dicts(category.result_dict, result) else: - _config[scope] = SpackConfigParser(_scopes[scope]) - - return _config[scope] - - -def get_filename(scope): - """Get the filename for a particular config scope.""" - if not scope in _scopes: - raise UnknownConfigurationScopeError(scope) - return _scopes[scope] - - -def _parse_key(key): - """Return the section, name, and option the field describes. - Values are returned in a 3-tuple. 
- - e.g.: - The field name ``compiler.gcc@4.7.3.cc`` refers to the 'cc' key - in a section that looks like this: - - [compiler "gcc@4.7.3"] - cc = /usr/local/bin/gcc - - * The section is ``compiler`` - * The name is ``gcc@4.7.3`` - * The key is ``cc`` - """ - match = re.search(_field_regex, key) - if match: - return match.groups() + category.result_dict = result + return category.result_dict + + +def get_compilers_config(arch=None): + """Get the compiler configuration from config files for the given + architecture. Strips off the architecture component of the + configuration""" + global _compiler_by_arch + if not arch: + arch = spack.architecture.sys_type() + if arch in _compiler_by_arch: + return _compiler_by_arch[arch] + + cc_config = get_config('compilers') + if arch in cc_config and 'all' in cc_config: + arch_compiler = dict(cc_config[arch]) + _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all']) + elif arch in cc_config: + _compiler_by_arch[arch] = cc_config[arch] + elif 'all' in cc_config: + _compiler_by_arch[arch] = cc_config['all'] else: - raise InvalidSectionNameError(key) - - -def _make_section_name(section, name): - if not name: - return section - return '%s "%s"' % (section, name) - - -def _autokey(fun): - """Allow a function to be called with a string key like - 'compiler.gcc.cc', or with the section, name, and option - separated. Function should take at least three args, e.g.: - - fun(self, section, name, option, [...]) - - This will allow the function above to be called normally or - with a string key, e.g.: - - fun(self, key, [...]) - """ - argspec = inspect.getargspec(fun) - fun_nargs = len(argspec[0]) - - def string_key_func(*args): - nargs = len(args) - if nargs == fun_nargs - 2: - section, name, option = _parse_key(args[1]) - return fun(args[0], section, name, option, *args[2:]) - - elif nargs == fun_nargs: - return fun(*args) - - else: - raise TypeError( - "%s takes %d or %d args (found %d)." - % (fun.__name__, fun_nargs - 2, fun_nargs, len(args))) - return string_key_func - - - -class SpackConfigParser(cp.RawConfigParser): - """Slightly modified from Python's raw config file parser to accept - leading whitespace and preserve comments. - """ - # Slightly modify Python option expressions to allow leading whitespace - OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern) - - def __init__(self, file_or_files): - cp.RawConfigParser.__init__(self, dict_type=OrderedDict) - - if isinstance(file_or_files, basestring): - self.read([file_or_files]) - self.filename = file_or_files - - else: - self.read(file_or_files) - self.filename = None - - - @_autokey - def set_value(self, section, name, option, value): - """Set the value for a key. If the key is in a section or named - section that does not yet exist, add that section. - """ - sn = _make_section_name(section, name) - if not self.has_section(sn): - self.add_section(sn) - - # Allow valueless config options to be set like this: - # spack config set mirror https://foo.bar.com - # - # Instead of this, which parses incorrectly: - # spack config set mirror.https://foo.bar.com - # - if option is None: - option = value - value = None - - self.set(sn, option, value) - - - @_autokey - def get_value(self, section, name, option): - """Get the value for a key. 
Raises NoOptionError or NoSectionError if - the key is not present.""" - sn = _make_section_name(section, name) - + _compiler_by_arch[arch] = {} + return _compiler_by_arch[arch] + + +def get_mirror_config(): + """Get the mirror configuration from config files""" + return get_config('mirrors') + + +def get_config_scope_dirname(scope): + """For a scope return the config directory""" + global config_scopes + for s,p in config_scopes: + if s == scope: + return p + tty.die("Unknown scope %s. Valid options are %s" % + (scope, ", ".join([s for s,p in config_scopes]))) + + +def get_config_scope_filename(scope, category_name): + """For some scope and category, get the name of the configuration file""" + if not category_name in _config_sections: + tty.die("Unknown config category %s. Valid options are: %s" % + (category_name, ", ".join([s for s in _config_sections]))) + return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename) + + +def add_to_config(category_name, addition_dict, scope=None): + """Merge a new dict into a configuration tree and write the new + configuration to disk""" + global _read_config_file_result + get_config(category_name) + category = _config_sections[category_name] + + #If scope is specified, use it. Otherwise use the last config scope that + #we successfully parsed data from. + file = None + path = None + if not scope and not category.files_read_from: + scope = 'user' + if scope: try: - if not option: - # TODO: format this better - return self.items(sn) - - return self.get(sn, option) - - # Wrap ConfigParser exceptions in SpackExceptions - except cp.NoOptionError, e: raise NoOptionError(e) - except cp.NoSectionError, e: raise NoSectionError(e) - except cp.Error, e: raise ConfigParserError(e) - - - @_autokey - def has_value(self, section, name, option): - """Return whether the configuration file has a value for a - particular key.""" - sn = _make_section_name(section, name) - return self.has_option(sn, option) - - - def has_named_section(self, section, name): - sn = _make_section_name(section, name) - return self.has_section(sn) - - - def remove_named_section(self, section, name): - sn = _make_section_name(section, name) - self.remove_section(sn) - - - def get_section_names(self, sectype): - """Get all named sections with the specified type. - A named section looks like this: - - [compiler "gcc@4.7"] - - Names of sections are returned as a list, e.g.: - - ['gcc@4.7', 'intel@12.3', 'pgi@4.2'] - - You can get items in the sections like this: - """ - sections = [] - for secname in self.sections(): - match = re.match(_named_section_re, secname) - if match: - t, name = match.groups() - if t == sectype: - sections.append(name) - return sections - - - def write(self, path_or_fp=None): - """Write this configuration out to a file. - - If called with no arguments, this will write the - configuration out to the file from which it was read. If - this config was read from multiple files, e.g. site - configuration and then user configuration, write will - simply raise an error. - - If called with a path or file object, this will write the - configuration out to the supplied path or file object. 
- """ - if path_or_fp is None: - if not self.filename: - raise ReadOnlySpackConfigError() - path_or_fp = self.filename - - if isinstance(path_or_fp, basestring): - path_or_fp = open(path_or_fp, 'w') - - self._write(path_or_fp) - - - def _read(self, fp, fpname): - """This is a copy of Python 2.6's _read() method, with support for - continuation lines removed.""" - cursect = None # None, or a dictionary - optname = None - comment = 0 - lineno = 0 - e = None # None, or an exception - while True: - line = fp.readline() - if not line: - break - lineno = lineno + 1 - # comment or blank line? - if ((line.strip() == '' or line[0] in '#;') or - (line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")): - self._sections["comment-%d" % comment] = line - comment += 1 - # a section header or option header? - else: - # is it a section header? - mo = self.SECTCRE.match(line) - if mo: - sectname = mo.group('header') - if sectname in self._sections: - cursect = self._sections[sectname] - elif sectname == cp.DEFAULTSECT: - cursect = self._defaults - else: - cursect = self._dict() - cursect['__name__'] = sectname - self._sections[sectname] = cursect - # So sections can't start with a continuation line - optname = None - # no section header in the file? - elif cursect is None: - raise cp.MissingSectionHeaderError(fpname, lineno, line) - # an option line? - else: - mo = self.OPTCRE.match(line) - if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') - if vi in ('=', ':') and ';' in optval: - # ';' is a comment delimiter only if it follows - # a spacing character - pos = optval.find(';') - if pos != -1 and optval[pos-1].isspace(): - optval = optval[:pos] - optval = optval.strip() - # allow empty values - if optval == '""': - optval = '' - optname = self.optionxform(optname.rstrip()) - cursect[optname] = optval - else: - # a non-fatal parsing error occurred. set up the - # exception but keep going. the exception will be - # raised at the end of the file and will contain a - # list of all bogus lines - if not e: - e = cp.ParsingError(fpname) - e.append(lineno, repr(line)) - # if any parsing errors occurred, raise an exception - if e: - raise e - - - - - def _write(self, fp): - """Write an .ini-format representation of the configuration state. - - This is taken from the default Python 2.6 source. It writes 4 - spaces at the beginning of lines instead of no leading space. - """ - if self._defaults: - fp.write("[%s]\n" % cp.DEFAULTSECT) - for (key, value) in self._defaults.items(): - fp.write(" %s = %s\n" % (key, str(value).replace('\n', '\n\t'))) - fp.write("\n") - - for section in self._sections: - # Handles comments and blank lines. 
- if isinstance(self._sections[section], basestring): - fp.write(self._sections[section]) - continue - - else: - # Allow leading whitespace - fp.write("[%s]\n" % section) - for (key, value) in self._sections[section].items(): - if key != "__name__": - fp.write(" %s = %s\n" % - (key, str(value).replace('\n', '\n\t'))) - - -class SpackConfigurationError(spack.error.SpackError): - def __init__(self, *args): - super(SpackConfigurationError, self).__init__(*args) - - -class InvalidConfigurationScopeError(SpackConfigurationError): - def __init__(self, scope): - super(InvalidConfigurationScopeError, self).__init__( - "Invalid configuration scope: '%s'" % scope, - "Options are: %s" % ", ".join(*_scopes.values())) - - -class InvalidSectionNameError(SpackConfigurationError): - """Raised when the name for a section is invalid.""" - def __init__(self, name): - super(InvalidSectionNameError, self).__init__( - "Invalid section specifier: '%s'" % name) - - -class ReadOnlySpackConfigError(SpackConfigurationError): - """Raised when user attempts to write to a config read from multiple files.""" - def __init__(self): - super(ReadOnlySpackConfigError, self).__init__( - "Can only write to a single-file SpackConfigParser") - - -class ConfigParserError(SpackConfigurationError): - """Wrapper for the Python ConfigParser's errors""" - def __init__(self, error): - super(ConfigParserError, self).__init__(str(error)) - self.error = error - - -class NoOptionError(ConfigParserError): - """Wrapper for ConfigParser NoOptionError""" - def __init__(self, error): - super(NoOptionError, self).__init__(error) - - -class NoSectionError(ConfigParserError): - """Wrapper for ConfigParser NoOptionError""" - def __init__(self, error): - super(NoSectionError, self).__init__(error) + dir = get_config_scope_dirname(scope) + if not os.path.exists(dir): + mkdirp(dir) + path = os.path.join(dir, category.filename) + file = open(path, 'w') + except exceptions.IOError, e: + pass + else: + for p in category.files_read_from: + try: + file = open(p, 'w') + except exceptions.IOError, e: + pass + if file: + path = p + break; + if not file: + tty.die('Unable to write to config file %s' % path) + + #Merge the new information into the existing file info, then write to disk + new_dict = _read_config_file_result[path] + if new_dict and category_name in new_dict: + new_dict = new_dict[category_name] + new_dict = _merge_dicts(new_dict, addition_dict) + new_dict = { category_name : new_dict } + _read_config_file_result[path] = new_dict + yaml.dump(new_dict, stream=file, default_flow_style=False) + file.close() + + #Merge the new information into the cached results + category.result_dict = _merge_dicts(category.result_dict, addition_dict) + + +def add_to_mirror_config(addition_dict, scope=None): + """Add mirrors to the configuration files""" + add_to_config('mirrors', addition_dict, scope) + + +def add_to_compiler_config(addition_dict, scope=None, arch=None): + """Add compilerss to the configuration files""" + if not arch: + arch = spack.architecture.sys_type() + add_to_config('compilers', { arch : addition_dict }, scope) + clear_config_caches() + + +def remove_from_config(category_name, key_to_rm, scope=None): + """Remove a configuration key and write a new configuration to disk""" + global config_scopes + get_config(category_name) + scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes] + category = _config_sections[category_name] + + rmd_something = False + for s in scopes_to_rm_from: + path = get_config_scope_filename(scope, 
category_name) + result = _read_config_file(path) + if not result: + continue + if not key_to_rm in result[category_name]: + continue + with closing(open(path, 'w')) as f: + result[category_name].pop(key_to_rm, None) + yaml.dump(result, stream=f, default_flow_style=False) + category.result_dict.pop(key_to_rm, None) + rmd_something = True + return rmd_something + + +"""Print a configuration to stdout""" +def print_category(category_name): + if not category_name in _config_sections: + tty.die("Unknown config category %s. Valid options are: %s" % + (category_name, ", ".join([s for s in _config_sections]))) + yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False) + diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index d451743508..008c5f0429 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -344,13 +344,9 @@ class DIYStage(object): def _get_mirrors(): """Get mirrors from spack configuration.""" - config = spack.config.get_config() + config = spack.config.get_mirror_config() + return [val for name, val in config.iteritems()] - mirrors = [] - sec_names = config.get_section_names('mirror') - for name in sec_names: - mirrors.append(config.get_value('mirror', name, 'url')) - return mirrors def ensure_access(file=spack.stage_path): diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index c676e9a35b..790b22f3b0 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -26,44 +26,49 @@ import unittest import shutil import os from tempfile import mkdtemp +import spack +from spack.packages import PackageDB +from spack.test.mock_packages_test import * -from spack.config import * +class ConfigTest(MockPackagesTest): + def setUp(self): + self.initmock() + self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') + spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')), + ('test_high_priority', os.path.join(self.tmp_dir, 'high'))] -class ConfigTest(unittest.TestCase): + def tearDown(self): + self.cleanmock() + shutil.rmtree(self.tmp_dir, True) - @classmethod - def setUp(cls): - cls.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') + def check_config(self, comps): + config = spack.config.get_compilers_config() + compiler_list = ['cc', 'cxx', 'f77', 'f90'] + for key in comps: + for c in compiler_list: + if comps[key][c] == '/bad': + continue + self.assertEqual(comps[key][c], config[key][c]) - @classmethod - def tearDown(cls): - shutil.rmtree(cls.tmp_dir, True) - - - def get_path(self): - return os.path.join(ConfigTest.tmp_dir, "spackconfig") + def test_write_key(self): + a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None }, + "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" }, + "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }} + b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None }, + "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" }, + "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}} - def test_write_key(self): - config = SpackConfigParser(self.get_path()) - config.set_value('compiler.cc', 'a') - config.set_value('compiler.cxx', 'b') - config.set_value('compiler', 'gcc@4.7.3', 'cc', 'c') - config.set_value('compiler', 'gcc@4.7.3', 'cxx', 'd') - config.write() + spack.config.add_to_compiler_config(a_comps, 'test_low_priority') + 
spack.config.add_to_compiler_config(b_comps, 'test_high_priority') - config = SpackConfigParser(self.get_path()) + self.check_config(a_comps) + self.check_config(b_comps) - self.assertEqual(config.get_value('compiler.cc'), 'a') - self.assertEqual(config.get_value('compiler.cxx'), 'b') - self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cc'), 'c') - self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cxx'), 'd') + spack.config.clear_config_caches() - self.assertEqual(config.get_value('compiler', None, 'cc'), 'a') - self.assertEqual(config.get_value('compiler', None, 'cxx'), 'b') - self.assertEqual(config.get_value('compiler.gcc@4.7.3.cc'), 'c') - self.assertEqual(config.get_value('compiler.gcc@4.7.3.cxx'), 'd') + self.check_config(a_comps) + self.check_config(b_comps) - self.assertRaises(NoOptionError, config.get_value, 'compiler', None, 'fc') diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 09fb9ebe30..00f81114af 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -31,7 +31,7 @@ from spack.spec import Spec def set_pkg_dep(pkg, spec): - """Alters dependence information for a pacakge. + """Alters dependence information for a package. Use this to mock up constraints. """ spec = Spec(spec) @@ -39,21 +39,32 @@ def set_pkg_dep(pkg, spec): class MockPackagesTest(unittest.TestCase): - def setUp(self): + def initmock(self): # Use the mock packages database for these tests. This allows # us to set up contrived packages that don't interfere with # real ones. self.real_db = spack.db spack.db = PackageDB(spack.mock_packages_path) - self.real_scopes = spack.config._scopes - spack.config._scopes = { - 'site' : spack.mock_site_config, - 'user' : spack.mock_user_config } + spack.config.clear_config_caches() + self.real_scopes = spack.config.config_scopes + spack.config.config_scopes = [ + ('site', spack.mock_site_config), + ('user', spack.mock_user_config)] - def tearDown(self): + def cleanmock(self): """Restore the real packages path after any test.""" spack.db = self.real_db - spack.config._scopes = self.real_scopes + spack.config.config_scopes = self.real_scopes + spack.config.clear_config_caches() + + + def setUp(self): + self.initmock() + + + def tearDown(self): + self.cleanmock() + diff --git a/var/spack/mock_configs/site_spackconfig b/var/spack/mock_configs/site_spackconfig deleted file mode 100644 index 1358720362..0000000000 --- a/var/spack/mock_configs/site_spackconfig +++ /dev/null @@ -1,12 +0,0 @@ -[compiler "gcc@4.5.0"] - cc = /path/to/gcc - cxx = /path/to/g++ - f77 = /path/to/gfortran - fc = /path/to/gfortran - -[compiler "clang@3.3"] - cc = /path/to/clang - cxx = /path/to/clang++ - f77 = None - fc = None - diff --git a/var/spack/mock_configs/site_spackconfig/compilers.yaml b/var/spack/mock_configs/site_spackconfig/compilers.yaml new file mode 100644 index 0000000000..0a2dc893e2 --- /dev/null +++ b/var/spack/mock_configs/site_spackconfig/compilers.yaml @@ -0,0 +1,12 @@ +compilers: + all: + clang@3.3: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + gcc@4.5.0: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: /path/to/gfortran + fc: /path/to/gfortran diff --git a/var/spack/mock_configs/user_spackconfig b/var/spack/mock_configs/user_spackconfig deleted file mode 100644 index e69de29bb2..0000000000 -- cgit v1.2.3-70-g09d2
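A minimal usage sketch of the new YAML-backed compiler configuration introduced above, mirroring what ConfigTest.test_write_key exercises. The scratch directory and the 'demo' scope name are made up for illustration and are not part of the patch; only functions defined in the diff (config_scopes, clear_config_caches, add_to_compiler_config, get_compilers_config) are used.

    # Illustrative sketch only -- names of the scratch dir and scope are
    # assumptions for the example, not part of the patch.
    import os
    from tempfile import mkdtemp

    import spack
    import spack.config

    # Point Spack at a single throwaway configuration scope, the same way
    # the new ConfigTest does with its 'low'/'high' priority scopes.
    tmp_dir = mkdtemp('.tmp', 'spack-config-demo-')
    spack.config.config_scopes = [('demo', os.path.join(tmp_dir, 'demo'))]
    spack.config.clear_config_caches()

    # add_to_compiler_config() nests the entries under the current arch
    # (spack.architecture.sys_type()) and merges them into the scope's
    # compilers.yaml via add_to_config().
    spack.config.add_to_compiler_config(
        {'gcc@4.5.0': {'cc':  '/path/to/gcc',
                       'cxx': '/path/to/g++',
                       'f77': '/path/to/gfortran',
                       'fc':  '/path/to/gfortran'}},
        'demo')

    # get_compilers_config() reads the YAML back and strips the arch
    # component off again, so the same mapping comes back out.
    print spack.config.get_compilers_config()['gcc@4.5.0']['cc']

The on-disk result has the same shape as the new var/spack/mock_configs/site_spackconfig/compilers.yaml shown in this patch: a top-level 'compilers' key, an arch (or 'all') level, then one entry per compiler spec.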