 .gitignore                                 |   1
 lib/spack/spack/cmd/compiler.py            |   7
 lib/spack/spack/cmd/config.py              |  12
 lib/spack/spack/cmd/mirror.py              |   4
 lib/spack/spack/compilers/__init__.py      |  76
 lib/spack/spack/config.py                  | 417
 lib/spack/spack/spec.py                    |   1
 lib/spack/spack/stage.py                   |   5
 lib/spack/spack/test/config.py             |  21
 lib/spack/spack/test/database.py           |   3
 lib/spack/spack/test/mock_packages_test.py |   6
 11 files changed, 285 insertions(+), 268 deletions(-)
diff --git a/.gitignore b/.gitignore
index 1c6ca4c99e..4b97de5d50 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules
+/TAGS
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 2a64dc914e..6efc9a3347 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -65,10 +65,11 @@ def compiler_add(args):
if c.spec not in spack.compilers.all_compilers()]
if compilers:
- spack.compilers.add_compilers_to_config('user', *compilers)
+ spack.compilers.add_compilers_to_config('user', compilers)
n = len(compilers)
- tty.msg("Added %d new compiler%s to %s" % (
- n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
+ s = 's' if n > 1 else ''
+ filename = spack.config.get_config_filename('user', 'compilers')
+ tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else:
tty.msg("Found no new compilers")
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index 8c18f88b64..603023d891 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -44,22 +44,22 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
get_parser = sp.add_parser('get', help='Print configuration values.')
- get_parser.add_argument('category', help="Configuration category to print.")
+ get_parser.add_argument('section', help="Configuration section to print.")
edit_parser = sp.add_parser('edit', help='Edit configuration file.')
- edit_parser.add_argument('category', help="Configuration category to edit")
+ edit_parser.add_argument('section', help="Configuration section to edit")
def config_get(args):
- spack.config.print_category(args.category)
+ spack.config.print_section(args.section)
def config_edit(args):
if not args.scope:
args.scope = 'user'
- if not args.category:
- args.category = None
- config_file = spack.config.get_config_scope_filename(args.scope, args.category)
+ if not args.section:
+ args.section = None
+ config_file = spack.config.get_config_filename(args.scope, args.section)
spack.editor(config_file)
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 9a507e69db..2b25793927 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -76,7 +76,7 @@ def mirror_add(args):
url = 'file://' + url
mirror_dict = { args.name : url }
- spack.config.add_to_mirror_config({ args.name : url })
+ spack.config.update_config('mirrors', { args.name : url }, 'user')
def mirror_remove(args):
@@ -90,7 +90,7 @@ def mirror_remove(args):
def mirror_list(args):
"""Print out available mirrors to the console."""
- sec_names = spack.config.get_mirror_config()
+ sec_names = spack.config.get_config('mirrors')
if not sec_names:
tty.msg("No mirrors configured.")
return
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index b7b021a1ac..a1980f1cdf 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -35,6 +35,7 @@ import spack
import spack.error
import spack.spec
import spack.config
+import spack.architecture
from spack.util.multiproc import parmap
from spack.compiler import Compiler
@@ -55,23 +56,48 @@ def _auto_compiler_spec(function):
return converter
-def _get_config():
- """Get a Spack config, but make sure it has compiler configuration
- first."""
+def _to_dict(compiler):
+ """Return a dict version of compiler suitable to insert in YAML."""
+ return {
+ str(compiler.spec) : dict(
+ (attr, getattr(compiler, attr, None))
+ for attr in _required_instance_vars)
+ }
+
+
+def get_compiler_config(arch=None):
+ """Return the compiler configuration for the specified architecture.
+
+ If the compiler configuration designates some compilers for
+ 'all' architectures, those are merged into the result, as well.
+
+ """
# If any configuration file has compilers, just stick with the
# ones already configured.
- config = spack.config.get_compilers_config()
- existing = [spack.spec.CompilerSpec(s)
- for s in config]
- if existing:
- return config
+ config = spack.config.get_config('compilers')
+
+ if arch is None:
+ arch = spack.architecture.sys_type()
- compilers = find_compilers(*get_path('PATH'))
- add_compilers_to_config('user', *compilers)
+ if arch not in config:
+ config[arch] = {}
+ compilers = find_compilers(*get_path('PATH'))
+ for compiler in compilers:
+ config[arch].update(_to_dict(compiler))
+ spack.config.update_config('compilers', config, 'user')
- # After writing compilers to the user config, return a full config
- # from all files.
- return spack.config.get_compilers_config()
+ # Merge 'all' compilers with arch-specific ones.
+ merged_config = config.get('all', {})
+ merged_config = spack.config._merge_yaml(merged_config, config[arch])
+
+ return merged_config
+
+
+def all_compilers(arch=None):
+    """Return a list of specs for all the compiler versions currently
+       available to build with.  These are instances of CompilerSpec.
+    """
+ return [spack.spec.CompilerSpec(s) for s in get_compiler_config(arch)]
_cached_default_compiler = None
@@ -123,20 +149,6 @@ def find_compilers(*path):
return clist
-def add_compilers_to_config(scope, *compilers):
- compiler_config_tree = {}
- for compiler in compilers:
- compiler_entry = {}
- for c in _required_instance_vars:
- val = getattr(compiler, c)
- if not val:
- val = "None"
- compiler_entry[c] = val
- compiler_config_tree[str(compiler.spec)] = compiler_entry
- spack.config.add_to_compiler_config(compiler_config_tree, scope)
-
-
-
def supported_compilers():
"""Return a set of names of compilers supported by Spack.
@@ -152,14 +164,6 @@ def supported(compiler_spec):
return compiler_spec.name in supported_compilers()
-def all_compilers():
- """Return a set of specs for all the compiler versions currently
- available to build with. These are instances of CompilerSpec.
- """
- configuration = _get_config()
- return [spack.spec.CompilerSpec(s) for s in configuration]
-
-
@_auto_compiler_spec
def find(compiler_spec):
"""Return specs of available compilers that match the supplied
@@ -172,7 +176,7 @@ def compilers_for_spec(compiler_spec):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
- config = _get_config()
+ config = get_compiler_config()
def get_compiler(cspec):
items = config[str(cspec)]
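
To make the merge of 'all' and arch-specific compilers in ``get_compiler_config()`` concrete, here is a minimal standalone sketch of the layering it performs. The compiler names and paths are invented, and the real code goes through ``spack.config._merge_yaml`` rather than ``dict.update``:

    # Hypothetical compiler config, as it looks after YAML loading and
    # stripping of the top-level 'compilers:' key.
    config = {
        'all': {
            'gcc@4.4.7': {'cc': '/usr/bin/gcc', 'cxx': '/usr/bin/g++',
                          'f77': '/usr/bin/gfortran', 'fc': '/usr/bin/gfortran'},
        },
        'chaos_5_x86_64_ib': {
            'intel@14.0.1': {'cc': '/opt/intel/bin/icc', 'cxx': '/opt/intel/bin/icpc',
                             'f77': '/opt/intel/bin/ifort', 'fc': '/opt/intel/bin/ifort'},
        },
    }

    arch = 'chaos_5_x86_64_ib'
    merged = dict(config.get('all', {}))  # start from the 'all' entries
    merged.update(config.get(arch, {}))   # arch-specific entries take precedence
    # merged now lists both gcc@4.4.7 and intel@14.0.1 for this architecture.
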
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 36bf8a7fc3..7d7a87c7dc 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -67,25 +67,54 @@ In this example, entries like ''compilers'' and ''xlc@12.1'' are used to
categorize entries beneath them in the tree. At the root of the tree,
entries like ''cc'' and ''cxx'' are specified as name/value pairs.
-Spack returns these trees as nested dicts. The dict for the above example
-would looks like:
-
- { 'compilers' :
- { 'chaos_5_x86_64_ib' :
- { 'gcc@4.4.7' :
- { 'cc' : '/usr/bin/gcc',
- 'cxx' : '/usr/bin/g++'
- 'f77' : '/usr/bin/gfortran'
- 'fc' : '/usr/bin/gfortran' }
- }
- { 'bgqos_0' :
- { 'cc' : '/usr/local/bin/mpixlc' }
- }
- }
-
-Some convenience functions, like get_mirrors_config and
-``get_compilers_config`` may strip off the top-levels of the tree and
-return subtrees.
+``config.get_config()`` returns these trees as nested dicts, but it
+strips the first level off. So, ``config.get_config('compilers')``
+would return something like this for the above example:
+
+    { 'chaos_5_x86_64_ib' :
+       { 'gcc@4.4.7' :
+           { 'cc'  : '/usr/bin/gcc',
+             'cxx' : '/usr/bin/g++',
+             'f77' : '/usr/bin/gfortran',
+             'fc'  : '/usr/bin/gfortran' } },
+      'bgqos_0' :
+       { 'xlc@12.1' :
+           { 'cc' : '/usr/local/bin/mpixlc' } } }
+
+Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``,
+but ``get_config()`` strips that off too.
+
+Precedence
+===============================
+
+``config.py`` routines attempt to recursively merge configuration
+across scopes. So if there are ``compilers.yaml`` files in both the
+site scope and the user scope, ``get_config('compilers')`` will return
+merged dictionaries of *all* the compilers available. If a user
+compiler conflicts with a site compiler, Spack will overwrite the site
+configuration with the user configuration. If both the user and site
+``mirrors.yaml`` files contain lists of mirrors, then ``get_config()``
+will return a concatenated list of mirrors, with the user config items
+first.
+
+Sometimes, it is useful to *completely* override a site setting with a
+user one. To accomplish this, you can use *two* colons at the end of
+a key in a configuration file. For example, this:
+
+    compilers::
+      chaos_5_x86_64_ib:
+        gcc@4.4.7:
+          cc: /usr/bin/gcc
+          cxx: /usr/bin/g++
+          f77: /usr/bin/gfortran
+          fc: /usr/bin/gfortran
+      bgqos_0:
+        xlc@12.1:
+          cc: /usr/local/bin/mpixlc
+    ...
+
+will make Spack take compilers *only* from the user configuration, and
+the site configuration will be ignored.
"""
import os
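
A rough usage sketch of the precedence rules described in the docstring above, assuming a Spack checkout on the Python path; the file contents referred to in the comments are hypothetical:

    import spack.config as config

    # Merged view of compilers.yaml across scopes: site entries first,
    # then user entries layered on top, key by key.
    compilers = config.get_config('compilers')   # e.g. {'chaos_5_x86_64_ib': {...}}

    # mirrors.yaml merges the same way, and the top-level 'mirrors:' key
    # is stripped off, so the result is just a name -> URL mapping.
    mirrors = config.get_config('mirrors')       # e.g. {'local': 'file:///tmp/mirror'}

    # If the user file had written 'compilers::' (double colon), its section
    # would replace the site section outright instead of being merged.
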
@@ -96,80 +125,132 @@ from external.yaml.error import MarkedYAMLError
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
-from llnl.util.lang import memoized
import spack
+from spack.error import SpackError
+"""List of valid config sections."""
+valid_sections = ('compilers', 'mirrors', 'repos')
-_config_sections = {}
-class _ConfigCategory:
- name = None
- filename = None
- merge = True
- def __init__(self, name, filename, merge, strip):
- self.name = name
- self.filename = filename
- self.merge = merge
- self.strip = strip
- self.files_read_from = []
- self.result_dict = {}
- _config_sections[name] = self
-
-_ConfigCategory('repos', 'repos.yaml', True, True)
-_ConfigCategory('compilers', 'compilers.yaml', True, True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True, True)
-_ConfigCategory('view', 'views.yaml', True, True)
-_ConfigCategory('order', 'orders.yaml', True, True)
-
-"""Names of scopes and their corresponding configuration files."""
-config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
- ('user', os.path.expanduser('~/.spack'))]
-
-_compiler_by_arch = {}
-
-@memoized
-def _read_config_file(filename):
- """Read a YAML configuration file"""
+def check_section(section):
+ """Raise a ValueError if the section is not a valid section."""
+ if section not in valid_sections:
+ raise ValueError("Invalid config section: '%s'. Options are %s."
+ % (section, valid_sections))
+
+
+class ConfigScope(object):
+ """This class represents a configuration scope.
+
+ A scope is one directory containing named configuration files.
+ Each file is a config "section" (e.g., mirrors, compilers, etc).
+ """
+ def __init__(self, name, path):
+ self.name = name # scope name.
+ self.path = path # path to directory containing configs.
+ self.sections = {} # sections read from config files.
+
+
+ def get_section_filename(self, section):
+ check_section(section)
+ return os.path.join(self.path, "%s.yaml" % section)
+
+
+ def get_section(self, section):
+ if not section in self.sections:
+ path = self.get_section_filename(section)
+ data = _read_config_file(path)
+ self.sections[section] = {} if data is None else data
+ return self.sections[section]
+
+
+ def write_section(self, section):
+ filename = self.get_section_filename(section)
+ data = self.get_section(section)
+ try:
+ mkdirp(self.path)
+ with open(filename, 'w') as f:
+ yaml.dump(data, stream=f, default_flow_style=False)
+ except (yaml.YAMLError, IOError) as e:
+ raise ConfigFileError("Error writing to config file: '%s'" % str(e))
+
+
+ def clear(self):
+ """Empty cached config information."""
+ self.sections = {}
+
+
+"""List of config scopes by name.
+ Later scopes in the list will override earlier scopes.
+"""
+config_scopes = [
+ ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
+ ConfigScope('user', os.path.expanduser('~/.spack'))]
+
+"""List of valid scopes, for convenience."""
+valid_scopes = tuple(s.name for s in config_scopes)
+
+
+def check_scope(scope):
+ if scope is None:
+ return 'user'
+ elif scope not in valid_scopes:
+ raise ValueError("Invalid config scope: '%s'. Must be one of %s."
+ % (scope, valid_scopes))
+ return scope
+
+
+def get_scope(scope):
+ scope = check_scope(scope)
+ return next(s for s in config_scopes if s.name == scope)
+
+
+def _read_config_file(filename):
+ """Read a YAML configuration file."""
# Ignore nonexisting files.
if not os.path.exists(filename):
return None
elif not os.path.isfile(filename):
- tty.die("Invlaid configuration. %s exists but is not a file." % filename)
+        raise ConfigFileError(
+            "Invalid configuration. %s exists but is not a file." % filename)
elif not os.access(filename, os.R_OK):
- tty.die("Configuration file %s is not readable." % filename)
+ raise ConfigFileError("Config file is not readable: %s." % filename)
try:
with open(filename) as f:
return yaml.load(f)
except MarkedYAMLError, e:
- tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+ raise ConfigFileError(
+ "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
except IOError, e:
- tty.die("Error reading configuration file %s: %s" % (filename, str(e)))
+ raise ConfigFileError(
+ "Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches():
"""Clears the caches for configuration files, which will cause them
to be re-read upon the next request"""
- for key,s in _config_sections.iteritems():
- s.files_read_from = []
- s.result_dict = {}
-
- _read_config_file.clear()
- spack.config._compiler_by_arch = {}
- spack.compilers._cached_default_compiler = None
+ for scope in config_scopes:
+ scope.clear()
def _merge_yaml(dest, source):
"""Merges source into dest; entries in source take precedence over dest.
+    This routine may modify dest; the return value should be assigned
+    back to dest, in case dest was None to begin with, e.g.:
+
+ dest = _merge_yaml(dest, source)
+
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
+
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
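
The docstring's advice to write ``dest = _merge_yaml(dest, source)`` matters because the helper can hand back a brand-new object, for instance when ``dest`` starts out as None. A tiny illustration, assuming the helper simply replaces a None ``dest`` with (a copy of) ``source``:

    from spack.config import _merge_yaml  # private helper, used here for illustration

    source = {'mirrors': {'local': 'file:///tmp/spack-mirror'}}
    dest = None

    _merge_yaml(dest, source)         # wrong: the merged result is thrown away
    assert dest is None

    dest = _merge_yaml(dest, source)  # right: re-bind dest to the return value
    assert dest == source
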
@@ -212,61 +293,31 @@ def substitute_spack_prefix(path):
return path.replace('$spack', spack.prefix)
-def get_config(category):
- """Get the confguration tree for a category.
+def get_config(section):
+ """Get configuration settings for a section.
- Strips off the top-level category entry from the dict
+ Strips off the top-level section name from the YAML dict.
"""
- category = _config_sections[category]
- if category.result_dict:
- return category.result_dict
-
- category.result_dict = {}
- for scope, scope_path in config_scopes:
- path = os.path.join(scope_path, category.filename)
- result = _read_config_file(path)
- if not result:
+ check_section(section)
+ merged_section = {}
+
+ for scope in config_scopes:
+ # read potentially cached data from the scope.
+ data = scope.get_section(section)
+ if not data or not section in data:
continue
- if category.strip:
- if not category.name in result:
- continue
- result = result[category.name]
-
- # ignore empty sections for easy commenting of single-line configs.
- if result is None:
- continue
-
- category.files_read_from.insert(0, path)
- if category.merge:
- category.result_dict = _merge_yaml(category.result_dict, result)
- else:
- category.result_dict = result
-
- return category.result_dict
-
-
-def get_compilers_config(arch=None):
- """Get the compiler configuration from config files for the given
- architecture. Strips off the architecture component of the
- configuration"""
- global _compiler_by_arch
- if not arch:
- arch = spack.architecture.sys_type()
- if arch in _compiler_by_arch:
- return _compiler_by_arch[arch]
-
- cc_config = get_config('compilers')
- if arch in cc_config and 'all' in cc_config:
- arch_compiler = dict(cc_config[arch])
- _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all'])
- elif arch in cc_config:
- _compiler_by_arch[arch] = cc_config[arch]
- elif 'all' in cc_config:
- _compiler_by_arch[arch] = cc_config['all']
- else:
- _compiler_by_arch[arch] = {}
- return _compiler_by_arch[arch]
+ # extract data under the section name header
+ data = data[section]
+
+ # ignore empty sections for easy commenting of single-line configs.
+ if not data:
+ continue
+
+ # merge config data from scopes.
+ merged_section = _merge_yaml(merged_section, data)
+
+ return merged_section
def get_repos_config():
@@ -284,119 +335,71 @@ def get_repos_config():
return [expand_repo_path(repo) for repo in repo_list]
-def get_mirror_config():
- """Get the mirror configuration from config files"""
- return get_config('mirrors')
-
+def get_config_filename(scope, section):
+ """For some scope and section, get the name of the configuration file"""
+ scope = get_scope(scope)
+ return scope.get_section_filename(section)
-def get_config_scope_dirname(scope):
- """For a scope return the config directory"""
- for s,p in config_scopes:
- if s == scope:
- return p
- tty.die("Unknown scope %s. Valid options are %s" %
- (scope, ", ".join([s for s,p in config_scopes])))
+def update_config(section, update_data, scope=None):
+ """Update the configuration file for a particular scope.
-def get_config_scope_filename(scope, category_name):
- """For some scope and category, get the name of the configuration file"""
- if not category_name in _config_sections:
- tty.die("Unknown config category %s. Valid options are: %s" %
- (category_name, ", ".join([s for s in _config_sections])))
- return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
+ Merges contents of update_data into the scope's data for the
+ specified section, then writes out the config file.
+    update_data should contain only the section's data, with the
+    top-level name stripped off. This can be a list, dict, or any
+    other YAML-ish structure.
-def add_to_config(category_name, addition_dict, scope=None):
- """Merge a new dict into a configuration tree and write the new
- configuration to disk"""
- get_config(category_name)
- category = _config_sections[category_name]
-
- # If scope is specified, use it. Otherwise use the last config scope that
- # we successfully parsed data from.
- file = None
- path = None
- if not scope and not category.files_read_from:
- scope = 'user'
-
- if scope:
- try:
- dir = get_config_scope_dirname(scope)
- if not os.path.exists(dir):
- mkdirp(dir)
- path = os.path.join(dir, category.filename)
- file = open(path, 'w')
- except IOError, e:
- pass
- else:
- for p in category.files_read_from:
- try:
- file = open(p, 'w')
- except IOError, e:
- pass
- if file:
- path = p
- break;
+ """
+ # read in the config to ensure we've got current data
+ get_config(section)
- if not file:
- tty.die('Unable to write to config file %s' % path)
+ check_section(section) # validate section name
+ scope = get_scope(scope) # get ConfigScope object from string.
- # Merge the new information into the existing file info, then write to disk
- new_dict = _read_config_file(path)
+ # read only the requested section's data.
+ data = scope.get_section(section)
+ data = _merge_yaml(data, { section : update_data })
+ scope.write_section(section)
- if new_dict and category_name in new_dict:
- new_dict = new_dict[category_name]
- new_dict = _merge_yaml(new_dict, addition_dict)
- new_dict = { category_name : new_dict }
+def remove_from_config(section, key_to_rm, scope=None):
+ """Remove a configuration key and write updated configuration to disk.
- # Install new dict as memoized value, and dump to disk
- _read_config_file.cache[path] = new_dict
- yaml.dump(new_dict, stream=file, default_flow_style=False)
- file.close()
+ Return True if something was removed, False otherwise.
- # Merge the new information into the cached results
- category.result_dict = _merge_yaml(category.result_dict, addition_dict)
+ """
+ # ensure configs are current by reading in.
+ get_config(section)
+ # check args and get the objects we need.
+ scope = get_scope(scope)
+ data = scope.get_section(section)
+ filename = scope.get_section_filename(section)
-def add_to_mirror_config(addition_dict, scope=None):
- """Add mirrors to the configuration files"""
- add_to_config('mirrors', addition_dict, scope)
+ # do some checks
+ if not data:
+ return False
+ if not section in data:
+ raise ConfigFileError("Invalid configuration file: '%s'" % filename)
-def add_to_compiler_config(addition_dict, scope=None, arch=None):
- """Add compilerss to the configuration files"""
- if not arch:
- arch = spack.architecture.sys_type()
- add_to_config('compilers', { arch : addition_dict }, scope)
- clear_config_caches()
+    if key_to_rm not in data[section]:
+ return False
+ # remove the key from the section's configuration
+ del data[section][key_to_rm]
+ scope.write_section(section)
-def remove_from_config(category_name, key_to_rm, scope=None):
- """Remove a configuration key and write a new configuration to disk"""
- get_config(category_name)
- scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
- category = _config_sections[category_name]
- rmd_something = False
- for s in scopes_to_rm_from:
- path = get_config_scope_filename(scope, category_name)
- result = _read_config_file(path)
- if not result:
- continue
- if not key_to_rm in result[category_name]:
- continue
- with open(path, 'w') as f:
- result[category_name].pop(key_to_rm, None)
- yaml.dump(result, stream=f, default_flow_style=False)
- category.result_dict.pop(key_to_rm, None)
- rmd_something = True
- return rmd_something
+def print_section(section):
+    """Print a configuration section to stdout."""
+    try:
+        yaml.dump(get_config(section), stream=sys.stdout, default_flow_style=False)
+ except (yaml.YAMLError, IOError) as e:
+ raise ConfigError("Error reading configuration: %s" % section)
-"""Print a configuration to stdout"""
-def print_category(category_name):
- if not category_name in _config_sections:
- tty.die("Unknown config category %s. Valid options are: %s" %
- (category_name, ", ".join([s for s in _config_sections])))
- yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)
+class ConfigError(SpackError): pass
+class ConfigFileError(ConfigError): pass
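
A short, hypothetical round trip through the new scope-aware API defined above; the mirror name and URL are made up:

    import spack.config as config

    # Merge a new mirror into the user scope and write ~/.spack/mirrors.yaml.
    config.update_config('mirrors', {'local': 'file:///tmp/spack-mirror'}, scope='user')

    # Read the merged view across all scopes (site first, user layered on top).
    assert 'local' in config.get_config('mirrors')

    # Path of the file that backs a given scope/section.
    mirrors_yaml = config.get_config_filename('user', 'mirrors')

    # Remove the entry from the user scope again; returns True if it was there.
    config.remove_from_config('mirrors', 'local', scope='user')
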
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 303df6df38..2f1b6e29ea 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -531,6 +531,7 @@ class Spec(object):
and self.architecture
and self.compiler and self.compiler.concrete
and self.dependencies.concrete)
+
return self._concrete
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 78930ecb5b..da85bd6f21 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -242,7 +242,8 @@ class Stage(object):
# TODO: move mirror logic out of here and clean it up!
if self.mirror_path:
- urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]
+            urls = ["%s/%s" % (u, self.mirror_path)
+                    for name, u in spack.config.get_config('mirrors').iteritems()]
digest = None
if isinstance(self.fetcher, fs.URLFetchStrategy):
@@ -345,7 +346,7 @@ class DIYStage(object):
def _get_mirrors():
"""Get mirrors from spack configuration."""
- config = spack.config.get_mirror_config()
+ config = spack.config.get_config('mirrors')
return [val for name, val in config.iteritems()]
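
Since the mirrors section is a plain name-to-URL mapping, callers iterate it with ``iteritems()`` (this codebase is Python 2); a small sketch with an invented mirror name:

    import spack.config

    # e.g. {'local': 'file:///tmp/spack-mirror'}
    mirrors = spack.config.get_config('mirrors')
    for name, url in mirrors.iteritems():
        print name, url
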
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index b1195dfe4e..fe6cec82fe 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -27,6 +27,7 @@ import shutil
import os
from tempfile import mkdtemp
import spack
+import spack.config
from spack.test.mock_packages_test import *
# Some sample compiler config data
@@ -72,9 +73,9 @@ class ConfigTest(MockPackagesTest):
super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
spack.config.config_scopes = [
- ('test_low_priority', os.path.join(self.tmp_dir, 'low')),
- ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
-
+ spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')),
+ spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+ spack.config.valid_scopes = ('test_low_priority', 'test_high_priority')
def tearDown(self):
super(ConfigTest, self).tearDown()
@@ -83,17 +84,19 @@ class ConfigTest(MockPackagesTest):
def check_config(self, comps, *compiler_names):
"""Check that named compilers in comps match Spack's config."""
- config = spack.config.get_compilers_config()
+ config = spack.config.get_config('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'f90']
for key in compiler_names:
for c in compiler_list:
- self.assertEqual(comps[key][c], config[key][c])
+ expected = comps[key][c]
+ actual = config[key][c]
+ self.assertEqual(expected, actual)
def test_write_key_in_memory(self):
# Write b_comps "on top of" a_comps.
- spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
- spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+ spack.config.update_config('compilers', a_comps, 'test_low_priority')
+ spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Make sure the config looks how we expect.
self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
@@ -102,8 +105,8 @@ class ConfigTest(MockPackagesTest):
def test_write_key_to_disk(self):
# Write b_comps "on top of" a_comps.
- spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
- spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+ spack.config.update_config('compilers', a_comps, 'test_low_priority')
+ spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 5ce010ae8f..e1f7961bed 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -79,7 +79,8 @@ class DatabaseTest(MockPackagesTest):
def _mock_install(self, spec):
s = Spec(spec)
- pkg = spack.repo.get(s.concretized())
+ s.concretize()
+ pkg = spack.repo.get(s)
pkg.do_install(fake=True)
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 2150b40876..6d92aacab9 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -41,9 +41,10 @@ class MockPackagesTest(unittest.TestCase):
spack.config.clear_config_caches()
self.real_scopes = spack.config.config_scopes
+ self.real_valid_scopes = spack.config.valid_scopes
spack.config.config_scopes = [
- ('site', spack.mock_site_config),
- ('user', spack.mock_user_config)]
+ spack.config.ConfigScope('site', spack.mock_site_config),
+ spack.config.ConfigScope('user', spack.mock_user_config)]
# Store changes to the package's dependencies so we can
# restore later.
@@ -71,6 +72,7 @@ class MockPackagesTest(unittest.TestCase):
"""Restore the real packages path after any test."""
spack.repo.swap(self.db)
spack.config.config_scopes = self.real_scopes
+ spack.config.valid_scopes = self.real_valid_scopes
spack.config.clear_config_caches()
# Restore dependency changes that happened during the test