summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/__init__.py16
-rw-r--r--lib/spack/spack/cmd/repo.py (renamed from lib/spack/spack/cmd/packagerepo.py)43
-rw-r--r--lib/spack/spack/config.py206
-rw-r--r--lib/spack/spack/packages.py117
-rw-r--r--lib/spack/spack/repo_loader.py35
-rw-r--r--lib/spack/spack/test/config.py82
6 files changed, 321 insertions, 178 deletions
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 1d67b45341..09bc9ca52a 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -47,11 +47,17 @@ install_path = join_path(opt_path, "spack")
share_path = join_path(prefix, "share", "spack")
#
-# Set up the packages database.
+# Setup the spack.repos namespace
+#
+from spack.repo_loader import RepoNamespace
+repos = RepoNamespace()
+
+#
+# Set up the default packages database.
#
from spack.packages import PackageDB
packages_path = join_path(var_path, "packages")
-db = PackageDB(packages_path)
+db = PackageDB()
#
# Paths to mock files for testing.
@@ -63,12 +69,6 @@ mock_site_config = join_path(mock_config_path, "site_spackconfig")
mock_user_config = join_path(mock_config_path, "user_spackconfig")
#
-# Setup the spack.repos namespace
-#
-from spack.repo_loader import RepoNamespace
-repos = RepoNamespace()
-
-#
# This controls how spack lays out install prefixes and
# stage directories.
#
diff --git a/lib/spack/spack/cmd/packagerepo.py b/lib/spack/spack/cmd/repo.py
index 2819d0f980..1261c7ada9 100644
--- a/lib/spack/spack/cmd/packagerepo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -32,7 +32,7 @@ from llnl.util.filesystem import join_path, mkdirp
import spack.spec
import spack.config
from spack.util.environment import get_path
-from spack.packages import packagerepo_filename
+from spack.packages import repo_config
import os
import exceptions
@@ -42,43 +42,48 @@ description = "Manage package sources"
def setup_parser(subparser):
sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='packagerepo_command')
+ metavar='SUBCOMMAND', dest='repo_command')
- add_parser = sp.add_parser('add', help=packagerepo_add.__doc__)
+ add_parser = sp.add_parser('add', help=repo_add.__doc__)
add_parser.add_argument('directory', help="Directory containing the packages.")
- create_parser = sp.add_parser('create', help=packagerepo_create.__doc__)
+ create_parser = sp.add_parser('create', help=repo_create.__doc__)
create_parser.add_argument('directory', help="Directory containing the packages.")
create_parser.add_argument('name', help="Name of new package repository.")
-    remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__)
+
+    remove_parser = sp.add_parser('remove', help=repo_remove.__doc__)
remove_parser.add_argument('name')
- list_parser = sp.add_parser('list', help=packagerepo_list.__doc__)
+ list_parser = sp.add_parser('list', help=repo_list.__doc__)
def add_to_config(dir):
config = spack.config.get_config()
user_config = spack.config.get_config('user')
orig = None
- if config.has_value('packagerepo', '', 'directories'):
- orig = config.get_value('packagerepo', '', 'directories')
+ if config.has_value('repo', '', 'directories'):
+ orig = config.get_value('repo', '', 'directories')
if orig and dir in orig.split(':'):
return False
newsetting = orig + ':' + dir if orig else dir
- user_config.set_value('packagerepo', '', 'directories', newsetting)
+ user_config.set_value('repo', '', 'directories', newsetting)
user_config.write()
return True
-def packagerepo_add(args):
+def repo_add(args):
"""Add package sources to the Spack configuration."""
if not add_to_config(args.directory):
tty.die('Repo directory %s already exists in the repo list' % dir)
-def packagerepo_create(args):
+def repo_create(args):
"""Create a new package repo at a directory and name"""
dir = args.directory
name = args.name
@@ -90,7 +95,7 @@ def packagerepo_create(args):
mkdirp(dir)
except exceptions.OSError, e:
tty.die('Failed to create new directory %s' % dir)
- path = os.path.join(dir, packagerepo_filename)
+ path = os.path.join(dir, repo_config)
try:
with closing(open(path, 'w')) as repofile:
repofile.write(name + '\n')
@@ -101,12 +106,12 @@ def packagerepo_create(args):
tty.warn('Repo directory %s already exists in the repo list' % dir)
-def packagerepo_remove(args):
+def repo_remove(args):
"""Remove a package source from the Spack configuration"""
pass
-def packagerepo_list(args):
+def repo_list(args):
"""List package sources and their mnemonics"""
root_names = spack.db.repos
max_len = max(len(s[0]) for s in root_names)
@@ -116,9 +121,9 @@ def packagerepo_list(args):
-def packagerepo(parser, args):
- action = { 'add' : packagerepo_add,
- 'create' : packagerepo_create,
- 'remove' : packagerepo_remove,
- 'list' : packagerepo_list }
- action[args.packagerepo_command](args)
+def repo(parser, args):
+ action = { 'add' : repo_add,
+ 'create' : repo_create,
+ 'remove' : repo_remove,
+ 'list' : repo_list }
+ action[args.repo_command](args)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 3e91958c2c..dc59f9a5a3 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -45,11 +45,11 @@ several configuration files, such as compilers.yaml or mirrors.yaml.
Configuration file format
===============================
-Configuration files are formatted using YAML syntax.
-This format is implemented by Python's
-yaml class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax. This format is
+implemented by libyaml (included with Spack as an external module),
+and it's easy to read and versatile.
-The config files are structured as trees, like this ``compiler`` section::
+Config files are structured as trees, like this ``compiler`` section::
compilers:
chaos_5_x86_64_ib:
@@ -83,62 +83,73 @@ would looks like:
}
}
-Some routines, like get_mirrors_config and get_compilers_config may strip
-off the top-levels of the tree and return subtrees.
+Some convenience functions, like get_mirrors_config and
+``get_compilers_config`` may strip off the top-levels of the tree and
+return subtrees.
+
"""
import os
-import exceptions
import sys
-
-from external.ordereddict import OrderedDict
-from llnl.util.lang import memoized
-import spack.error
-
+import copy
from external import yaml
from external.yaml.error import MarkedYAMLError
+
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
+from llnl.util.lang import memoized
+
+import spack
+
_config_sections = {}
class _ConfigCategory:
name = None
filename = None
merge = True
- def __init__(self, n, f, m):
- self.name = n
- self.filename = f
- self.merge = m
+ def __init__(self, name, filename, merge, strip):
+ self.name = name
+ self.filename = filename
+ self.merge = merge
+ self.strip = strip
self.files_read_from = []
self.result_dict = {}
- _config_sections[n] = self
+ _config_sections[name] = self
-_ConfigCategory('compilers', 'compilers.yaml', True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True)
-_ConfigCategory('view', 'views.yaml', True)
-_ConfigCategory('order', 'orders.yaml', True)
+_ConfigCategory('config', 'config.yaml', True, False)
+_ConfigCategory('compilers', 'compilers.yaml', True, True)
+_ConfigCategory('mirrors', 'mirrors.yaml', True, True)
+_ConfigCategory('view', 'views.yaml', True, True)
+_ConfigCategory('order', 'orders.yaml', True, True)
"""Names of scopes and their corresponding configuration files."""
config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
('user', os.path.expanduser('~/.spack'))]
_compiler_by_arch = {}
-_read_config_file_result = {}
+
+@memoized
def _read_config_file(filename):
- """Read a given YAML configuration file"""
- global _read_config_file_result
- if filename in _read_config_file_result:
- return _read_config_file_result[filename]
+ """Read a YAML configuration file"""
+
+ # Ignore nonexisting files.
+ if not os.path.exists(filename):
+ return None
+
+ elif not os.path.isfile(filename):
+        tty.die("Invalid configuration. %s exists but is not a file." % filename)
+
+ elif not os.access(filename, os.R_OK):
+ tty.die("Configuration file %s is not readable." % filename)
try:
with open(filename) as f:
- ydict = yaml.load(f)
+ return yaml.load(f)
+
except MarkedYAMLError, e:
tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
- except exceptions.IOError, e:
- _read_config_file_result[filename] = None
- return None
- _read_config_file_result[filename] = ydict
- return ydict
+
+ except IOError, e:
+ tty.die("Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches():
@@ -147,41 +158,66 @@ def clear_config_caches():
for key,s in _config_sections.iteritems():
s.files_read_from = []
s.result_dict = {}
- spack.config._read_config_file_result = {}
+
+ _read_config_file.clear()
spack.config._compiler_by_arch = {}
spack.compilers._cached_default_compiler = None
-def _merge_dicts(d1, d2):
- """Recursively merges two configuration trees, with entries
- in d2 taking precedence over d1"""
- if not d1:
- return d2.copy()
- if not d2:
- return d1
+def _merge_yaml(dest, source):
+ """Merges source into dest; entries in source take precedence over dest.
- for key2, val2 in d2.iteritems():
- if not key2 in d1:
- d1[key2] = val2
- continue
- val1 = d1[key2]
- if isinstance(val1, dict) and isinstance(val2, dict):
- d1[key2] = _merge_dicts(val1, val2)
- continue
- if isinstance(val1, list) and isinstance(val2, list):
- val1.extend(val2)
- seen = set()
- d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
- continue
- d1[key2] = val2
- return d1
+ Config file authors can optionally end any attribute in a dict
+ with `::` instead of `:`, and the key will override that of the
+ parent instead of merging.
+ """
+ def they_are(t):
+ return isinstance(dest, t) and isinstance(source, t)
+ # If both are None, handle specially and return None.
+ if source is None and dest is None:
+ return None
-def get_config(category_name):
- """Get the confguration tree for the names category. Strips off the
- top-level category entry from the dict"""
- global config_scopes
- category = _config_sections[category_name]
+    # If only source is None, the result is None (source overwrites dest).
+ elif source is None:
+ return None
+
+ # Source list is prepended (for precedence)
+ if they_are(list):
+ seen = set(source)
+ dest[:] = source + [x for x in dest if x not in seen]
+ return dest
+
+ # Source dict is merged into dest. Extra ':' means overwrite.
+ elif they_are(dict):
+ for sk, sv in source.iteritems():
+ # allow total override with, e.g., repos::
+ override = sk.endswith(':')
+ if override:
+ sk = sk.rstrip(':')
+
+ if override or not sk in dest:
+ dest[sk] = copy.copy(sv)
+ else:
+ dest[sk] = _merge_yaml(dest[sk], source[sk])
+ return dest
+
+ # In any other case, overwrite with a copy of the source value.
+ else:
+ return copy.copy(source)
+
+
+def substitute_spack_prefix(path):
+ """Replaces instances of $spack with Spack's prefix."""
+ return path.replace('$spack', spack.prefix)
+
+
+def get_config(category='config'):
+    """Get the configuration tree for a category.
+
+ Strips off the top-level category entry from the dict
+ """
+ category = _config_sections[category]
if category.result_dict:
return category.result_dict
@@ -191,14 +227,18 @@ def get_config(category_name):
result = _read_config_file(path)
if not result:
continue
- if not category_name in result:
- continue
+
+ if category.strip:
+ if not category.name in result:
+ continue
+ result = result[category.name]
+
category.files_read_from.insert(0, path)
- result = result[category_name]
if category.merge:
- category.result_dict = _merge_dicts(category.result_dict, result)
+ category.result_dict = _merge_yaml(category.result_dict, result)
else:
category.result_dict = result
+
return category.result_dict
@@ -215,7 +255,7 @@ def get_compilers_config(arch=None):
cc_config = get_config('compilers')
if arch in cc_config and 'all' in cc_config:
arch_compiler = dict(cc_config[arch])
- _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
+ _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all'])
elif arch in cc_config:
_compiler_by_arch[arch] = cc_config[arch]
elif 'all' in cc_config:
@@ -225,6 +265,13 @@ def get_compilers_config(arch=None):
return _compiler_by_arch[arch]
+def get_repos_config():
+ config = get_config()
+ if 'repos' not in config:
+ return []
+ return config['repos']
+
+
def get_mirror_config():
"""Get the mirror configuration from config files"""
return get_config('mirrors')
@@ -232,7 +279,6 @@ def get_mirror_config():
def get_config_scope_dirname(scope):
"""For a scope return the config directory"""
- global config_scopes
for s,p in config_scopes:
if s == scope:
return p
@@ -251,16 +297,16 @@ def get_config_scope_filename(scope, category_name):
def add_to_config(category_name, addition_dict, scope=None):
"""Merge a new dict into a configuration tree and write the new
configuration to disk"""
- global _read_config_file_result
get_config(category_name)
category = _config_sections[category_name]
- #If scope is specified, use it. Otherwise use the last config scope that
- #we successfully parsed data from.
+ # If scope is specified, use it. Otherwise use the last config scope that
+ # we successfully parsed data from.
file = None
path = None
if not scope and not category.files_read_from:
scope = 'user'
+
if scope:
try:
dir = get_config_scope_dirname(scope)
@@ -268,32 +314,37 @@ def add_to_config(category_name, addition_dict, scope=None):
mkdirp(dir)
path = os.path.join(dir, category.filename)
file = open(path, 'w')
- except exceptions.IOError, e:
+ except IOError, e:
pass
else:
for p in category.files_read_from:
try:
file = open(p, 'w')
- except exceptions.IOError, e:
+ except IOError, e:
pass
if file:
path = p
break;
+
if not file:
tty.die('Unable to write to config file %s' % path)
- #Merge the new information into the existing file info, then write to disk
- new_dict = _read_config_file_result[path]
+ # Merge the new information into the existing file info, then write to disk
+ new_dict = _read_config_file(path)
+
if new_dict and category_name in new_dict:
new_dict = new_dict[category_name]
- new_dict = _merge_dicts(new_dict, addition_dict)
+
+ new_dict = _merge_yaml(new_dict, addition_dict)
new_dict = { category_name : new_dict }
- _read_config_file_result[path] = new_dict
+
+ # Install new dict as memoized value, and dump to disk
+ _read_config_file.cache[path] = new_dict
yaml.dump(new_dict, stream=file, default_flow_style=False)
file.close()
- #Merge the new information into the cached results
- category.result_dict = _merge_dicts(category.result_dict, addition_dict)
+ # Merge the new information into the cached results
+ category.result_dict = _merge_yaml(category.result_dict, addition_dict)
def add_to_mirror_config(addition_dict, scope=None):
@@ -311,7 +362,6 @@ def add_to_compiler_config(addition_dict, scope=None, arch=None):
def remove_from_config(category_name, key_to_rm, scope=None):
"""Remove a configuration key and write a new configuration to disk"""
- global config_scopes
get_config(category_name)
scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
category = _config_sections[category_name]
diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py
index 3b9d74dd6e..c414234386 100644
--- a/lib/spack/spack/packages.py
+++ b/lib/spack/spack/packages.py
@@ -30,7 +30,9 @@ import glob
import imp
import spack.config
import re
-from contextlib import closing
+import itertools
+import traceback
+from external import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
@@ -44,7 +46,7 @@ from sets import Set
from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name
# Filename for package repo names
-packagerepo_filename = 'reponame'
+repo_config = 'repo.yaml'
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
@@ -56,56 +58,85 @@ def _autospec(function):
return converter
+def sliding_window(seq, n):
+ it = iter(seq)
+ result = tuple(itertools.islice(it, n))
+ if len(result) == n:
+ yield result
+ for elem in it:
+ result = result[1:] + (elem,)
+ yield result
+
+
class PackageDB(object):
- def __init__(self, default_root):
- """Construct a new package database from a root directory."""
-
- #Collect the repos from the config file and read their names from the file system
- repo_dirs = self._repo_list_from_config()
- repo_dirs.append(default_root)
- self.repos = [(self._read_reponame_from_directory(dir), dir) for dir in repo_dirs]
-
- # Check for duplicate repo names
- s = set()
- dups = set(r for r in self.repos if r[0] in s or s.add(r[0]))
- if dups:
- reponame = list(dups)[0][0]
- dir1 = list(dups)[0][1]
- dir2 = dict(s)[reponame]
- tty.die("Package repo %s in directory %s has the same name as the "
- "repo in directory %s" %
- (reponame, dir1, dir2))
+ def __init__(self, *repo_dirs):
+ """Construct a new package database from a list of directories.
+
+ Args:
+ repo_dirs List of directories containing packages.
+
+ If ``repo_dirs`` is empty, gets repository list from Spack configuration.
+ """
+ if not repo_dirs:
+ repo_dirs = spack.config.get_repos_config()
+ if not repo_dirs:
+ tty.die("Spack configuration contains no package repositories.")
+
+ # Collect the repos from the config file and read their names
+ # from the file system
+ repo_dirs = [spack.config.substitute_spack_prefix(rd) for rd in repo_dirs]
+
+ self.repos = []
+ for rdir in repo_dirs:
+ rname = self._read_reponame_from_directory(rdir)
+ if rname:
+ self.repos.append((self._read_reponame_from_directory(rdir), rdir))
+
+
+ by_path = sorted(self.repos, key=lambda r:r[1])
+ by_name = sorted(self.repos, key=lambda r:r[0])
+
+        for r1, r2 in sliding_window(by_path, 2):
+ if r1[1] == r2[1]:
+ tty.die("Package repos are the same:",
+ " %20s %s" % r1, " %20s %s" % r2)
+
+        for r1, r2 in sliding_window(by_name, 2):
+ if r1[0] == r2[0]:
+ tty.die("Package repos cannot have the same name:",
+ " %20s %s" % r1, " %20s %s" % r2)
# For each repo, create a RepoLoader
- self.repo_loaders = dict([(r[0], RepoLoader(r[0], r[1])) for r in self.repos])
+ self.repo_loaders = dict((name, RepoLoader(name, path))
+ for name, path in self.repos)
self.instances = {}
self.provider_index = None
def _read_reponame_from_directory(self, dir):
- """For a packagerepo directory, read the repo name from the dir/reponame file"""
- path = os.path.join(dir, packagerepo_filename)
+ """For a packagerepo directory, read the repo name from the
+ $root/repo.yaml file"""
+ path = os.path.join(dir, repo_config)
try:
- with closing(open(path, 'r')) as reponame_file:
- name = reponame_file.read().lstrip().rstrip()
- if not re.match(r'[a-zA-Z][a-zA-Z0-9]+', name):
- tty.die("Package repo name '%s', read from %s, is an invalid name. "
- "Repo names must began with a letter and only contain letters "
- "and numbers." % (name, path))
+ with open(path) as reponame_file:
+ yaml_data = yaml.load(reponame_file)
+
+ if (not yaml_data or
+ 'repo' not in yaml_data or
+ 'namespace' not in yaml_data['repo']):
+ tty.die("Invalid %s in %s" % (repo_config, dir))
+
+ name = yaml_data['repo']['namespace']
+ if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', name):
+ tty.die(
+ "Package repo name '%s', read from %s, is an invalid name. "
+                    "Repo names must begin with a letter and only contain "
+ "letters and numbers." % (name, path))
return name
except exceptions.IOError, e:
- tty.die("Could not read from package repo name file %s" % path)
-
-
-
- def _repo_list_from_config(self):
- """Read through the spackconfig and return the list of packagerepo directories"""
- config = spack.config.get_config()
- if not config.has_option('packagerepo', 'directories'): return []
- dir_string = config.get('packagerepo', 'directories')
- return dir_string.split(':')
+ tty.die("Error reading %s when opening %s" % (repo_config, dir))
@_autospec
@@ -125,7 +156,7 @@ class PackageDB(object):
except Exception, e:
if spack.debug:
sys.excepthook(*sys.exc_info())
- raise FailedConstructorError(spec.name, e)
+ raise FailedConstructorError(spec.name, *sys.exc_info())
return self.instances[spec]
@@ -304,8 +335,10 @@ class UnknownPackageError(spack.error.SpackError):
class FailedConstructorError(spack.error.SpackError):
"""Raised when a package's class constructor fails."""
- def __init__(self, name, reason):
+ def __init__(self, name, exc_type, exc_obj, exc_tb):
super(FailedConstructorError, self).__init__(
"Class constructor failed for package '%s'." % name,
- str(reason))
+ '\nCaused by:\n' +
+ ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
+ ''.join(traceback.format_tb(exc_tb)))
self.name = name
diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py
index 6eaa1eead2..92da1cf709 100644
--- a/lib/spack/spack/repo_loader.py
+++ b/lib/spack/spack/repo_loader.py
@@ -1,8 +1,10 @@
-import spack
-import spack.repos
import re
+import sys
import types
+import traceback
+
from llnl.util.lang import *
+import spack
# Name of module under which packages are imported
imported_packages_module = 'spack.repos'
@@ -13,14 +15,30 @@ package_file_name = 'package.py'
import sys
class LazyLoader:
"""The LazyLoader handles cases when repo modules or classes
- are imported. It watches for 'spack.repos.*' loads, then
+ are imported. It watches for 'spack.repos.*' loads, then
redirects the load to the appropriate module."""
def find_module(self, fullname, pathname):
if not fullname.startswith(imported_packages_module):
return None
+
+ print "HERE ==="
+ print
+ for line in traceback.format_stack():
+ print " ", line.strip()
+ print
+ print "full: ", fullname
+ print "path: ", pathname
+ print
+
partial_name = fullname[len(imported_packages_module)+1:]
- repo = partial_name.split('.')[0]
- module = partial_name.split('.')[1]
+
+ print "partial: ", partial_name
+ print
+
+ last_dot = partial_name.rfind('.')
+ repo = partial_name[:last_dot]
+ module = partial_name[last_dot+1:]
+
repo_loader = spack.db.repo_loaders.get(repo)
if repo_loader:
try:
@@ -43,7 +61,7 @@ class RepoNamespace(types.ModuleType):
def __init__(self):
import sys
sys.modules[imported_packages_module] = self
-
+
def __getattr__(self, name):
if name in _reponames:
return _reponames[name]
@@ -62,7 +80,7 @@ class RepoLoader(types.ModuleType):
"""Each RepoLoader is associated with a repository, and the RepoLoader is
responsible for loading packages out of that repository. For example,
a RepoLoader may be responsible for spack.repos.original, and when someone
- references spack.repos.original.libelf that RepoLoader will load the
+ references spack.repos.original.libelf that RepoLoader will load the
libelf package."""
def __init__(self, reponame, repopath):
self.path = repopath
@@ -70,7 +88,6 @@ class RepoLoader(types.ModuleType):
self.module_name = imported_packages_module + '.' + reponame
if not reponame in _reponames:
_reponames[reponame] = self
- spack.repos.add_repo(reponame, self)
import sys
sys.modules[self.module_name] = self
@@ -111,5 +128,3 @@ class RepoLoader(types.ModuleType):
pkg_name, file_path, e.message))
return module
-
-
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 790b22f3b0..eed182a257 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -30,45 +30,85 @@ import spack
from spack.packages import PackageDB
from spack.test.mock_packages_test import *
+# Some sample compiler config data
+a_comps = {
+ "gcc@4.7.3" : {
+ "cc" : "/gcc473",
+ "cxx" : "/g++473",
+ "f77" : None,
+ "f90" : None },
+ "gcc@4.5.0" : {
+ "cc" : "/gcc450",
+ "cxx" : "/g++450",
+ "f77" : "/gfortran",
+ "f90" : "/gfortran" },
+ "clang@3.3" : {
+ "cc" : "<overwritten>",
+ "cxx" : "<overwritten>",
+ "f77" : "<overwritten>",
+ "f90" : "<overwritten>" }
+}
+
+b_comps = {
+ "icc@10.0" : {
+ "cc" : "/icc100",
+ "cxx" : "/icc100",
+ "f77" : None,
+ "f90" : None },
+ "icc@11.1" : {
+ "cc" : "/icc111",
+ "cxx" : "/icp111",
+ "f77" : "/ifort",
+ "f90" : "/ifort" },
+ "clang@3.3" : {
+ "cc" : "/clang",
+ "cxx" : "/clang++",
+ "f77" : None,
+ "f90" : None}
+}
+
class ConfigTest(MockPackagesTest):
def setUp(self):
- self.initmock()
+ super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
- spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
- ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+ spack.config.config_scopes = [
+ ('test_low_priority', os.path.join(self.tmp_dir, 'low')),
+ ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+
def tearDown(self):
- self.cleanmock()
+ super(ConfigTest, self).tearDown()
shutil.rmtree(self.tmp_dir, True)
- def check_config(self, comps):
+
+ def check_config(self, comps, *compiler_names):
+ """Check that named compilers in comps match Spack's config."""
config = spack.config.get_compilers_config()
compiler_list = ['cc', 'cxx', 'f77', 'f90']
- for key in comps:
+ for key in compiler_names:
for c in compiler_list:
- if comps[key][c] == '/bad':
- continue
self.assertEqual(comps[key][c], config[key][c])
- def test_write_key(self):
- a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
- "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
- "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
+ def test_write_key_in_memory(self):
+ # Write b_comps "on top of" a_comps.
+ spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
+ spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+
+ # Make sure the config looks how we expect.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
- b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
- "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
- "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
+ def test_write_key_to_disk(self):
+ # Write b_comps "on top of" a_comps.
spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
- self.check_config(a_comps)
- self.check_config(b_comps)
-
+ # Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
- self.check_config(a_comps)
- self.check_config(b_comps)
-
+ # Same check again, to ensure consistency.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')