-rwxr-xr-x  bin/spack                                    4
-rw-r--r--  lib/spack/llnl/util/lang.py                 33
-rw-r--r--  lib/spack/llnl/util/tty/colify.py            7
-rw-r--r--  lib/spack/spack/__init__.py                 20
-rw-r--r--  lib/spack/spack/build_environment.py        25
-rw-r--r--  lib/spack/spack/cmd/create.py               12
-rw-r--r--  lib/spack/spack/cmd/edit.py                 35
-rw-r--r--  lib/spack/spack/cmd/repo.py                124
-rw-r--r--  lib/spack/spack/config.py                  208
-rw-r--r--  lib/spack/spack/directives.py                3
-rw-r--r--  lib/spack/spack/package.py                   6
-rw-r--r--  lib/spack/spack/packages.py                449
-rw-r--r--  lib/spack/spack/patch.py                     4
-rw-r--r--  lib/spack/spack/repo_loader.py             110
-rw-r--r--  lib/spack/spack/spec.py                     22
-rw-r--r--  lib/spack/spack/test/config.py              82
-rw-r--r--  lib/spack/spack/test/directory_layout.py     9
-rw-r--r--  lib/spack/spack/test/mock_packages_test.py  45
-rw-r--r--  lib/spack/spack/test/package_sanity.py       8
-rw-r--r--  lib/spack/spack/test/packages.py             6
-rw-r--r--  lib/spack/spack/test/spec_dag.py            12
-rw-r--r--  lib/spack/spack/util/naming.py              31
-rw-r--r--  var/spack/mock_packages/_repo.yaml           2
-rw-r--r--  var/spack/packages/_repo.yaml                2
24 files changed, 1032 insertions(+), 227 deletions(-)
diff --git a/bin/spack b/bin/spack
index 127a85f6fe..efa1ccc281 100755
--- a/bin/spack
+++ b/bin/spack
@@ -113,8 +113,8 @@ def main():
spack.spack_working_dir = working_dir
if args.mock:
- from spack.packages import PackageDB
- spack.db = PackageDB(spack.mock_packages_path)
+ from spack.packages import PackageFinder
+ spack.db.swap(PackageFinder(spack.mock_packages_path))
# If the user asked for it, don't check ssl certs.
if args.insecure:
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 156ee34c9e..108fa98b36 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -26,6 +26,7 @@ import os
import re
import sys
import functools
+import collections
import inspect
# Ignore emacs backups when listing modules
@@ -167,16 +168,32 @@ def has_method(cls, name):
return False
-def memoized(obj):
+class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
- cache = obj.cache = {}
- @functools.wraps(obj)
- def memoizer(*args, **kwargs):
- if args not in cache:
- cache[args] = obj(*args, **kwargs)
- return cache[args]
- return memoizer
+ def __init__(self, func):
+ self.func = func
+ self.cache = {}
+
+
+ def __call__(self, *args):
+ if not isinstance(args, collections.Hashable):
+ # Not hashable, so just call the function.
+ return self.func(*args)
+
+ if args not in self.cache:
+ self.cache[args] = self.func(*args)
+ return self.cache[args]
+
+
+ def __get__(self, obj, objtype):
+ """Support instance methods."""
+ return functools.partial(self.__call__, obj)
+
+
+ def clear(self):
+ """Expunge cache so that self.func will be called again."""
+ self.cache.clear()
def list_modules(directory, **kwargs):
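A minimal usage sketch (the function name here is made up) of the rewritten decorator: the memoized instance is itself the callable, `clear()` is what `clear_config_caches()` in config.py relies on, and the `__get__` hook is what lets the same decorator wrap instance methods such as `PackageDB.all_package_names` later in this diff.

    from llnl.util.lang import memoized

    @memoized
    def expensive(n):              # hypothetical function
        print("computing %d" % n)
        return n * 2

    expensive(21)       # prints "computing 21" and returns 42
    expensive(21)       # cache hit: returns 42 without recomputing
    expensive.clear()   # expunge the cache; the next call recomputes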
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index db928444c7..0c5227c6bd 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -210,6 +210,13 @@ def colify(elts, **options):
def colify_table(table, **options):
+ """Version of colify() for data expressed in rows, (list of lists).
+
+ Same as regular colify but takes a list of lists, where each
+ sub-list must be the same length, and each is interpreted as a
+ row in a table. Regular colify displays a sequential list of
+ values in columns.
+ """
if table is None:
raise TypeError("Can't call colify_table on NoneType")
elif not table or not table[0]:
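For illustration, a hedged sketch of calling the new helper (values are placeholders): each inner list is one row, and all rows must have the same length.

    from llnl.util.tty.colify import colify_table

    colify_table([['gov.llnl.spack',      '/site/var/spack/packages'],
                  ['gov.llnl.spack.mock', '/site/var/spack/mock_packages']])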
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 1ecf662178..eccec12d3b 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -23,8 +23,10 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import sys
import tempfile
from llnl.util.filesystem import *
+import llnl.util.tty as tty
# This lives in $prefix/lib/spack/spack/__file__
spack_root = ancestor(__file__, 4)
@@ -41,6 +43,7 @@ test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
var_path = join_path(spack_root, "var", "spack")
stage_path = join_path(var_path, "stage")
+packages_path = join_path(var_path, "packages")
share_path = join_path(spack_root, "share", "spack")
prefix = spack_root
@@ -49,11 +52,20 @@ install_path = join_path(opt_path, "spack")
etc_path = join_path(prefix, "etc")
#
-# Set up the packages database.
+# Set up the spack.repos namespace
#
-from spack.packages import PackageDB
-packages_path = join_path(var_path, "packages")
-db = PackageDB(packages_path)
+from spack.repo_loader import RepoNamespace
+repos = RepoNamespace()
+
+#
+# Set up the default packages database.
+#
+import spack.packages
+_repo_paths = spack.config.get_repos_config()
+if not _repo_paths:
+ tty.die("Spack configuration contains no package repositories.")
+db = spack.packages.PackageFinder(*_repo_paths)
+sys.meta_path.append(db)
#
# Set up the installed packages database
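Appending `db` to `sys.meta_path` registers the PackageFinder as a meta-path importer (its `find_module`/`load_module` hooks appear in packages.py below), so package modules become importable under the namespace declared in each repo's `_repo.yaml`. A hypothetical sketch, assuming a package `mpich` exists in a repo with namespace `gov.llnl.spack`:

    import spack   # importing spack puts spack.db on sys.meta_path

    # Same pattern get_class_for_package_name() uses later in this diff:
    module = __import__('gov.llnl.spack.mpich', fromlist=['Mpich'])
    print(module.Mpich)   # the package class defined in mpich/package.py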
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index dac25d9940..96033edc20 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -157,7 +157,7 @@ def set_build_environment_variables(pkg):
path_set("PKG_CONFIG_PATH", pkg_config_dirs)
-def set_module_variables_for_package(pkg):
+def set_module_variables_for_package(pkg, m):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
@@ -228,11 +228,32 @@ def get_rpaths(pkg):
return rpaths
+def parent_class_modules(cls):
+ """Get list of super class modules that are all descend from spack.Package"""
+ if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
+ return []
+ result = []
+ module = sys.modules.get(cls.__module__)
+ if module:
+ result = [ module ]
+ for c in cls.__bases__:
+ result.extend(parent_class_modules(c))
+ return result
+
+
def setup_package(pkg):
"""Execute all environment setup routines."""
set_compiler_environment_variables(pkg)
set_build_environment_variables(pkg)
- set_module_variables_for_package(pkg)
+
+ # If a user makes their own package repo, e.g.
+ # spack.repos.mystuff.libelf.Libelf, and they inherit from
+ # an existing class like spack.repos.original.libelf.Libelf,
+ # then set the module variables for both classes so the
+ # parent class can still use them if it gets called.
+ modules = parent_class_modules(pkg.__class__)
+ for mod in modules:
+ set_module_variables_for_package(pkg, mod)
# Allow dependencies to set up environment as well.
for dep_spec in pkg.spec.traverse(root=False):
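A self-contained toy sketch of the walk `parent_class_modules` performs (these classes only stand in for spack.Package and real repo classes): collect the defining module of the class and of every ancestor below the root, so the build globals can be injected into each of them.

    import sys

    class Package(object): pass     # stands in for spack.Package
    class Libelf(Package): pass     # e.g. a class from the 'original' repo
    class MyLibelf(Libelf): pass    # a user's subclass in another repo

    def class_modules(cls, root=Package):
        """Same recursion as parent_class_modules, with a toy root class."""
        if not issubclass(cls, root) or issubclass(root, cls):
            return []
        mods = [sys.modules[cls.__module__]]
        for base in cls.__bases__:
            mods.extend(class_modules(base, root))
        return mods

    # Both toy classes live in this one file, so the same module is collected
    # for each; in Spack, each repo class lives in its own package.py module.
    print(class_modules(MyLibelf))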
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 46e6bcec14..1502942f2c 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -94,6 +94,9 @@ def setup_parser(subparser):
'-n', '--name', dest='alternate_name', default=None,
help="Override the autodetected name for the created package.")
subparser.add_argument(
+ '-p', '--package-repo', dest='package_repo', default=None,
+ help="Create the package in the specified packagerepo.")
+ subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Overwrite any existing package file with the same name.")
@@ -160,12 +163,21 @@ def create(parser, args):
tty.die("Couldn't guess a name for this package. Try running:", "",
"spack create --name <name> <url>")
+ package_repo = args.package_repo
+
if not valid_module_name(name):
tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")
tty.msg("This looks like a URL for %s version %s." % (name, version))
tty.msg("Creating template for package %s" % name)
+ # Create a directory for the new package.
+ pkg_path = spack.db.filename_for_package_name(name, package_repo)
+ if os.path.exists(pkg_path) and not args.force:
+ tty.die("%s already exists." % pkg_path)
+ else:
+ mkdirp(os.path.dirname(pkg_path))
+
versions = spack.package.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index b8764ba391..9081d12516 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -78,9 +78,18 @@ def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', dest='force', action='store_true',
help="Open a new file in $EDITOR even if package doesn't exist.")
- subparser.add_argument(
- '-c', '--command', dest='edit_command', action='store_true',
- help="Edit the command with the supplied name instead of a package.")
+
+ filetypes = subparser.add_mutually_exclusive_group()
+ filetypes.add_argument(
+ '-c', '--command', dest='path', action='store_const',
+ const=spack.cmd.command_path, help="Edit the command with the supplied name.")
+ filetypes.add_argument(
+ '-t', '--test', dest='path', action='store_const',
+ const=spack.test_path, help="Edit the test with the supplied name.")
+ filetypes.add_argument(
+ '-m', '--module', dest='path', action='store_const',
+ const=spack.module_path, help="Edit the main spack module with the supplied name.")
+
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")
@@ -88,19 +97,17 @@ def setup_parser(subparser):
def edit(parser, args):
name = args.name
- if args.edit_command:
- if not name:
- path = spack.cmd.command_path
- else:
- path = join_path(spack.cmd.command_path, name + ".py")
- if not os.path.exists(path):
+ path = spack.packages_path
+ if args.path:
+ path = args.path
+ if name:
+ path = join_path(path, name + ".py")
+ if not args.force and not os.path.exists(path):
tty.die("No command named '%s'." % name)
spack.editor(path)
+ elif name:
+ edit_package(name, args.force)
else:
# By default open the directory where packages or commands live.
- if not name:
- path = spack.packages_path
- spack.editor(path)
- else:
- edit_package(name, args.force)
+ spack.editor(path)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
new file mode 100644
index 0000000000..e290f60b7b
--- /dev/null
+++ b/lib/spack/spack/cmd/repo.py
@@ -0,0 +1,124 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+import llnl.util.tty as tty
+from llnl.util.tty.color import colorize
+from llnl.util.tty.colify import colify
+from llnl.util.lang import index_by
+from llnl.util.filesystem import join_path, mkdirp
+
+import spack.spec
+import spack.config
+from spack.util.environment import get_path
+from spack.packages import repo_config_filename
+
+import os
+import exceptions
+from contextlib import closing
+
+description = "Manage package sources"
+
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(
+ metavar='SUBCOMMAND', dest='repo_command')
+
+ add_parser = sp.add_parser('add', help=repo_add.__doc__)
+ add_parser.add_argument('directory', help="Directory containing the packages.")
+
+ create_parser = sp.add_parser('create', help=repo_create.__doc__)
+ create_parser.add_argument('directory', help="Directory containing the packages.")
+ create_parser.add_argument('name', help="Name of new package repository.")
+
+ remove_parser = sp.add_parser('remove', help=repo_remove.__doc__)
+ remove_parser.add_argument('name')
+
+ list_parser = sp.add_parser('list', help=repo_list.__doc__)
+
+
+def add_to_config(dir):
+ config = spack.config.get_config()
+ user_config = spack.config.get_config('user')
+ orig = None
+ if config.has_value('repo', '', 'directories'):
+ orig = config.get_value('repo', '', 'directories')
+ if orig and dir in orig.split(':'):
+ return False
+
+ newsetting = orig + ':' + dir if orig else dir
+ user_config.set_value('repo', '', 'directories', newsetting)
+ user_config.write()
+ return True
+
+
+def repo_add(args):
+ """Add package sources to the Spack configuration."""
+ if not add_to_config(args.directory):
+ tty.die('Repo directory %s already exists in the repo list' % args.directory)
+
+
+def repo_create(args):
+ """Create a new package repo at a directory and name"""
+ dir = args.directory
+ name = args.name
+
+ if os.path.exists(dir) and not os.path.isdir(dir):
+ tty.die('File %s already exists and is not a directory' % dir)
+ if not os.path.exists(dir):
+ try:
+ mkdirp(dir)
+ except exceptions.OSError, e:
+ tty.die('Failed to create new directory %s' % dir)
+ path = os.path.join(dir, repo_config_filename)
+ try:
+ with closing(open(path, 'w')) as repofile:
+ repofile.write('repo:\n')
+ repofile.write('  namespace: %s\n' % name)
+ except exceptions.IOError, e:
+ tty.die('Could not create new file %s' % path)
+
+ if not add_to_config(args.directory):
+ tty.warn('Repo directory %s already exists in the repo list' % dir)
+
+
+def repo_remove(args):
+ """Remove a package source from the Spack configuration"""
+ pass
+
+
+def repo_list(args):
+ """List package sources and their mnemonics"""
+ repos = spack.db.repos
+ max_len = max(len(r.namespace) for r in repos)
+ fmt = "%%-%ds%%s" % (max_len + 4)
+ for r in repos:
+ print fmt % (r.namespace, r.root)
+
+
+
+def repo(parser, args):
+ action = { 'add' : repo_add,
+ 'create' : repo_create,
+ 'remove' : repo_remove,
+ 'list' : repo_list }
+ action[args.repo_command](args)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 3e91958c2c..66da91f629 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -45,11 +45,11 @@ several configuration files, such as compilers.yaml or mirrors.yaml.
Configuration file format
===============================
-Configuration files are formatted using YAML syntax.
-This format is implemented by Python's
-yaml class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax. This format is
+implemented by libyaml (included with Spack as an external module),
+and it's easy to read and versatile.
-The config files are structured as trees, like this ``compiler`` section::
+Config files are structured as trees, like this ``compiler`` section::
compilers:
chaos_5_x86_64_ib:
@@ -83,62 +83,73 @@ would looks like:
}
}
-Some routines, like get_mirrors_config and get_compilers_config may strip
-off the top-levels of the tree and return subtrees.
+Some convenience functions, like get_mirrors_config and
+``get_compilers_config`` may strip off the top-levels of the tree and
+return subtrees.
+
"""
import os
-import exceptions
import sys
-
-from external.ordereddict import OrderedDict
-from llnl.util.lang import memoized
-import spack.error
-
+import copy
from external import yaml
from external.yaml.error import MarkedYAMLError
+
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
+from llnl.util.lang import memoized
+
+import spack
+
_config_sections = {}
class _ConfigCategory:
name = None
filename = None
merge = True
- def __init__(self, n, f, m):
- self.name = n
- self.filename = f
- self.merge = m
+ def __init__(self, name, filename, merge, strip):
+ self.name = name
+ self.filename = filename
+ self.merge = merge
+ self.strip = strip
self.files_read_from = []
self.result_dict = {}
- _config_sections[n] = self
+ _config_sections[name] = self
-_ConfigCategory('compilers', 'compilers.yaml', True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True)
-_ConfigCategory('view', 'views.yaml', True)
-_ConfigCategory('order', 'orders.yaml', True)
+_ConfigCategory('config', 'config.yaml', True, False)
+_ConfigCategory('compilers', 'compilers.yaml', True, True)
+_ConfigCategory('mirrors', 'mirrors.yaml', True, True)
+_ConfigCategory('view', 'views.yaml', True, True)
+_ConfigCategory('order', 'orders.yaml', True, True)
"""Names of scopes and their corresponding configuration files."""
config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
('user', os.path.expanduser('~/.spack'))]
_compiler_by_arch = {}
-_read_config_file_result = {}
+
+@memoized
def _read_config_file(filename):
- """Read a given YAML configuration file"""
- global _read_config_file_result
- if filename in _read_config_file_result:
- return _read_config_file_result[filename]
+ """Read a YAML configuration file"""
+
+ # Ignore nonexisting files.
+ if not os.path.exists(filename):
+ return None
+
+ elif not os.path.isfile(filename):
+ tty.die("Invlaid configuration. %s exists but is not a file." % filename)
+
+ elif not os.access(filename, os.R_OK):
+ tty.die("Configuration file %s is not readable." % filename)
try:
with open(filename) as f:
- ydict = yaml.load(f)
+ return yaml.load(f)
+
except MarkedYAMLError, e:
tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
- except exceptions.IOError, e:
- _read_config_file_result[filename] = None
- return None
- _read_config_file_result[filename] = ydict
- return ydict
+
+ except IOError, e:
+ tty.die("Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches():
@@ -147,41 +158,66 @@ def clear_config_caches():
for key,s in _config_sections.iteritems():
s.files_read_from = []
s.result_dict = {}
- spack.config._read_config_file_result = {}
+
+ _read_config_file.clear()
spack.config._compiler_by_arch = {}
spack.compilers._cached_default_compiler = None
-def _merge_dicts(d1, d2):
- """Recursively merges two configuration trees, with entries
- in d2 taking precedence over d1"""
- if not d1:
- return d2.copy()
- if not d2:
- return d1
+def _merge_yaml(dest, source):
+ """Merges source into dest; entries in source take precedence over dest.
- for key2, val2 in d2.iteritems():
- if not key2 in d1:
- d1[key2] = val2
- continue
- val1 = d1[key2]
- if isinstance(val1, dict) and isinstance(val2, dict):
- d1[key2] = _merge_dicts(val1, val2)
- continue
- if isinstance(val1, list) and isinstance(val2, list):
- val1.extend(val2)
- seen = set()
- d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
- continue
- d1[key2] = val2
- return d1
+ Config file authors can optionally end any attribute in a dict
+ with `::` instead of `:`, and the key will override that of the
+ parent instead of merging.
+ """
+ def they_are(t):
+ return isinstance(dest, t) and isinstance(source, t)
+
+ # If both are None, handle specially and return None.
+ if source is None and dest is None:
+ return None
+ # If only the source is None, overwrite dest with None.
+ elif source is None:
+ return None
-def get_config(category_name):
- """Get the confguration tree for the names category. Strips off the
- top-level category entry from the dict"""
- global config_scopes
- category = _config_sections[category_name]
+ # Source list is prepended (for precedence)
+ if they_are(list):
+ seen = set(source)
+ dest[:] = source + [x for x in dest if x not in seen]
+ return dest
+
+ # Source dict is merged into dest. Extra ':' means overwrite.
+ elif they_are(dict):
+ for sk, sv in source.iteritems():
+ # allow total override with, e.g., repos::
+ override = sk.endswith(':')
+ if override:
+ sk = sk.rstrip(':')
+
+ if override or not sk in dest:
+ dest[sk] = copy.copy(sv)
+ else:
+ dest[sk] = _merge_yaml(dest[sk], source[sk])
+ return dest
+
+ # In any other case, overwrite with a copy of the source value.
+ else:
+ return copy.copy(source)
+
+
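A small illustration of the merge semantics just defined (assumes importing the private helper from spack.config; the dicts are made-up config fragments). Lists from the higher-precedence source are prepended, nested dicts are merged key by key, and a key written as `repos::` in YAML arrives here as `'repos:'` and replaces the destination value outright.

    from spack.config import _merge_yaml

    dest   = {'repos': ['/site/repo'], 'mirrors': {'a': 'url-a'}}
    source = {'repos': ['/my/repo'],   'mirrors': {'b': 'url-b'}}
    _merge_yaml(dest, source)
    # dest is now {'repos': ['/my/repo', '/site/repo'],
    #              'mirrors': {'a': 'url-a', 'b': 'url-b'}}

    _merge_yaml({'repos': ['/site/repo']}, {'repos:': ['/my/repo']})
    # -> {'repos': ['/my/repo']}   (full override; the site entry is dropped)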
+def substitute_spack_prefix(path):
+ """Replaces instances of $spack with Spack's prefix."""
+ return path.replace('$spack', spack.prefix)
+
+
+def get_config(category='config'):
+ """Get the confguration tree for a category.
+
+ Strips off the top-level category entry from the dict
+ """
+ category = _config_sections[category]
if category.result_dict:
return category.result_dict
@@ -191,14 +227,18 @@ def get_config(category_name):
result = _read_config_file(path)
if not result:
continue
- if not category_name in result:
- continue
+
+ if category.strip:
+ if not category.name in result:
+ continue
+ result = result[category.name]
+
category.files_read_from.insert(0, path)
- result = result[category_name]
if category.merge:
- category.result_dict = _merge_dicts(category.result_dict, result)
+ category.result_dict = _merge_yaml(category.result_dict, result)
else:
category.result_dict = result
+
return category.result_dict
@@ -215,7 +255,7 @@ def get_compilers_config(arch=None):
cc_config = get_config('compilers')
if arch in cc_config and 'all' in cc_config:
arch_compiler = dict(cc_config[arch])
- _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
+ _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all'])
elif arch in cc_config:
_compiler_by_arch[arch] = cc_config[arch]
elif 'all' in cc_config:
@@ -225,6 +265,15 @@ def get_compilers_config(arch=None):
return _compiler_by_arch[arch]
+def get_repos_config():
+ config = get_config()
+ if 'repos' not in config:
+ return []
+
+ repo_list = config['repos']
+ return [substitute_spack_prefix(repo) for repo in repo_list]
+
+
def get_mirror_config():
"""Get the mirror configuration from config files"""
return get_config('mirrors')
@@ -232,7 +281,6 @@ def get_mirror_config():
def get_config_scope_dirname(scope):
"""For a scope return the config directory"""
- global config_scopes
for s,p in config_scopes:
if s == scope:
return p
@@ -251,16 +299,16 @@ def get_config_scope_filename(scope, category_name):
def add_to_config(category_name, addition_dict, scope=None):
"""Merge a new dict into a configuration tree and write the new
configuration to disk"""
- global _read_config_file_result
get_config(category_name)
category = _config_sections[category_name]
- #If scope is specified, use it. Otherwise use the last config scope that
- #we successfully parsed data from.
+ # If scope is specified, use it. Otherwise use the last config scope that
+ # we successfully parsed data from.
file = None
path = None
if not scope and not category.files_read_from:
scope = 'user'
+
if scope:
try:
dir = get_config_scope_dirname(scope)
@@ -268,32 +316,37 @@ def add_to_config(category_name, addition_dict, scope=None):
mkdirp(dir)
path = os.path.join(dir, category.filename)
file = open(path, 'w')
- except exceptions.IOError, e:
+ except IOError, e:
pass
else:
for p in category.files_read_from:
try:
file = open(p, 'w')
- except exceptions.IOError, e:
+ except IOError, e:
pass
if file:
path = p
break;
+
if not file:
tty.die('Unable to write to config file %s' % path)
- #Merge the new information into the existing file info, then write to disk
- new_dict = _read_config_file_result[path]
+ # Merge the new information into the existing file info, then write to disk
+ new_dict = _read_config_file(path)
+
if new_dict and category_name in new_dict:
new_dict = new_dict[category_name]
- new_dict = _merge_dicts(new_dict, addition_dict)
+
+ new_dict = _merge_yaml(new_dict, addition_dict)
new_dict = { category_name : new_dict }
- _read_config_file_result[path] = new_dict
+
+ # Install new dict as memoized value, and dump to disk
+ _read_config_file.cache[path] = new_dict
yaml.dump(new_dict, stream=file, default_flow_style=False)
file.close()
- #Merge the new information into the cached results
- category.result_dict = _merge_dicts(category.result_dict, addition_dict)
+ # Merge the new information into the cached results
+ category.result_dict = _merge_yaml(category.result_dict, addition_dict)
def add_to_mirror_config(addition_dict, scope=None):
@@ -311,7 +364,6 @@ def add_to_compiler_config(addition_dict, scope=None, arch=None):
def remove_from_config(category_name, key_to_rm, scope=None):
"""Remove a configuration key and write a new configuration to disk"""
- global config_scopes
get_config(category_name)
scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
category = _config_sections[category_name]
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 78039ac6f9..3937aef450 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -238,11 +238,10 @@ def patch(pkg, url_or_filename, level=1, when=None):
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
-
cur_patches = pkg.patches.setdefault(when_spec, [])
# if this spec is identical to some other, then append this
# patch to the existing list.
- cur_patches.append(Patch(pkg.name, url_or_filename, level))
+ cur_patches.append(Patch(pkg, url_or_filename, level))
@directive('variants')
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index c631a35bf3..39d71bb4b9 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -722,6 +722,12 @@ class Package(object):
tty.msg("Patched %s" % self.name)
+ @property
+ def namespace(self):
+ namespace, dot, module = self.__module__.rpartition('.')
+ return namespace
+
+
def do_fake_install(self):
"""Make a fake install directory contaiing a 'fake' file in bin."""
mkdirp(self.prefix.bin)
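The new `namespace` property just drops the last component of the package class's module path; for a package loaded from the mock repo in this diff, `__module__` would look like the string below (illustrative only).

    'gov.llnl.spack.mock.mpich'.rpartition('.')[0]   # -> 'gov.llnl.spack.mock'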
diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py
index 2e3e95ca40..6005523bc0 100644
--- a/lib/spack/spack/packages.py
+++ b/lib/spack/spack/packages.py
@@ -23,10 +23,15 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import exceptions
import sys
import inspect
import glob
import imp
+import re
+import itertools
+import traceback
+from external import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
@@ -35,72 +40,340 @@ from llnl.util.lang import *
import spack.error
import spack.spec
from spack.virtual import ProviderIndex
-from spack.util.naming import mod_to_class, validate_module_name
+from spack.util.naming import *
-# Name of module under which packages are imported
-_imported_packages_module = 'spack.packages'
-
-# Name of the package file inside a package directory
-_package_file_name = 'package.py'
+# Filename for package repo names
+repo_config_filename = '_repo.yaml'
+# Filename for packages in repos.
+package_file_name = 'package.py'
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
function to a Spec."""
- def converter(self, spec_like, **kwargs):
+ def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
spec_like = spack.spec.Spec(spec_like)
- return function(self, spec_like, **kwargs)
+ return function(self, spec_like, *args, **kwargs)
return converter
+class NamespaceTrie(object):
+ def __init__(self):
+ self._elements = {}
+
+
+ def __setitem__(self, namespace, repo):
+ parts = namespace.split('.')
+ cur = self._elements
+ for p in parts[:-1]:
+ if p not in cur:
+ cur[p] = {}
+ cur = cur[p]
+
+ cur[parts[-1]] = repo
+
+
+ def __getitem__(self, namespace):
+ parts = namespace.split('.')
+ cur = self._elements
+ for p in parts:
+ if p not in cur:
+ raise KeyError("Can't find namespace %s in trie" % namespace)
+ cur = cur[p]
+ return cur
+
+
+ def __contains__(self, namespace):
+ parts = namespace.split('.')
+ cur = self._elements
+ for p in parts:
+ if not isinstance(cur, dict):
+ return False
+ if p not in cur:
+ return False
+ cur = cur[p]
+ return True
+
+
+
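A brief behavioral sketch of the trie (hypothetical value; assumes importing NamespaceTrie from spack.packages): keys are dotted namespaces, and membership also matches any prefix of a stored namespace, which is what lets the import hook answer for intermediate packages like `gov.llnl`.

    from spack.packages import NamespaceTrie

    trie = NamespaceTrie()
    trie['gov.llnl.spack'] = 'some repo object'

    print('gov.llnl' in trie)         # True: prefix of a stored namespace
    print('gov.llnl.spack' in trie)   # True
    print('com.example' in trie)      # False
    print(trie['gov.llnl.spack'])     # 'some repo object'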
+class PackageFinder(object):
+ """A PackageFinder is a wrapper around a list of PackageDBs.
+
+ It functions exactly like a PackageDB, but it operates on the
+ combined results of the PackageDBs in its list instead of on a
+ single package repository.
+ """
+ def __init__(self, *repo_dirs):
+ self.repos = []
+ self.by_namespace = NamespaceTrie()
+ self.by_path = {}
+
+ for root in repo_dirs:
+ repo = PackageDB(root)
+ self.put_last(repo)
+
+
+ def _check_repo(self, repo):
+ if repo.root in self.by_path:
+ raise DuplicateRepoError("Package repos are the same",
+ repo, self.by_path[repo.root])
+
+ if repo.namespace in self.by_namespace:
+ tty.error("Package repos cannot have the same name",
+ repo, self.by_namespace[repo.namespace])
+
+
+ def _add(self, repo):
+ self._check_repo(repo)
+ self.by_namespace[repo.namespace] = repo
+ self.by_path[repo.root] = repo
+
+
+ def put_first(self, repo):
+ self._add(repo)
+ self.repos.insert(0, repo)
+
+
+ def put_last(self, repo):
+ self._add(repo)
+ self.repos.append(repo)
+
+
+ def remove(self, repo):
+ if repo in self.repos:
+ self.repos.remove(repo)
+
+
+ def swap(self, other):
+ repos = self.repos
+ by_namespace = self.by_namespace
+ by_path = self.by_path
+
+ self.repos = other.repos
+ self.by_namespace = other.by_namespace
+ self.by_path = other.by_path
+
+ other.repos = repos
+ other.by_namespace = by_namespace
+ other.by_path = by_path
+
+
+ def all_package_names(self):
+ all_pkgs = set()
+ for repo in self.repos:
+ all_pkgs.update(set(repo.all_package_names()))
+ return all_pkgs
+
+
+ def all_packages(self):
+ for name in self.all_package_names():
+ yield self.get(name)
+
+
+ def providers_for(self, vpkg_name):
+ # TODO: THIS IS WRONG; should use more than the first repo
+ return self.repos[0].providers_for(vpkg_name)
+
+
+ def _get_spack_pkg_name(self, repo, py_module_name):
+ """Allow users to import Spack packages using legal Python identifiers.
+
+ A python identifier might map to many different Spack package
+ names due to hyphen/underscore ambiguity.
+
+ Easy example:
+ num3proxy -> 3proxy
+
+ Ambiguous:
+ foo_bar -> foo_bar, foo-bar
+
+ More ambiguous:
+ foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
+ """
+ if py_module_name in repo:
+ return py_module_name
+
+ options = possible_spack_module_names(py_module_name)
+ options.remove(py_module_name)
+ for name in options:
+ if name in repo:
+ return name
+
+ return None
+
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.by_namespace:
+ return self
+
+ namespace, dot, module_name = fullname.rpartition('.')
+ if namespace not in self.by_namespace:
+ return None
+
+ repo = self.by_namespace[namespace]
+ name = self._get_spack_pkg_name(repo, module_name)
+ if not name:
+ return None
+
+ return self
+
+
+ def load_module(self, fullname):
+ if fullname in sys.modules:
+ return sys.modules[fullname]
+
+ if fullname in self.by_namespace:
+ ns = self.by_namespace[fullname]
+ module = imp.new_module(fullname)
+ module.__file__ = "<spack-namespace>"
+ module.__path__ = []
+ module.__package__ = fullname
+
+ else:
+ namespace, dot, module_name = fullname.rpartition('.')
+ if namespace not in self.by_namespace:
+ raise ImportError(
+ "No Spack repository with namespace %s" % namespace)
+
+ repo = self.by_namespace[namespace]
+ name = self._get_spack_pkg_name(repo, module_name)
+ if not name:
+ raise ImportError(
+ "No module %s in Spack repository %s" % (module_name, repo))
+
+ fullname = namespace + '.' + name
+ file_path = os.path.join(repo.root, name, package_file_name)
+ module = imp.load_source(fullname, file_path)
+ module.__package__ = namespace
+
+ module.__loader__ = self
+ sys.modules[fullname] = module
+ return module
+
+
+ def _find_repo_for_spec(self, spec):
+ """Find a repo that contains the supplied spec's package.
+
+ Raises UnknownPackageError if not found.
+ """
+ for repo in self.repos:
+ if spec.name in repo:
+ return repo
+ raise UnknownPackageError(spec.name)
+
+
+ @_autospec
+ def get(self, spec, new=False):
+ return self._find_repo_for_spec(spec).get(spec, new)
+
+
+ def get_repo(self, namespace):
+ if namespace in self.by_namespace:
+ repo = self.by_namespace[namespace]
+ if isinstance(repo, PackageDB):
+ return repo
+ return None
+
+
+ def exists(self, pkg_name):
+ return any(repo.exists(pkg_name) for repo in self.repos)
+
+
+ def __contains__(self, pkg_name):
+ return self.exists(pkg_name)
+
+
+
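A hedged usage sketch of the finder itself (directory paths and the version are placeholders, and this assumes a fully initialized Spack): repos are searched in the order given, so earlier directories win when two repos provide the same package.

    from spack.packages import PackageFinder

    finder = PackageFinder('/site/var/spack/packages',
                           '/site/var/spack/mock_packages')

    print('mpich' in finder)          # True if any repo has mpich/package.py
    pkg = finder.get('mpich@3.0.4')   # string is auto-converted to a Spec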
class PackageDB(object):
+ """Class representing a package repository in the filesystem.
+
+ Each package repository must have a top-level configuration file
+ called `_repo.yaml`.
+
+ Currently, `_repo.yaml` must define:
+
+ `namespace`:
+ A Python namespace where the repository's packages should live.
+
+ """
def __init__(self, root):
- """Construct a new package database from a root directory."""
+ """Instantiate a package repository from a filesystem path."""
+ # Root directory, containing _repo.yaml and package dirs
self.root = root
- self.instances = {}
- self.provider_index = None
+
+ # Config file in <self.root>/_repo.yaml
+ self.config_file = os.path.join(self.root, repo_config_filename)
+
+ # Read configuration from _repo.yaml
+ config = self._read_config()
+ if not 'namespace' in config:
+ tty.die('Package repo in %s must define a namespace in %s.'
+ % (self.root, repo_config_filename))
+
+ # Check namespace in the repository configuration.
+ self.namespace = config['namespace']
+ if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace):
+ tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be "
+ "valid python identifiers separated by '.'")
+ % (self.namespace, self.root))
+
+ # These are internal cache variables.
+ self._instances = {}
+ self._provider_index = None
+
+
+ def _read_config(self):
+ """Check for a YAML config file in this db's root directory."""
+ try:
+ with open(self.config_file) as reponame_file:
+ yaml_data = yaml.load(reponame_file)
+
+ if (not yaml_data or 'repo' not in yaml_data or
+ not isinstance(yaml_data['repo'], dict)):
+ tty.die("Invalid %s in repository %s"
+ % (repo_config_filename, self.root))
+
+ return yaml_data['repo']
+
+ except exceptions.IOError, e:
+ tty.die("Error reading %s when opening %s"
+ % (self.config_file, self.root))
@_autospec
- def get(self, spec, **kwargs):
+ def get(self, spec, new=False):
if spec.virtual:
raise UnknownPackageError(spec.name)
- if kwargs.get('new', False):
- if spec in self.instances:
- del self.instances[spec]
+ if new:
+ if spec in self._instances:
+ del self._instances[spec]
- if not spec in self.instances:
- package_class = self.get_class_for_package_name(spec.name)
+ if not spec in self._instances:
+ package_class = self.get_class_for_package_name(spec.name, spec.namespace)
try:
copy = spec.copy()
- self.instances[copy] = package_class(copy)
+ self._instances[copy] = package_class(copy)
except Exception, e:
if spack.debug:
sys.excepthook(*sys.exc_info())
- raise FailedConstructorError(spec.name, e)
+ raise FailedConstructorError(spec.name, *sys.exc_info())
- return self.instances[spec]
-
-
- @_autospec
- def delete(self, spec):
- """Force a package to be recreated."""
- del self.instances[spec]
+ return self._instances[spec]
def purge(self):
"""Clear entire package instance cache."""
- self.instances.clear()
+ self._instances.clear()
@_autospec
def providers_for(self, vpkg_spec):
- if self.provider_index is None:
- self.provider_index = ProviderIndex(self.all_package_names())
+ if self._provider_index is None:
+ self._provider_index = ProviderIndex(self.all_package_names())
- providers = self.provider_index.providers_for(vpkg_spec)
+ providers = self._provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.name)
return providers
@@ -128,21 +401,23 @@ class PackageDB(object):
"""
validate_module_name(pkg_name)
pkg_dir = self.dirname_for_package_name(pkg_name)
- return join_path(pkg_dir, _package_file_name)
+
+ return join_path(pkg_dir, package_file_name)
@memoized
def all_package_names(self):
"""Generator function for all packages. This looks for
- ``<pkg_name>/package.py`` files within the root direcotry"""
+ ``<pkg_name>/package.py`` files within the repo directories"""
all_package_names = []
+
for pkg_name in os.listdir(self.root):
pkg_dir = join_path(self.root, pkg_name)
- pkg_file = join_path(pkg_dir, _package_file_name)
+ pkg_file = join_path(pkg_dir, package_file_name)
if os.path.isfile(pkg_file):
all_package_names.append(pkg_name)
- all_package_names.sort()
- return all_package_names
+
+ return sorted(all_package_names)
def all_packages(self):
@@ -152,21 +427,13 @@ class PackageDB(object):
@memoized
def exists(self, pkg_name):
- """Whether a package with the supplied name exists ."""
+ """Whether a package with the supplied name exists."""
return os.path.exists(self.filename_for_package_name(pkg_name))
@memoized
- def get_class_for_package_name(self, pkg_name):
- """Get an instance of the class for a particular package.
-
- This method uses Python's ``imp`` package to load python
- source from a Spack package's ``package.py`` file. A
- normal python import would only load each package once, but
- because we do this dynamically, the method needs to be
- memoized to ensure there is only ONE package class
- instance, per package, per database.
- """
+ def get_class_for_package_name(self, pkg_name, reponame = None):
+ """Get an instance of the class for a particular package."""
file_path = self.filename_for_package_name(pkg_name)
if os.path.exists(file_path):
@@ -175,17 +442,10 @@ class PackageDB(object):
if not os.access(file_path, os.R_OK):
tty.die("Cannot read '%s'!" % file_path)
else:
- raise UnknownPackageError(pkg_name)
+ raise UnknownPackageError(pkg_name, self.namespace)
class_name = mod_to_class(pkg_name)
- try:
- module_name = _imported_packages_module + '.' + pkg_name
- module = imp.load_source(module_name, file_path)
-
- except ImportError, e:
- tty.die("Error while importing %s from %s:\n%s" % (
- pkg_name, file_path, e.message))
-
+ module = __import__(self.namespace + '.' + pkg_name, fromlist=[class_name])
cls = getattr(module, class_name)
if not inspect.isclass(cls):
tty.die("%s.%s is not a class" % (pkg_name, class_name))
@@ -193,17 +453,88 @@ class PackageDB(object):
return cls
+ def __str__(self):
+ return "<PackageDB '%s' from '%s'>" % (self.namespace, self.root)
+
+
+ def __repr__(self):
+ return self.__str__()
+
+
+ def __contains__(self, pkg_name):
+ return self.exists(pkg_name)
+
+
+ #
+ # Below functions deal with installed packages, and should be
+ # moved to some other part of Spack (combine with
+ # directory_layout?)
+ #
+ @_autospec
+ def get_installed(self, spec):
+ """Get all the installed specs that satisfy the provided spec constraint."""
+ return [s for s in self.installed_package_specs() if s.satisfies(spec)]
+
+
+ @_autospec
+ def installed_extensions_for(self, extendee_spec):
+ for s in self.installed_package_specs():
+ try:
+ if s.package.extends(extendee_spec):
+ yield s.package
+ except UnknownPackageError, e:
+ # Skip packages we know nothing about
+ continue
+
+
+ def installed_package_specs(self):
+ """Read installed package names straight from the install directory
+ layout.
+ """
+ # Get specs from the directory layout but ensure that they're
+ # all normalized properly.
+ installed = []
+ for spec in spack.install_layout.all_specs():
+ spec.normalize()
+ installed.append(spec)
+ return installed
+
+
+ def installed_known_package_specs(self):
+ """Read installed package names straight from the install
+ directory layout, but return only specs for which the
+ package is known to this version of spack.
+ """
+ for spec in spack.install_layout.all_specs():
+ if self.exists(spec.name):
+ yield spec
+
+
class UnknownPackageError(spack.error.SpackError):
"""Raised when we encounter a package spack doesn't have."""
- def __init__(self, name):
- super(UnknownPackageError, self).__init__("Package '%s' not found." % name)
+ def __init__(self, name, repo=None):
+ msg = None
+ if repo:
+ msg = "Package %s not found in packagerepo %s." % (name, repo)
+ else:
+ msg = "Package %s not found." % name
+ super(UnknownPackageError, self).__init__(msg)
self.name = name
+class DuplicateRepoError(spack.error.SpackError):
+ """Raised when duplicate repos are added to a PackageFinder."""
+ def __init__(self, msg, repo1, repo2):
+ super(DuplicateRepoError, self).__init__(
+ "%s: %s, %s" % (msg, repo1, repo2))
+
+
class FailedConstructorError(spack.error.SpackError):
"""Raised when a package's class constructor fails."""
- def __init__(self, name, reason):
+ def __init__(self, name, exc_type, exc_obj, exc_tb):
super(FailedConstructorError, self).__init__(
"Class constructor failed for package '%s'." % name,
- str(reason))
+ '\nCaused by:\n' +
+ ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
+ ''.join(traceback.format_tb(exc_tb)))
self.name = name
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index b1b6e07738..da5fa1646b 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -41,8 +41,8 @@ class Patch(object):
"""This class describes a patch to be applied to some expanded
source code."""
- def __init__(self, pkg_name, path_or_url, level):
- self.pkg_name = pkg_name
+ def __init__(self, pkg, path_or_url, level):
+ self.pkg_name = pkg.name
self.path_or_url = path_or_url
self.path = None
self.url = None
diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py
new file mode 100644
index 0000000000..441011cf98
--- /dev/null
+++ b/lib/spack/spack/repo_loader.py
@@ -0,0 +1,110 @@
+import re
+import sys
+import types
+import traceback
+
+from llnl.util.lang import *
+import spack
+
+# Name of module under which packages are imported
+imported_packages_module = 'spack.repos'
+
+# Name of the package file inside a package directory
+package_file_name = 'package.py'
+
+class LazyLoader:
+ """The LazyLoader handles cases when repo modules or classes
+ are imported. It watches for 'spack.repos.*' loads, then
+ redirects the load to the appropriate module."""
+ def find_module(self, fullname, pathname):
+ if not fullname.startswith(imported_packages_module):
+ return None
+
+ partial_name = fullname[len(imported_packages_module)+1:]
+
+ print "partial: ", partial_name
+ print
+
+ last_dot = partial_name.rfind('.')
+ repo = partial_name[:last_dot]
+ module = partial_name[last_dot+1:]
+
+ repo_loader = spack.db.repo_loaders.get(repo)
+ if repo_loader:
+ try:
+ self.mod = repo_loader.get_module(module)
+ return self
+ except (ImportError, spack.packages.UnknownPackageError):
+ return None
+
+ def load_module(self, fullname):
+ return self.mod
+
+#sys.meta_path.append(LazyLoader())
+
+_reponames = {}
+class RepoNamespace(types.ModuleType):
+ """The RepoNamespace holds the repository namespaces under
+ spack.repos. For example, when accessing spack.repos.original
+ this class will use __getattr__ to translate the 'original'
+ into one of spack's known repositories"""
+ def __init__(self):
+ sys.modules[imported_packages_module] = self
+
+ def __getattr__(self, name):
+ if name in _reponames:
+ return _reponames[name]
+ raise AttributeError
+
+ @property
+ def __file__(self):
+ return None
+
+ @property
+ def __path__(self):
+ return []
+
+
+class RepoLoader(types.ModuleType):
+ """Each RepoLoader is associated with a repository, and the RepoLoader is
+ responsible for loading packages out of that repository. For example,
+ a RepoLoader may be responsible for spack.repos.original, and when someone
+ references spack.repos.original.libelf that RepoLoader will load the
+ libelf package."""
+ def __init__(self, reponame, repopath):
+ self.path = repopath
+ self.reponame = reponame
+ self.module_name = imported_packages_module + '.' + reponame
+ if not reponame in _reponames:
+ _reponames[reponame] = self
+
+ sys.modules[self.module_name] = self
+
+
+ @property
+ def __path__(self):
+ return [ self.path ]
+
+
+ def __getattr__(self, name):
+ if name[0] == '_':
+ raise AttributeError
+ return self.get_module(name)
+
+
+ @memoized
+ def get_module(self, pkg_name):
+ import os
+ import imp
+ import llnl.util.tty as tty
+
+
+ file_path = os.path.join(self.path, pkg_name, package_file_name)
+ try:
+ module_name = imported_packages_module + '.' + self.reponame + '.' + pkg_name
+ module = imp.load_source(module_name, file_path)
+
+ except ImportError, e:
+ tty.die("Error while importing %s from %s:\n%s" % (
+ pkg_name, file_path, e.message))
+
+ return module
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 7b79feb311..5e59f240a4 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -412,6 +412,7 @@ class Spec(object):
self.dependencies = other.dependencies
self.variants = other.variants
self.variants.spec = self
+ self.namespace = other.namespace
# Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal.
@@ -1357,6 +1358,7 @@ class Spec(object):
self.dependencies = DependencyMap()
self.variants = other.variants.copy()
self.variants.spec = self
+ self.namespace = other.namespace
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
@@ -1505,6 +1507,7 @@ class Spec(object):
in the format string. The format strings you can provide are::
$_ Package name
+ $. Long package name
$@ Version
$% Compiler
$%@ Compiler & compiler version
@@ -1552,6 +1555,9 @@ class Spec(object):
if c == '_':
out.write(fmt % self.name)
+ elif c == '.':
+ longname = '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
+ out.write(fmt % longname)
elif c == '@':
if self.versions and self.versions != _any_version:
write(fmt % (c + str(self.versions)), c)
@@ -1700,17 +1706,23 @@ class SpecParser(spack.parse.Parser):
def spec(self):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
- self.check_identifier()
+
+ spec_namespace, dot, spec_name = self.token.value.rpartition('.')
+ if not spec_namespace:
+ spec_namespace = None
+
+ self.check_identifier(spec_name)
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
- spec.name = self.token.value
+ spec.name = spec_name
spec.versions = VersionList()
spec.variants = VariantMap(spec)
spec.architecture = None
spec.compiler = None
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
+ spec.namespace = spec_namespace
spec._normal = False
spec._concrete = False
@@ -1804,12 +1816,14 @@ class SpecParser(spack.parse.Parser):
return compiler
- def check_identifier(self):
+ def check_identifier(self, id=None):
"""The only identifiers that can contain '.' are versions, but version
ids are context-sensitive so we have to check on a case-by-case
basis. Call this if we detect a version id where it shouldn't be.
"""
- if '.' in self.token.value:
+ if not id:
+ id = self.token.value
+ if '.' in id:
self.last_token_error("Identifier cannot contain '.'")
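With the parser change above, a spec name may be qualified by its repo namespace, and the new `$.` format token prints the qualified name back out. A hypothetical round trip using the mock repo's namespace from this diff (the version number is made up):

    from spack.spec import Spec

    s = Spec('gov.llnl.spack.mock.mpich@3.0.4')
    print(s.name)           # 'mpich'
    print(s.namespace)      # 'gov.llnl.spack.mock'
    print(s.format('$.'))   # 'gov.llnl.spack.mock.mpich'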
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 790b22f3b0..eed182a257 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -30,45 +30,85 @@ import spack
from spack.packages import PackageDB
from spack.test.mock_packages_test import *
+# Some sample compiler config data
+a_comps = {
+ "gcc@4.7.3" : {
+ "cc" : "/gcc473",
+ "cxx" : "/g++473",
+ "f77" : None,
+ "f90" : None },
+ "gcc@4.5.0" : {
+ "cc" : "/gcc450",
+ "cxx" : "/g++450",
+ "f77" : "/gfortran",
+ "f90" : "/gfortran" },
+ "clang@3.3" : {
+ "cc" : "<overwritten>",
+ "cxx" : "<overwritten>",
+ "f77" : "<overwritten>",
+ "f90" : "<overwritten>" }
+}
+
+b_comps = {
+ "icc@10.0" : {
+ "cc" : "/icc100",
+ "cxx" : "/icc100",
+ "f77" : None,
+ "f90" : None },
+ "icc@11.1" : {
+ "cc" : "/icc111",
+ "cxx" : "/icp111",
+ "f77" : "/ifort",
+ "f90" : "/ifort" },
+ "clang@3.3" : {
+ "cc" : "/clang",
+ "cxx" : "/clang++",
+ "f77" : None,
+ "f90" : None}
+}
+
class ConfigTest(MockPackagesTest):
def setUp(self):
- self.initmock()
+ super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
- spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
- ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+ spack.config.config_scopes = [
+ ('test_low_priority', os.path.join(self.tmp_dir, 'low')),
+ ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+
def tearDown(self):
- self.cleanmock()
+ super(ConfigTest, self).tearDown()
shutil.rmtree(self.tmp_dir, True)
- def check_config(self, comps):
+
+ def check_config(self, comps, *compiler_names):
+ """Check that named compilers in comps match Spack's config."""
config = spack.config.get_compilers_config()
compiler_list = ['cc', 'cxx', 'f77', 'f90']
- for key in comps:
+ for key in compiler_names:
for c in compiler_list:
- if comps[key][c] == '/bad':
- continue
self.assertEqual(comps[key][c], config[key][c])
- def test_write_key(self):
- a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
- "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
- "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
+ def test_write_key_in_memory(self):
+ # Write b_comps "on top of" a_comps.
+ spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
+ spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+
+ # Make sure the config looks how we expect.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
- b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
- "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
- "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
+ def test_write_key_to_disk(self):
+ # Write b_comps "on top of" a_comps.
spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
- self.check_config(a_comps)
- self.check_config(b_comps)
-
+ # Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
- self.check_config(a_comps)
- self.check_config(b_comps)
-
+ # Same check again, to ensure consistency.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index b3ad8efec4..55b3f0b18f 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -34,7 +34,7 @@ from llnl.util.filesystem import *
import spack
from spack.spec import Spec
-from spack.packages import PackageDB
+from spack.packages import PackageFinder
from spack.directory_layout import YamlDirectoryLayout
# number of packages to test (to reduce test time)
@@ -123,7 +123,7 @@ class DirectoryLayoutTest(unittest.TestCase):
information about installed packages' specs to uninstall
or query them again if the package goes away.
"""
- mock_db = PackageDB(spack.mock_packages_path)
+ mock_db = PackageFinder(spack.mock_packages_path)
not_in_mock = set.difference(
set(spack.db.all_package_names()),
@@ -145,8 +145,7 @@ class DirectoryLayoutTest(unittest.TestCase):
self.layout.create_install_directory(spec)
installed_specs[spec] = self.layout.path_for_spec(spec)
- tmp = spack.db
- spack.db = mock_db
+ spack.db.swap(mock_db)
# Now check that even without the package files, we know
# enough to read a spec from the spec file.
@@ -161,7 +160,7 @@ class DirectoryLayoutTest(unittest.TestCase):
self.assertTrue(spec.eq_dag(spec_from_file))
self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
- spack.db = tmp
+ spack.db.swap(mock_db)
def test_find(self):
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 00f81114af..071c21b7e0 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -22,29 +22,22 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import sys
import unittest
import spack
import spack.config
-from spack.packages import PackageDB
+from spack.packages import PackageFinder
from spack.spec import Spec
-def set_pkg_dep(pkg, spec):
- """Alters dependence information for a package.
- Use this to mock up constraints.
- """
- spec = Spec(spec)
- spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec }
-
-
class MockPackagesTest(unittest.TestCase):
def initmock(self):
# Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with
# real ones.
- self.real_db = spack.db
- spack.db = PackageDB(spack.mock_packages_path)
+ self.db = PackageFinder(spack.mock_packages_path)
+ spack.db.swap(self.db)
spack.config.clear_config_caches()
self.real_scopes = spack.config.config_scopes
@@ -52,13 +45,39 @@ class MockPackagesTest(unittest.TestCase):
('site', spack.mock_site_config),
('user', spack.mock_user_config)]
+ # Store changes to the package's dependencies so we can
+ # restore later.
+ self.saved_deps = {}
+
+
+ def set_pkg_dep(self, pkg_name, spec):
+ """Alters dependence information for a package.
+
+ Adds a dependency on <spec> to pkg.
+ Use this to mock up constraints.
+ """
+ spec = Spec(spec)
+
+ # Save original dependencies before making any changes.
+ pkg = spack.db.get(pkg_name)
+ if pkg_name not in self.saved_deps:
+ self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
+
+ # Change dep spec
+ pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
+
def cleanmock(self):
"""Restore the real packages path after any test."""
- spack.db = self.real_db
+ spack.db.swap(self.db)
spack.config.config_scopes = self.real_scopes
spack.config.clear_config_caches()
+ # Restore dependency changes that happened during the test
+ for pkg_name, (pkg, deps) in self.saved_deps.items():
+ pkg.dependencies.clear()
+ pkg.dependencies.update(deps)
+
def setUp(self):
self.initmock()
@@ -66,5 +85,3 @@ class MockPackagesTest(unittest.TestCase):
def tearDown(self):
self.cleanmock()
-
-
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 6222e7b5f8..a5925ea066 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -29,7 +29,7 @@ import unittest
import spack
import spack.url as url
-from spack.packages import PackageDB
+from spack.packages import PackageFinder
class PackageSanityTest(unittest.TestCase):
@@ -47,10 +47,10 @@ class PackageSanityTest(unittest.TestCase):
def test_get_all_mock_packages(self):
"""Get the mock packages once each too."""
- tmp = spack.db
- spack.db = PackageDB(spack.mock_packages_path)
+ db = PackageFinder(spack.mock_packages_path)
+ spack.db.swap(db)
self.check_db()
- spack.db = tmp
+ spack.db.swap(db)
def test_url_versions(self):
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index a8183cf6a6..42bd91ec5c 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -44,7 +44,8 @@ class PackagesTest(MockPackagesTest):
def test_package_filename(self):
- filename = spack.db.filename_for_package_name('mpich')
+ repo = spack.db.get_repo('gov.llnl.spack.mock')
+ filename = repo.filename_for_package_name('mpich')
self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py'))
@@ -54,7 +55,8 @@ class PackagesTest(MockPackagesTest):
def test_nonexisting_package_filename(self):
- filename = spack.db.filename_for_package_name('some-nonexisting-package')
+ repo = spack.db.get_repo('gov.llnl.spack.mock')
+ filename = repo.filename_for_package_name('some-nonexisting-package')
self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py'))
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 549f829d3e..a71026d183 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -40,8 +40,8 @@ from spack.test.mock_packages_test import *
class SpecDagTest(MockPackagesTest):
def test_conflicting_package_constraints(self):
- set_pkg_dep('mpileaks', 'mpich@1.0')
- set_pkg_dep('callpath', 'mpich@2.0')
+ self.set_pkg_dep('mpileaks', 'mpich@1.0')
+ self.set_pkg_dep('callpath', 'mpich@2.0')
spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -223,25 +223,25 @@ class SpecDagTest(MockPackagesTest):
def test_unsatisfiable_version(self):
- set_pkg_dep('mpileaks', 'mpich@1.0')
+ self.set_pkg_dep('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
def test_unsatisfiable_compiler(self):
- set_pkg_dep('mpileaks', 'mpich%gcc')
+ self.set_pkg_dep('mpileaks', 'mpich%gcc')
spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
def test_unsatisfiable_compiler_version(self):
- set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
+ self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
def test_unsatisfiable_architecture(self):
- set_pkg_dep('mpileaks', 'mpich=bgqos_0')
+ self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 782afbd4bb..a7b6e2b436 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -1,10 +1,14 @@
# Need this because of spack.util.string
from __future__ import absolute_import
import string
+import itertools
import re
import spack
+__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name',
+ 'validate_module_name', 'possible_spack_module_names']
+
# Valid module names can contain '-' but can't start with it.
_valid_module_re = r'^\w[\w-]*$'
@@ -42,6 +46,33 @@ def mod_to_class(mod_name):
return class_name
+def spack_module_to_python_module(mod_name):
+ """Given a Spack module name, returns the name by which it can be
+ imported in Python.
+ """
+ if re.match(r'[0-9]', mod_name):
+ mod_name = 'num' + mod_name
+
+ return mod_name.replace('-', '_')
+
+
+def possible_spack_module_names(python_mod_name):
+ """Given a Python module name, return a list of all possible spack module
+ names that could correspond to it."""
+ mod_name = re.sub(r'^num(\d)', r'\1', python_mod_name)
+
+ parts = re.split(r'(_)', mod_name)
+ options = [['_', '-']] * mod_name.count('_')
+
+ results = []
+ for subs in itertools.product(*options):
+ s = list(parts)
+ s[1::2] = subs
+ results.append(''.join(s))
+
+ return results
+
+
def valid_module_name(mod_name):
"""Return whether the mod_name is valid for use in Spack."""
return bool(re.match(_valid_module_re, mod_name))
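A few concrete values for the new helpers, matching the hyphen/underscore ambiguity described in the PackageFinder docstring above:

    from spack.util.naming import (spack_module_to_python_module,
                                   possible_spack_module_names)

    print(spack_module_to_python_module('3proxy'))    # 'num3proxy'
    print(spack_module_to_python_module('foo-bar'))   # 'foo_bar'

    print(possible_spack_module_names('num3proxy'))   # ['3proxy']
    print(possible_spack_module_names('foo_bar_baz'))
    # ['foo_bar_baz', 'foo_bar-baz', 'foo-bar_baz', 'foo-bar-baz']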
diff --git a/var/spack/mock_packages/_repo.yaml b/var/spack/mock_packages/_repo.yaml
new file mode 100644
index 0000000000..b97b978de3
--- /dev/null
+++ b/var/spack/mock_packages/_repo.yaml
@@ -0,0 +1,2 @@
+repo:
+ namespace: gov.llnl.spack.mock
diff --git a/var/spack/packages/_repo.yaml b/var/spack/packages/_repo.yaml
new file mode 100644
index 0000000000..4a371e1cad
--- /dev/null
+++ b/var/spack/packages/_repo.yaml
@@ -0,0 +1,2 @@
+repo:
+ namespace: gov.llnl.spack