summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorTodd Gamblin <tgamblin@llnl.gov>2014-09-16 23:53:44 -0700
committerTodd Gamblin <tgamblin@llnl.gov>2014-09-16 23:53:44 -0700
commit881fdb66ae9a06671cc756baae5a454fce2c2e3d (patch)
treeac5a9928f306ae26a32582b60cffc5853e15c514 /lib
parent782e45e5b1247f636f519c23de4cc54fe0cc21d1 (diff)
parente2509717b9fc022ddbdfab532fcfc0189102c741 (diff)
downloadspack-881fdb66ae9a06671cc756baae5a454fce2c2e3d.tar.gz
spack-881fdb66ae9a06671cc756baae5a454fce2c2e3d.tar.bz2
spack-881fdb66ae9a06671cc756baae5a454fce2c2e3d.tar.xz
spack-881fdb66ae9a06671cc756baae5a454fce2c2e3d.zip
Merge branch 'develop' into features/fileutils
Conflicts: lib/spack/spack/packages.py
Diffstat (limited to 'lib')
-rwxr-xr-xlib/spack/env/cc6
-rw-r--r--lib/spack/external/__init__.py33
-rw-r--r--lib/spack/external/argparse.py2382
-rw-r--r--lib/spack/external/functools.py30
-rw-r--r--lib/spack/external/ordereddict.py262
-rwxr-xr-xlib/spack/external/pyqver2.py393
-rw-r--r--lib/spack/spack/__init__.py44
-rw-r--r--lib/spack/spack/build_environment.py6
-rw-r--r--lib/spack/spack/cmd/bootstrap.py25
-rw-r--r--lib/spack/spack/cmd/cd.py38
-rw-r--r--lib/spack/spack/cmd/checksum.py2
-rw-r--r--lib/spack/spack/cmd/clean.py2
-rw-r--r--lib/spack/spack/cmd/compiler.py2
-rw-r--r--lib/spack/spack/cmd/config.py2
-rw-r--r--lib/spack/spack/cmd/dependents.py8
-rw-r--r--lib/spack/spack/cmd/dotkit.py99
-rw-r--r--lib/spack/spack/cmd/fetch.py2
-rw-r--r--lib/spack/spack/cmd/find.py4
-rw-r--r--lib/spack/spack/cmd/info.py10
-rw-r--r--lib/spack/spack/cmd/install.py2
-rw-r--r--lib/spack/spack/cmd/load.py38
-rw-r--r--lib/spack/spack/cmd/location.py93
-rw-r--r--lib/spack/spack/cmd/mirror.py4
-rw-r--r--lib/spack/spack/cmd/module.py107
-rw-r--r--lib/spack/spack/cmd/patch.py2
-rw-r--r--lib/spack/spack/cmd/providers.py2
-rw-r--r--lib/spack/spack/cmd/python.py2
-rw-r--r--lib/spack/spack/cmd/spec.py2
-rw-r--r--lib/spack/spack/cmd/stage.py13
-rw-r--r--lib/spack/spack/cmd/uninstall.py2
-rw-r--r--lib/spack/spack/cmd/unload.py38
-rw-r--r--lib/spack/spack/cmd/unuse.py10
-rw-r--r--lib/spack/spack/cmd/use.py24
-rw-r--r--lib/spack/spack/compiler.py2
-rw-r--r--lib/spack/spack/compilers/__init__.py2
-rw-r--r--lib/spack/spack/concretize.py2
-rw-r--r--lib/spack/spack/config.py66
-rw-r--r--lib/spack/spack/directory_layout.py47
-rw-r--r--lib/spack/spack/error.py3
-rw-r--r--lib/spack/spack/hooks/dotkit.py58
-rw-r--r--lib/spack/spack/hooks/tclmodule.py35
-rw-r--r--lib/spack/spack/modules.py247
-rw-r--r--lib/spack/spack/package.py4
-rw-r--r--lib/spack/spack/packages.py49
-rw-r--r--lib/spack/spack/relations.py1
-rw-r--r--lib/spack/spack/spec.py296
-rw-r--r--lib/spack/spack/stage.py3
-rw-r--r--lib/spack/spack/test/__init__.py8
-rw-r--r--lib/spack/spack/test/concretize.py24
-rw-r--r--lib/spack/spack/test/directory_layout.py155
-rw-r--r--lib/spack/spack/test/install.py16
-rw-r--r--lib/spack/spack/test/mock_packages_test.py3
-rw-r--r--lib/spack/spack/test/python_version.py97
-rw-r--r--lib/spack/spack/test/spec_dag.py238
-rw-r--r--lib/spack/spack/test/stage.py122
-rw-r--r--lib/spack/spack/util/crypto.py2
-rw-r--r--lib/spack/spack/util/executable.py2
-rw-r--r--lib/spack/spack/util/string.py4
-rw-r--r--lib/spack/spack/version.py3
59 files changed, 4625 insertions, 553 deletions
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 9e71d25caf..266e41cb48 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -1,12 +1,11 @@
#!/usr/bin/env python
import sys
-if not sys.version_info[:2] >= (2,7):
- sys.exit("Spack requires Python 2.7. Version was %s." % sys.version_info)
+if not sys.version_info[:2] >= (2,6):
+ sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info)
import os
import re
import subprocess
-import argparse
from contextlib import closing
# Import spack parameters through the build environment.
@@ -18,6 +17,7 @@ if not spack_lib:
# Grab a minimal set of spack packages
sys.path.append(spack_lib)
from spack.compilation import *
+from external import argparse
import llnl.util.tty as tty
spack_prefix = get_env_var("SPACK_PREFIX")
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
new file mode 100644
index 0000000000..1cc981930a
--- /dev/null
+++ b/lib/spack/external/__init__.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+This module contains external, potentially separately licensed,
+packages that are included in spack.
+
+So far:
+ argparse: We include our own version to be Python 2.6 compatible.
+ pyqver2: External script to query required python version of python source code.
+ Used for ensuring 2.6 compatibility.
+"""
diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py
new file mode 100644
index 0000000000..c8dfdd3bed
--- /dev/null
+++ b/lib/spack/external/argparse.py
@@ -0,0 +1,2382 @@
+# argparse is (c) 2006-2009 Steven J. Bethard <steven.bethard@gmail.com>.
+#
+# The argparse module was contributed to Python as of Python 2.7 and thus
+# was licensed under the Python license. Same license applies to all files in
+# the argparse package project.
+#
+# For details about the Python License, please see doc/Python-License.txt.
+#
+# History
+# -------
+#
+# Before (and including) argparse 1.1, the argparse package was licensed under
+# Apache License v2.0.
+#
+# After argparse 1.1, all project files from the argparse project were deleted
+# due to license compatibility issues between Apache License 2.0 and GNU GPL v2.
+#
+# The project repository then had a clean start with some files taken from
+# Python 2.7.1, so definitely all files are under Python License now.
+#
+# Author: Steven J. Bethard <steven.bethard@gmail.com>.
+#
+"""Command-line parsing library
+
+This module is an optparse-inspired command-line parsing library that:
+
+ - handles both optional and positional arguments
+ - produces highly informative usage messages
+ - supports parsers that dispatch to sub-parsers
+
+The following is a simple usage example that sums integers from the
+command-line and writes the result to a file::
+
+ parser = argparse.ArgumentParser(
+ description='sum the integers at the command line')
+ parser.add_argument(
+ 'integers', metavar='int', nargs='+', type=int,
+ help='an integer to be summed')
+ parser.add_argument(
+ '--log', default=sys.stdout, type=argparse.FileType('w'),
+ help='the file where the sum should be written')
+ args = parser.parse_args()
+ args.log.write('%s' % sum(args.integers))
+ args.log.close()
+
+The module contains the following public classes:
+
+ - ArgumentParser -- The main entry point for command-line parsing. As the
+ example above shows, the add_argument() method is used to populate
+ the parser with actions for optional and positional arguments. Then
+ the parse_args() method is invoked to convert the args at the
+ command-line into an object with attributes.
+
+ - ArgumentError -- The exception raised by ArgumentParser objects when
+ there are errors with the parser's actions. Errors raised while
+ parsing the command-line are caught by ArgumentParser and emitted
+ as command-line messages.
+
+ - FileType -- A factory for defining types of files to be created. As the
+ example above shows, instances of FileType are typically passed as
+ the type= argument of add_argument() calls.
+
+ - Action -- The base class for parser actions. Typically actions are
+ selected by passing strings like 'store_true' or 'append_const' to
+ the action= argument of add_argument(). However, for greater
+ customization of ArgumentParser actions, subclasses of Action may
+ be defined and passed as the action= argument.
+
+ - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
+ ArgumentDefaultsHelpFormatter -- Formatter classes which
+ may be passed as the formatter_class= argument to the
+ ArgumentParser constructor. HelpFormatter is the default,
+ RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
+ not to change the formatting for help text, and
+ ArgumentDefaultsHelpFormatter adds information about argument defaults
+ to the help.
+
+All other classes in this module are considered implementation details.
+(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
+considered public as object names -- the API of the formatter objects is
+still considered an implementation detail.)
+"""
+
+__version__ = '1.2.1'
+__all__ = [
+ 'ArgumentParser',
+ 'ArgumentError',
+ 'ArgumentTypeError',
+ 'FileType',
+ 'HelpFormatter',
+ 'ArgumentDefaultsHelpFormatter',
+ 'RawDescriptionHelpFormatter',
+ 'RawTextHelpFormatter',
+ 'Namespace',
+ 'Action',
+ 'ONE_OR_MORE',
+ 'OPTIONAL',
+ 'PARSER',
+ 'REMAINDER',
+ 'SUPPRESS',
+ 'ZERO_OR_MORE',
+]
+
+
+import copy as _copy
+import os as _os
+import re as _re
+import sys as _sys
+import textwrap as _textwrap
+
+from gettext import gettext as _
+
+try:
+ set
+except NameError:
+ # for python < 2.4 compatibility (sets module is there since 2.3):
+ from sets import Set as set
+
+try:
+ basestring
+except NameError:
+ basestring = str
+
+try:
+ sorted
+except NameError:
+ # for python < 2.4 compatibility:
+ def sorted(iterable, reverse=False):
+ result = list(iterable)
+ result.sort()
+ if reverse:
+ result.reverse()
+ return result
+
+
+def _callable(obj):
+ return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
+
+
+SUPPRESS = '==SUPPRESS=='
+
+OPTIONAL = '?'
+ZERO_OR_MORE = '*'
+ONE_OR_MORE = '+'
+PARSER = 'A...'
+REMAINDER = '...'
+_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
+
+# =============================
+# Utility functions and classes
+# =============================
+
+class _AttributeHolder(object):
+ """Abstract base class that provides __repr__.
+
+ The __repr__ method returns a string in the format::
+ ClassName(attr=name, attr=name, ...)
+ The attributes are determined either by a class-level attribute,
+ '_kwarg_names', or by inspecting the instance __dict__.
+ """
+
+ def __repr__(self):
+ type_name = type(self).__name__
+ arg_strings = []
+ for arg in self._get_args():
+ arg_strings.append(repr(arg))
+ for name, value in self._get_kwargs():
+ arg_strings.append('%s=%r' % (name, value))
+ return '%s(%s)' % (type_name, ', '.join(arg_strings))
+
+ def _get_kwargs(self):
+ return sorted(self.__dict__.items())
+
+ def _get_args(self):
+ return []
+
+
+def _ensure_value(namespace, name, value):
+ if getattr(namespace, name, None) is None:
+ setattr(namespace, name, value)
+ return getattr(namespace, name)
+
+
+# ===============
+# Formatting Help
+# ===============
+
+class HelpFormatter(object):
+ """Formatter for generating usage messages and argument help strings.
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def __init__(self,
+ prog,
+ indent_increment=2,
+ max_help_position=24,
+ width=None):
+
+ # default setting for width
+ if width is None:
+ try:
+ width = int(_os.environ['COLUMNS'])
+ except (KeyError, ValueError):
+ width = 80
+ width -= 2
+
+ self._prog = prog
+ self._indent_increment = indent_increment
+ self._max_help_position = max_help_position
+ self._width = width
+
+ self._current_indent = 0
+ self._level = 0
+ self._action_max_length = 0
+
+ self._root_section = self._Section(self, None)
+ self._current_section = self._root_section
+
+ self._whitespace_matcher = _re.compile(r'\s+')
+ self._long_break_matcher = _re.compile(r'\n\n\n+')
+
+ # ===============================
+ # Section and indentation methods
+ # ===============================
+ def _indent(self):
+ self._current_indent += self._indent_increment
+ self._level += 1
+
+ def _dedent(self):
+ self._current_indent -= self._indent_increment
+ assert self._current_indent >= 0, 'Indent decreased below 0.'
+ self._level -= 1
+
+ class _Section(object):
+
+ def __init__(self, formatter, parent, heading=None):
+ self.formatter = formatter
+ self.parent = parent
+ self.heading = heading
+ self.items = []
+
+ def format_help(self):
+ # format the indented section
+ if self.parent is not None:
+ self.formatter._indent()
+ join = self.formatter._join_parts
+ for func, args in self.items:
+ func(*args)
+ item_help = join([func(*args) for func, args in self.items])
+ if self.parent is not None:
+ self.formatter._dedent()
+
+ # return nothing if the section was empty
+ if not item_help:
+ return ''
+
+ # add the heading if the section was non-empty
+ if self.heading is not SUPPRESS and self.heading is not None:
+ current_indent = self.formatter._current_indent
+ heading = '%*s%s:\n' % (current_indent, '', self.heading)
+ else:
+ heading = ''
+
+ # join the section-initial newline, the heading and the help
+ return join(['\n', heading, item_help, '\n'])
+
+ def _add_item(self, func, args):
+ self._current_section.items.append((func, args))
+
+ # ========================
+ # Message building methods
+ # ========================
+ def start_section(self, heading):
+ self._indent()
+ section = self._Section(self, self._current_section, heading)
+ self._add_item(section.format_help, [])
+ self._current_section = section
+
+ def end_section(self):
+ self._current_section = self._current_section.parent
+ self._dedent()
+
+ def add_text(self, text):
+ if text is not SUPPRESS and text is not None:
+ self._add_item(self._format_text, [text])
+
+ def add_usage(self, usage, actions, groups, prefix=None):
+ if usage is not SUPPRESS:
+ args = usage, actions, groups, prefix
+ self._add_item(self._format_usage, args)
+
+ def add_argument(self, action):
+ if action.help is not SUPPRESS:
+
+ # find all invocations
+ get_invocation = self._format_action_invocation
+ invocations = [get_invocation(action)]
+ for subaction in self._iter_indented_subactions(action):
+ invocations.append(get_invocation(subaction))
+
+ # update the maximum item length
+ invocation_length = max([len(s) for s in invocations])
+ action_length = invocation_length + self._current_indent
+ self._action_max_length = max(self._action_max_length,
+ action_length)
+
+ # add the item to the list
+ self._add_item(self._format_action, [action])
+
+ def add_arguments(self, actions):
+ for action in actions:
+ self.add_argument(action)
+
+ # =======================
+ # Help-formatting methods
+ # =======================
+ def format_help(self):
+ help = self._root_section.format_help()
+ if help:
+ help = self._long_break_matcher.sub('\n\n', help)
+ help = help.strip('\n') + '\n'
+ return help
+
+ def _join_parts(self, part_strings):
+ return ''.join([part
+ for part in part_strings
+ if part and part is not SUPPRESS])
+
+ def _format_usage(self, usage, actions, groups, prefix):
+ if prefix is None:
+ prefix = _('usage: ')
+
+ # if usage is specified, use that
+ if usage is not None:
+ usage = usage % dict(prog=self._prog)
+
+ # if no optionals or positionals are available, usage is just prog
+ elif usage is None and not actions:
+ usage = '%(prog)s' % dict(prog=self._prog)
+
+ # if optionals and positionals are available, calculate usage
+ elif usage is None:
+ prog = '%(prog)s' % dict(prog=self._prog)
+
+ # split optionals from positionals
+ optionals = []
+ positionals = []
+ for action in actions:
+ if action.option_strings:
+ optionals.append(action)
+ else:
+ positionals.append(action)
+
+ # build full usage string
+ format = self._format_actions_usage
+ action_usage = format(optionals + positionals, groups)
+ usage = ' '.join([s for s in [prog, action_usage] if s])
+
+ # wrap the usage parts if it's too long
+ text_width = self._width - self._current_indent
+ if len(prefix) + len(usage) > text_width:
+
+ # break usage into wrappable parts
+ part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
+ opt_usage = format(optionals, groups)
+ pos_usage = format(positionals, groups)
+ opt_parts = _re.findall(part_regexp, opt_usage)
+ pos_parts = _re.findall(part_regexp, pos_usage)
+ assert ' '.join(opt_parts) == opt_usage
+ assert ' '.join(pos_parts) == pos_usage
+
+ # helper for wrapping lines
+ def get_lines(parts, indent, prefix=None):
+ lines = []
+ line = []
+ if prefix is not None:
+ line_len = len(prefix) - 1
+ else:
+ line_len = len(indent) - 1
+ for part in parts:
+ if line_len + 1 + len(part) > text_width:
+ lines.append(indent + ' '.join(line))
+ line = []
+ line_len = len(indent) - 1
+ line.append(part)
+ line_len += len(part) + 1
+ if line:
+ lines.append(indent + ' '.join(line))
+ if prefix is not None:
+ lines[0] = lines[0][len(indent):]
+ return lines
+
+ # if prog is short, follow it with optionals or positionals
+ if len(prefix) + len(prog) <= 0.75 * text_width:
+ indent = ' ' * (len(prefix) + len(prog) + 1)
+ if opt_parts:
+ lines = get_lines([prog] + opt_parts, indent, prefix)
+ lines.extend(get_lines(pos_parts, indent))
+ elif pos_parts:
+ lines = get_lines([prog] + pos_parts, indent, prefix)
+ else:
+ lines = [prog]
+
+ # if prog is long, put it on its own line
+ else:
+ indent = ' ' * len(prefix)
+ parts = opt_parts + pos_parts
+ lines = get_lines(parts, indent)
+ if len(lines) > 1:
+ lines = []
+ lines.extend(get_lines(opt_parts, indent))
+ lines.extend(get_lines(pos_parts, indent))
+ lines = [prog] + lines
+
+ # join lines into usage
+ usage = '\n'.join(lines)
+
+ # prefix with 'usage:'
+ return '%s%s\n\n' % (prefix, usage)
+
+ def _format_actions_usage(self, actions, groups):
+ # find group indices and identify actions in groups
+ group_actions = set()
+ inserts = {}
+ for group in groups:
+ try:
+ start = actions.index(group._group_actions[0])
+ except ValueError:
+ continue
+ else:
+ end = start + len(group._group_actions)
+ if actions[start:end] == group._group_actions:
+ for action in group._group_actions:
+ group_actions.add(action)
+ if not group.required:
+ if start in inserts:
+ inserts[start] += ' ['
+ else:
+ inserts[start] = '['
+ inserts[end] = ']'
+ else:
+ if start in inserts:
+ inserts[start] += ' ('
+ else:
+ inserts[start] = '('
+ inserts[end] = ')'
+ for i in range(start + 1, end):
+ inserts[i] = '|'
+
+ # collect all actions format strings
+ parts = []
+ for i, action in enumerate(actions):
+
+ # suppressed arguments are marked with None
+ # remove | separators for suppressed arguments
+ if action.help is SUPPRESS:
+ parts.append(None)
+ if inserts.get(i) == '|':
+ inserts.pop(i)
+ elif inserts.get(i + 1) == '|':
+ inserts.pop(i + 1)
+
+ # produce all arg strings
+ elif not action.option_strings:
+ part = self._format_args(action, action.dest)
+
+ # if it's in a group, strip the outer []
+ if action in group_actions:
+ if part[0] == '[' and part[-1] == ']':
+ part = part[1:-1]
+
+ # add the action string to the list
+ parts.append(part)
+
+ # produce the first way to invoke the option in brackets
+ else:
+ option_string = action.option_strings[0]
+
+ # if the Optional doesn't take a value, format is:
+ # -s or --long
+ if action.nargs == 0:
+ part = '%s' % option_string
+
+ # if the Optional takes a value, format is:
+ # -s ARGS or --long ARGS
+ else:
+ default = action.dest.upper()
+ args_string = self._format_args(action, default)
+ part = '%s %s' % (option_string, args_string)
+
+ # make it look optional if it's not required or in a group
+ if not action.required and action not in group_actions:
+ part = '[%s]' % part
+
+ # add the action string to the list
+ parts.append(part)
+
+ # insert things at the necessary indices
+ for i in sorted(inserts, reverse=True):
+ parts[i:i] = [inserts[i]]
+
+ # join all the action items with spaces
+ text = ' '.join([item for item in parts if item is not None])
+
+ # clean up separators for mutually exclusive groups
+ open = r'[\[(]'
+ close = r'[\])]'
+ text = _re.sub(r'(%s) ' % open, r'\1', text)
+ text = _re.sub(r' (%s)' % close, r'\1', text)
+ text = _re.sub(r'%s *%s' % (open, close), r'', text)
+ text = _re.sub(r'\(([^|]*)\)', r'\1', text)
+ text = text.strip()
+
+ # return the text
+ return text
+
+ def _format_text(self, text):
+ if '%(prog)' in text:
+ text = text % dict(prog=self._prog)
+ text_width = self._width - self._current_indent
+ indent = ' ' * self._current_indent
+ return self._fill_text(text, text_width, indent) + '\n\n'
+
+ def _format_action(self, action):
+ # determine the required width and the entry label
+ help_position = min(self._action_max_length + 2,
+ self._max_help_position)
+ help_width = self._width - help_position
+ action_width = help_position - self._current_indent - 2
+ action_header = self._format_action_invocation(action)
+
+        # no help; start on same line and add a final newline
+ if not action.help:
+ tup = self._current_indent, '', action_header
+ action_header = '%*s%s\n' % tup
+
+ # short action name; start on the same line and pad two spaces
+ elif len(action_header) <= action_width:
+ tup = self._current_indent, '', action_width, action_header
+ action_header = '%*s%-*s ' % tup
+ indent_first = 0
+
+ # long action name; start on the next line
+ else:
+ tup = self._current_indent, '', action_header
+ action_header = '%*s%s\n' % tup
+ indent_first = help_position
+
+ # collect the pieces of the action help
+ parts = [action_header]
+
+ # if there was help for the action, add lines of help text
+ if action.help:
+ help_text = self._expand_help(action)
+ help_lines = self._split_lines(help_text, help_width)
+ parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
+ for line in help_lines[1:]:
+ parts.append('%*s%s\n' % (help_position, '', line))
+
+ # or add a newline if the description doesn't end with one
+ elif not action_header.endswith('\n'):
+ parts.append('\n')
+
+ # if there are any sub-actions, add their help as well
+ for subaction in self._iter_indented_subactions(action):
+ parts.append(self._format_action(subaction))
+
+ # return a single string
+ return self._join_parts(parts)
+
+ def _format_action_invocation(self, action):
+ if not action.option_strings:
+ metavar, = self._metavar_formatter(action, action.dest)(1)
+ return metavar
+
+ else:
+ parts = []
+
+ # if the Optional doesn't take a value, format is:
+ # -s, --long
+ if action.nargs == 0:
+ parts.extend(action.option_strings)
+
+ # if the Optional takes a value, format is:
+ # -s ARGS, --long ARGS
+ else:
+ default = action.dest.upper()
+ args_string = self._format_args(action, default)
+ for option_string in action.option_strings:
+ parts.append('%s %s' % (option_string, args_string))
+
+ return ', '.join(parts)
+
+ def _metavar_formatter(self, action, default_metavar):
+ if action.metavar is not None:
+ result = action.metavar
+ elif action.choices is not None:
+ choice_strs = [str(choice) for choice in action.choices]
+ result = '{%s}' % ','.join(choice_strs)
+ else:
+ result = default_metavar
+
+ def format(tuple_size):
+ if isinstance(result, tuple):
+ return result
+ else:
+ return (result, ) * tuple_size
+ return format
+
+ def _format_args(self, action, default_metavar):
+ get_metavar = self._metavar_formatter(action, default_metavar)
+ if action.nargs is None:
+ result = '%s' % get_metavar(1)
+ elif action.nargs == OPTIONAL:
+ result = '[%s]' % get_metavar(1)
+ elif action.nargs == ZERO_OR_MORE:
+ result = '[%s [%s ...]]' % get_metavar(2)
+ elif action.nargs == ONE_OR_MORE:
+ result = '%s [%s ...]' % get_metavar(2)
+ elif action.nargs == REMAINDER:
+ result = '...'
+ elif action.nargs == PARSER:
+ result = '%s ...' % get_metavar(1)
+ else:
+ formats = ['%s' for _ in range(action.nargs)]
+ result = ' '.join(formats) % get_metavar(action.nargs)
+ return result
+
+ def _expand_help(self, action):
+ params = dict(vars(action), prog=self._prog)
+ for name in list(params):
+ if params[name] is SUPPRESS:
+ del params[name]
+ for name in list(params):
+ if hasattr(params[name], '__name__'):
+ params[name] = params[name].__name__
+ if params.get('choices') is not None:
+ choices_str = ', '.join([str(c) for c in params['choices']])
+ params['choices'] = choices_str
+ return self._get_help_string(action) % params
+
+ def _iter_indented_subactions(self, action):
+ try:
+ get_subactions = action._get_subactions
+ except AttributeError:
+ pass
+ else:
+ self._indent()
+ for subaction in get_subactions():
+ yield subaction
+ self._dedent()
+
+ def _split_lines(self, text, width):
+ text = self._whitespace_matcher.sub(' ', text).strip()
+ return _textwrap.wrap(text, width)
+
+ def _fill_text(self, text, width, indent):
+ text = self._whitespace_matcher.sub(' ', text).strip()
+ return _textwrap.fill(text, width, initial_indent=indent,
+ subsequent_indent=indent)
+
+ def _get_help_string(self, action):
+ return action.help
+
+
+class RawDescriptionHelpFormatter(HelpFormatter):
+ """Help message formatter which retains any formatting in descriptions.
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def _fill_text(self, text, width, indent):
+ return ''.join([indent + line for line in text.splitlines(True)])
+
+
+class RawTextHelpFormatter(RawDescriptionHelpFormatter):
+ """Help message formatter which retains formatting of all help text.
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def _split_lines(self, text, width):
+ return text.splitlines()
+
+
+class ArgumentDefaultsHelpFormatter(HelpFormatter):
+ """Help message formatter which adds default values to argument help.
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def _get_help_string(self, action):
+ help = action.help
+ if '%(default)' not in action.help:
+ if action.default is not SUPPRESS:
+ defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
+ if action.option_strings or action.nargs in defaulting_nargs:
+ help += ' (default: %(default)s)'
+ return help
+
+
+# =====================
+# Options and Arguments
+# =====================
+
+def _get_action_name(argument):
+ if argument is None:
+ return None
+ elif argument.option_strings:
+ return '/'.join(argument.option_strings)
+ elif argument.metavar not in (None, SUPPRESS):
+ return argument.metavar
+ elif argument.dest not in (None, SUPPRESS):
+ return argument.dest
+ else:
+ return None
+
+
+class ArgumentError(Exception):
+ """An error from creating or using an argument (optional or positional).
+
+ The string value of this exception is the message, augmented with
+ information about the argument that caused it.
+ """
+
+ def __init__(self, argument, message):
+ self.argument_name = _get_action_name(argument)
+ self.message = message
+
+ def __str__(self):
+ if self.argument_name is None:
+ format = '%(message)s'
+ else:
+ format = 'argument %(argument_name)s: %(message)s'
+ return format % dict(message=self.message,
+ argument_name=self.argument_name)
+
+
+class ArgumentTypeError(Exception):
+ """An error from trying to convert a command line string to a type."""
+ pass
+
+
+# ==============
+# Action classes
+# ==============
+
+class Action(_AttributeHolder):
+ """Information about how to convert command line strings to Python objects.
+
+ Action objects are used by an ArgumentParser to represent the information
+ needed to parse a single argument from one or more strings from the
+ command line. The keyword arguments to the Action constructor are also
+ all attributes of Action instances.
+
+ Keyword Arguments:
+
+ - option_strings -- A list of command-line option strings which
+ should be associated with this action.
+
+ - dest -- The name of the attribute to hold the created object(s)
+
+ - nargs -- The number of command-line arguments that should be
+ consumed. By default, one argument will be consumed and a single
+ value will be produced. Other values include:
+ - N (an integer) consumes N arguments (and produces a list)
+ - '?' consumes zero or one arguments
+ - '*' consumes zero or more arguments (and produces a list)
+ - '+' consumes one or more arguments (and produces a list)
+ Note that the difference between the default and nargs=1 is that
+ with the default, a single value will be produced, while with
+ nargs=1, a list containing a single value will be produced.
+
+ - const -- The value to be produced if the option is specified and the
+ option uses an action that takes no values.
+
+ - default -- The value to be produced if the option is not specified.
+
+ - type -- The type which the command-line arguments should be converted
+ to, should be one of 'string', 'int', 'float', 'complex' or a
+ callable object that accepts a single string argument. If None,
+ 'string' is assumed.
+
+ - choices -- A container of values that should be allowed. If not None,
+ after a command-line argument has been converted to the appropriate
+ type, an exception will be raised if it is not a member of this
+ collection.
+
+ - required -- True if the action must always be specified at the
+ command line. This is only meaningful for optional command-line
+ arguments.
+
+ - help -- The help string describing the argument.
+
+ - metavar -- The name to be used for the option's argument with the
+ help string. If None, the 'dest' value will be used as the name.
+ """
+
+ def __init__(self,
+ option_strings,
+ dest,
+ nargs=None,
+ const=None,
+ default=None,
+ type=None,
+ choices=None,
+ required=False,
+ help=None,
+ metavar=None):
+ self.option_strings = option_strings
+ self.dest = dest
+ self.nargs = nargs
+ self.const = const
+ self.default = default
+ self.type = type
+ self.choices = choices
+ self.required = required
+ self.help = help
+ self.metavar = metavar
+
+ def _get_kwargs(self):
+ names = [
+ 'option_strings',
+ 'dest',
+ 'nargs',
+ 'const',
+ 'default',
+ 'type',
+ 'choices',
+ 'help',
+ 'metavar',
+ ]
+ return [(name, getattr(self, name)) for name in names]
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ raise NotImplementedError(_('.__call__() not defined'))
+
+
+class _StoreAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ nargs=None,
+ const=None,
+ default=None,
+ type=None,
+ choices=None,
+ required=False,
+ help=None,
+ metavar=None):
+ if nargs == 0:
+ raise ValueError('nargs for store actions must be > 0; if you '
+ 'have nothing to store, actions such as store '
+ 'true or store const may be more appropriate')
+ if const is not None and nargs != OPTIONAL:
+ raise ValueError('nargs must be %r to supply const' % OPTIONAL)
+ super(_StoreAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=nargs,
+ const=const,
+ default=default,
+ type=type,
+ choices=choices,
+ required=required,
+ help=help,
+ metavar=metavar)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, values)
+
+
+class _StoreConstAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ const,
+ default=None,
+ required=False,
+ help=None,
+ metavar=None):
+ super(_StoreConstAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=0,
+ const=const,
+ default=default,
+ required=required,
+ help=help)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, self.const)
+
+
+class _StoreTrueAction(_StoreConstAction):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ default=False,
+ required=False,
+ help=None):
+ super(_StoreTrueAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ const=True,
+ default=default,
+ required=required,
+ help=help)
+
+
+class _StoreFalseAction(_StoreConstAction):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ default=True,
+ required=False,
+ help=None):
+ super(_StoreFalseAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ const=False,
+ default=default,
+ required=required,
+ help=help)
+
+
+class _AppendAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ nargs=None,
+ const=None,
+ default=None,
+ type=None,
+ choices=None,
+ required=False,
+ help=None,
+ metavar=None):
+ if nargs == 0:
+ raise ValueError('nargs for append actions must be > 0; if arg '
+ 'strings are not supplying the value to append, '
+ 'the append const action may be more appropriate')
+ if const is not None and nargs != OPTIONAL:
+ raise ValueError('nargs must be %r to supply const' % OPTIONAL)
+ super(_AppendAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=nargs,
+ const=const,
+ default=default,
+ type=type,
+ choices=choices,
+ required=required,
+ help=help,
+ metavar=metavar)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ items = _copy.copy(_ensure_value(namespace, self.dest, []))
+ items.append(values)
+ setattr(namespace, self.dest, items)
+
+
+class _AppendConstAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ const,
+ default=None,
+ required=False,
+ help=None,
+ metavar=None):
+ super(_AppendConstAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=0,
+ const=const,
+ default=default,
+ required=required,
+ help=help,
+ metavar=metavar)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ items = _copy.copy(_ensure_value(namespace, self.dest, []))
+ items.append(self.const)
+ setattr(namespace, self.dest, items)
+
+
+class _CountAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest,
+ default=None,
+ required=False,
+ help=None):
+ super(_CountAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=0,
+ default=default,
+ required=required,
+ help=help)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ new_count = _ensure_value(namespace, self.dest, 0) + 1
+ setattr(namespace, self.dest, new_count)
+
+
+class _HelpAction(Action):
+
+ def __init__(self,
+ option_strings,
+ dest=SUPPRESS,
+ default=SUPPRESS,
+ help=None):
+ super(_HelpAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ parser.print_help()
+ parser.exit()
+
+
+class _VersionAction(Action):
+
+ def __init__(self,
+ option_strings,
+ version=None,
+ dest=SUPPRESS,
+ default=SUPPRESS,
+ help="show program's version number and exit"):
+ super(_VersionAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+ self.version = version
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ version = self.version
+ if version is None:
+ version = parser.version
+ formatter = parser._get_formatter()
+ formatter.add_text(version)
+ parser.exit(message=formatter.format_help())
+
+
+class _SubParsersAction(Action):
+
+ class _ChoicesPseudoAction(Action):
+
+ def __init__(self, name, help):
+ sup = super(_SubParsersAction._ChoicesPseudoAction, self)
+ sup.__init__(option_strings=[], dest=name, help=help)
+
+ def __init__(self,
+ option_strings,
+ prog,
+ parser_class,
+ dest=SUPPRESS,
+ help=None,
+ metavar=None):
+
+ self._prog_prefix = prog
+ self._parser_class = parser_class
+ self._name_parser_map = {}
+ self._choices_actions = []
+
+ super(_SubParsersAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=PARSER,
+ choices=self._name_parser_map,
+ help=help,
+ metavar=metavar)
+
+ def add_parser(self, name, **kwargs):
+ # set prog from the existing prefix
+ if kwargs.get('prog') is None:
+ kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
+
+ # create a pseudo-action to hold the choice help
+ if 'help' in kwargs:
+ help = kwargs.pop('help')
+ choice_action = self._ChoicesPseudoAction(name, help)
+ self._choices_actions.append(choice_action)
+
+ # create the parser and add it to the map
+ parser = self._parser_class(**kwargs)
+ self._name_parser_map[name] = parser
+ return parser
+
+ def _get_subactions(self):
+ return self._choices_actions
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ parser_name = values[0]
+ arg_strings = values[1:]
+
+ # set the parser name if requested
+ if self.dest is not SUPPRESS:
+ setattr(namespace, self.dest, parser_name)
+
+ # select the parser
+ try:
+ parser = self._name_parser_map[parser_name]
+ except KeyError:
+ tup = parser_name, ', '.join(self._name_parser_map)
+ msg = _('unknown parser %r (choices: %s)' % tup)
+ raise ArgumentError(self, msg)
+
+ # parse all the remaining options into the namespace
+ # store any unrecognized options on the object, so that the top
+ # level parser can decide what to do with them
+ namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
+ if arg_strings:
+ vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
+ getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
+
+
+# ==============
+# Type classes
+# ==============
+
+class FileType(object):
+ """Factory for creating file object types
+
+ Instances of FileType are typically passed as type= arguments to the
+ ArgumentParser add_argument() method.
+
+ Keyword Arguments:
+ - mode -- A string indicating how the file is to be opened. Accepts the
+ same values as the builtin open() function.
+ - bufsize -- The file's desired buffer size. Accepts the same values as
+ the builtin open() function.
+ """
+
+ def __init__(self, mode='r', bufsize=None):
+ self._mode = mode
+ self._bufsize = bufsize
+
+ def __call__(self, string):
+ # the special argument "-" means sys.std{in,out}
+ if string == '-':
+ if 'r' in self._mode:
+ return _sys.stdin
+ elif 'w' in self._mode:
+ return _sys.stdout
+ else:
+ msg = _('argument "-" with mode %r' % self._mode)
+ raise ValueError(msg)
+
+ # all other arguments are used as file names
+ if self._bufsize:
+ return open(string, self._mode, self._bufsize)
+ else:
+ return open(string, self._mode)
+
+ def __repr__(self):
+ args = [self._mode, self._bufsize]
+ args_str = ', '.join([repr(arg) for arg in args if arg is not None])
+ return '%s(%s)' % (type(self).__name__, args_str)
+
+# ===========================
+# Optional and Positional Parsing
+# ===========================
+
+class Namespace(_AttributeHolder):
+ """Simple object for storing attributes.
+
+ Implements equality by attribute names and values, and provides a simple
+ string representation.
+ """
+
+ def __init__(self, **kwargs):
+ for name in kwargs:
+ setattr(self, name, kwargs[name])
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ return vars(self) == vars(other)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __contains__(self, key):
+ return key in self.__dict__
+
+
+class _ActionsContainer(object):
+
+ def __init__(self,
+ description,
+ prefix_chars,
+ argument_default,
+ conflict_handler):
+ super(_ActionsContainer, self).__init__()
+
+ self.description = description
+ self.argument_default = argument_default
+ self.prefix_chars = prefix_chars
+ self.conflict_handler = conflict_handler
+
+ # set up registries
+ self._registries = {}
+
+ # register actions
+ self.register('action', None, _StoreAction)
+ self.register('action', 'store', _StoreAction)
+ self.register('action', 'store_const', _StoreConstAction)
+ self.register('action', 'store_true', _StoreTrueAction)
+ self.register('action', 'store_false', _StoreFalseAction)
+ self.register('action', 'append', _AppendAction)
+ self.register('action', 'append_const', _AppendConstAction)
+ self.register('action', 'count', _CountAction)
+ self.register('action', 'help', _HelpAction)
+ self.register('action', 'version', _VersionAction)
+ self.register('action', 'parsers', _SubParsersAction)
+
+ # raise an exception if the conflict handler is invalid
+ self._get_handler()
+
+ # action storage
+ self._actions = []
+ self._option_string_actions = {}
+
+ # groups
+ self._action_groups = []
+ self._mutually_exclusive_groups = []
+
+ # defaults storage
+ self._defaults = {}
+
+ # determines whether an "option" looks like a negative number
+ self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
+
+ # whether or not there are any optionals that look like negative
+ # numbers -- uses a list so it can be shared and edited
+ self._has_negative_number_optionals = []
+
+ # ====================
+ # Registration methods
+ # ====================
+ def register(self, registry_name, value, object):
+ registry = self._registries.setdefault(registry_name, {})
+ registry[value] = object
+
+ def _registry_get(self, registry_name, value, default=None):
+ return self._registries[registry_name].get(value, default)
+
+ # ==================================
+ # Namespace default accessor methods
+ # ==================================
+ def set_defaults(self, **kwargs):
+ self._defaults.update(kwargs)
+
+ # if these defaults match any existing arguments, replace
+ # the previous default on the object with the new one
+ for action in self._actions:
+ if action.dest in kwargs:
+ action.default = kwargs[action.dest]
+
+ def get_default(self, dest):
+ for action in self._actions:
+ if action.dest == dest and action.default is not None:
+ return action.default
+ return self._defaults.get(dest, None)
+
+
+ # =======================
+ # Adding argument actions
+ # =======================
+ def add_argument(self, *args, **kwargs):
+ """
+ add_argument(dest, ..., name=value, ...)
+ add_argument(option_string, option_string, ..., name=value, ...)
+ """
+
+ # if no positional args are supplied or only one is supplied and
+ # it doesn't look like an option string, parse a positional
+ # argument
+ chars = self.prefix_chars
+ if not args or len(args) == 1 and args[0][0] not in chars:
+ if args and 'dest' in kwargs:
+ raise ValueError('dest supplied twice for positional argument')
+ kwargs = self._get_positional_kwargs(*args, **kwargs)
+
+ # otherwise, we're adding an optional argument
+ else:
+ kwargs = self._get_optional_kwargs(*args, **kwargs)
+
+ # if no default was supplied, use the parser-level default
+ if 'default' not in kwargs:
+ dest = kwargs['dest']
+ if dest in self._defaults:
+ kwargs['default'] = self._defaults[dest]
+ elif self.argument_default is not None:
+ kwargs['default'] = self.argument_default
+
+ # create the action object, and add it to the parser
+ action_class = self._pop_action_class(kwargs)
+ if not _callable(action_class):
+ raise ValueError('unknown action "%s"' % action_class)
+ action = action_class(**kwargs)
+
+ # raise an error if the action type is not callable
+ type_func = self._registry_get('type', action.type, action.type)
+ if not _callable(type_func):
+ raise ValueError('%r is not callable' % type_func)
+
+ return self._add_action(action)
+
+ def add_argument_group(self, *args, **kwargs):
+ group = _ArgumentGroup(self, *args, **kwargs)
+ self._action_groups.append(group)
+ return group
+
+ def add_mutually_exclusive_group(self, **kwargs):
+ group = _MutuallyExclusiveGroup(self, **kwargs)
+ self._mutually_exclusive_groups.append(group)
+ return group
+
+ def _add_action(self, action):
+ # resolve any conflicts
+ self._check_conflict(action)
+
+ # add to actions list
+ self._actions.append(action)
+ action.container = self
+
+ # index the action by any option strings it has
+ for option_string in action.option_strings:
+ self._option_string_actions[option_string] = action
+
+ # set the flag if any option strings look like negative numbers
+ for option_string in action.option_strings:
+ if self._negative_number_matcher.match(option_string):
+ if not self._has_negative_number_optionals:
+ self._has_negative_number_optionals.append(True)
+
+ # return the created action
+ return action
+
+ def _remove_action(self, action):
+ self._actions.remove(action)
+
+ def _add_container_actions(self, container):
+ # collect groups by titles
+ title_group_map = {}
+ for group in self._action_groups:
+ if group.title in title_group_map:
+ msg = _('cannot merge actions - two groups are named %r')
+ raise ValueError(msg % (group.title))
+ title_group_map[group.title] = group
+
+ # map each action to its group
+ group_map = {}
+ for group in container._action_groups:
+
+ # if a group with the title exists, use that, otherwise
+ # create a new group matching the container's group
+ if group.title not in title_group_map:
+ title_group_map[group.title] = self.add_argument_group(
+ title=group.title,
+ description=group.description,
+ conflict_handler=group.conflict_handler)
+
+ # map the actions to their new group
+ for action in group._group_actions:
+ group_map[action] = title_group_map[group.title]
+
+ # add container's mutually exclusive groups
+ # NOTE: if add_mutually_exclusive_group ever gains title= and
+ # description= then this code will need to be expanded as above
+ for group in container._mutually_exclusive_groups:
+ mutex_group = self.add_mutually_exclusive_group(
+ required=group.required)
+
+ # map the actions to their new mutex group
+ for action in group._group_actions:
+ group_map[action] = mutex_group
+
+ # add all actions to this container or their group
+ for action in container._actions:
+ group_map.get(action, self)._add_action(action)
+
+ def _get_positional_kwargs(self, dest, **kwargs):
+ # make sure required is not specified
+ if 'required' in kwargs:
+ msg = _("'required' is an invalid argument for positionals")
+ raise TypeError(msg)
+
+ # mark positional arguments as required if at least one is
+ # always required
+ if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
+ kwargs['required'] = True
+ if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
+ kwargs['required'] = True
+
+ # return the keyword arguments with no option strings
+ return dict(kwargs, dest=dest, option_strings=[])
+
+ def _get_optional_kwargs(self, *args, **kwargs):
+ # determine short and long option strings
+ option_strings = []
+ long_option_strings = []
+ for option_string in args:
+ # error on strings that don't start with an appropriate prefix
+ if not option_string[0] in self.prefix_chars:
+ msg = _('invalid option string %r: '
+ 'must start with a character %r')
+ tup = option_string, self.prefix_chars
+ raise ValueError(msg % tup)
+
+ # strings starting with two prefix characters are long options
+ option_strings.append(option_string)
+ if option_string[0] in self.prefix_chars:
+ if len(option_string) > 1:
+ if option_string[1] in self.prefix_chars:
+ long_option_strings.append(option_string)
+
+ # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
+ dest = kwargs.pop('dest', None)
+ if dest is None:
+ if long_option_strings:
+ dest_option_string = long_option_strings[0]
+ else:
+ dest_option_string = option_strings[0]
+ dest = dest_option_string.lstrip(self.prefix_chars)
+ if not dest:
+ msg = _('dest= is required for options like %r')
+ raise ValueError(msg % option_string)
+ dest = dest.replace('-', '_')
+
+ # return the updated keyword arguments
+ return dict(kwargs, dest=dest, option_strings=option_strings)
+
+ def _pop_action_class(self, kwargs, default=None):
+ action = kwargs.pop('action', default)
+ return self._registry_get('action', action, action)
+
+ def _get_handler(self):
+ # determine function from conflict handler string
+ handler_func_name = '_handle_conflict_%s' % self.conflict_handler
+ try:
+ return getattr(self, handler_func_name)
+ except AttributeError:
+ msg = _('invalid conflict_resolution value: %r')
+ raise ValueError(msg % self.conflict_handler)
+
+ def _check_conflict(self, action):
+
+ # find all options that conflict with this option
+ confl_optionals = []
+ for option_string in action.option_strings:
+ if option_string in self._option_string_actions:
+ confl_optional = self._option_string_actions[option_string]
+ confl_optionals.append((option_string, confl_optional))
+
+ # resolve any conflicts
+ if confl_optionals:
+ conflict_handler = self._get_handler()
+ conflict_handler(action, confl_optionals)
+
+ def _handle_conflict_error(self, action, conflicting_actions):
+ message = _('conflicting option string(s): %s')
+ conflict_string = ', '.join([option_string
+ for option_string, action
+ in conflicting_actions])
+ raise ArgumentError(action, message % conflict_string)
+
+ def _handle_conflict_resolve(self, action, conflicting_actions):
+
+ # remove all conflicting options
+ for option_string, action in conflicting_actions:
+
+ # remove the conflicting option
+ action.option_strings.remove(option_string)
+ self._option_string_actions.pop(option_string, None)
+
+ # if the option now has no option string, remove it from the
+ # container holding it
+ if not action.option_strings:
+ action.container._remove_action(action)
+
+
+class _ArgumentGroup(_ActionsContainer):
+
+ def __init__(self, container, title=None, description=None, **kwargs):
+ # add any missing keyword arguments by checking the container
+ update = kwargs.setdefault
+ update('conflict_handler', container.conflict_handler)
+ update('prefix_chars', container.prefix_chars)
+ update('argument_default', container.argument_default)
+ super_init = super(_ArgumentGroup, self).__init__
+ super_init(description=description, **kwargs)
+
+ # group attributes
+ self.title = title
+ self._group_actions = []
+
+ # share most attributes with the container
+ self._registries = container._registries
+ self._actions = container._actions
+ self._option_string_actions = container._option_string_actions
+ self._defaults = container._defaults
+ self._has_negative_number_optionals = \
+ container._has_negative_number_optionals
+
+ def _add_action(self, action):
+ action = super(_ArgumentGroup, self)._add_action(action)
+ self._group_actions.append(action)
+ return action
+
+ def _remove_action(self, action):
+ super(_ArgumentGroup, self)._remove_action(action)
+ self._group_actions.remove(action)
+
+
+class _MutuallyExclusiveGroup(_ArgumentGroup):
+
+ def __init__(self, container, required=False):
+ super(_MutuallyExclusiveGroup, self).__init__(container)
+ self.required = required
+ self._container = container
+
+ def _add_action(self, action):
+ if action.required:
+ msg = _('mutually exclusive arguments must be optional')
+ raise ValueError(msg)
+ action = self._container._add_action(action)
+ self._group_actions.append(action)
+ return action
+
+ def _remove_action(self, action):
+ self._container._remove_action(action)
+ self._group_actions.remove(action)
+
+
+class ArgumentParser(_AttributeHolder, _ActionsContainer):
+ """Object for parsing command line strings into Python objects.
+
+ Keyword Arguments:
+ - prog -- The name of the program (default: sys.argv[0])
+ - usage -- A usage message (default: auto-generated from arguments)
+ - description -- A description of what the program does
+ - epilog -- Text following the argument descriptions
+ - parents -- Parsers whose arguments should be copied into this one
+ - formatter_class -- HelpFormatter class for printing help messages
+ - prefix_chars -- Characters that prefix optional arguments
+ - fromfile_prefix_chars -- Characters that prefix files containing
+ additional arguments
+ - argument_default -- The default value for all arguments
+ - conflict_handler -- String indicating how to handle conflicts
+ - add_help -- Add a -h/-help option
+ """
+
+ def __init__(self,
+ prog=None,
+ usage=None,
+ description=None,
+ epilog=None,
+ version=None,
+ parents=[],
+ formatter_class=HelpFormatter,
+ prefix_chars='-',
+ fromfile_prefix_chars=None,
+ argument_default=None,
+ conflict_handler='error',
+ add_help=True):
+
+ if version is not None:
+ import warnings
+ warnings.warn(
+ """The "version" argument to ArgumentParser is deprecated. """
+ """Please use """
+ """"add_argument(..., action='version', version="N", ...)" """
+ """instead""", DeprecationWarning)
+
+ superinit = super(ArgumentParser, self).__init__
+ superinit(description=description,
+ prefix_chars=prefix_chars,
+ argument_default=argument_default,
+ conflict_handler=conflict_handler)
+
+ # default setting for prog
+ if prog is None:
+ prog = _os.path.basename(_sys.argv[0])
+
+ self.prog = prog
+ self.usage = usage
+ self.epilog = epilog
+ self.version = version
+ self.formatter_class = formatter_class
+ self.fromfile_prefix_chars = fromfile_prefix_chars
+ self.add_help = add_help
+
+ add_group = self.add_argument_group
+ self._positionals = add_group(_('positional arguments'))
+ self._optionals = add_group(_('optional arguments'))
+ self._subparsers = None
+
+ # register types
+ def identity(string):
+ return string
+ self.register('type', None, identity)
+
+ # add help and version arguments if necessary
+ # (using explicit default to override global argument_default)
+ if '-' in prefix_chars:
+ default_prefix = '-'
+ else:
+ default_prefix = prefix_chars[0]
+ if self.add_help:
+ self.add_argument(
+ default_prefix+'h', default_prefix*2+'help',
+ action='help', default=SUPPRESS,
+ help=_('show this help message and exit'))
+ if self.version:
+ self.add_argument(
+ default_prefix+'v', default_prefix*2+'version',
+ action='version', default=SUPPRESS,
+ version=self.version,
+ help=_("show program's version number and exit"))
+
+ # add parent arguments and defaults
+ for parent in parents:
+ self._add_container_actions(parent)
+ try:
+ defaults = parent._defaults
+ except AttributeError:
+ pass
+ else:
+ self._defaults.update(defaults)
+
+ # =======================
+ # Pretty __repr__ methods
+ # =======================
+ def _get_kwargs(self):
+ names = [
+ 'prog',
+ 'usage',
+ 'description',
+ 'version',
+ 'formatter_class',
+ 'conflict_handler',
+ 'add_help',
+ ]
+ return [(name, getattr(self, name)) for name in names]
+
+ # ==================================
+ # Optional/Positional adding methods
+ # ==================================
+ def add_subparsers(self, **kwargs):
+ if self._subparsers is not None:
+ self.error(_('cannot have multiple subparser arguments'))
+
+ # add the parser class to the arguments if it's not present
+ kwargs.setdefault('parser_class', type(self))
+
+ if 'title' in kwargs or 'description' in kwargs:
+ title = _(kwargs.pop('title', 'subcommands'))
+ description = _(kwargs.pop('description', None))
+ self._subparsers = self.add_argument_group(title, description)
+ else:
+ self._subparsers = self._positionals
+
+ # prog defaults to the usage message of this parser, skipping
+ # optional arguments and with no "usage:" prefix
+ if kwargs.get('prog') is None:
+ formatter = self._get_formatter()
+ positionals = self._get_positional_actions()
+ groups = self._mutually_exclusive_groups
+ formatter.add_usage(self.usage, positionals, groups, '')
+ kwargs['prog'] = formatter.format_help().strip()
+
+ # create the parsers action and add it to the positionals list
+ parsers_class = self._pop_action_class(kwargs, 'parsers')
+ action = parsers_class(option_strings=[], **kwargs)
+ self._subparsers._add_action(action)
+
+ # return the created parsers action
+ return action
+
+ def _add_action(self, action):
+ if action.option_strings:
+ self._optionals._add_action(action)
+ else:
+ self._positionals._add_action(action)
+ return action
+
+ def _get_optional_actions(self):
+ return [action
+ for action in self._actions
+ if action.option_strings]
+
+ def _get_positional_actions(self):
+ return [action
+ for action in self._actions
+ if not action.option_strings]
+
+ # =====================================
+ # Command line argument parsing methods
+ # =====================================
+ def parse_args(self, args=None, namespace=None):
+ args, argv = self.parse_known_args(args, namespace)
+ if argv:
+ msg = _('unrecognized arguments: %s')
+ self.error(msg % ' '.join(argv))
+ return args
+
+ def parse_known_args(self, args=None, namespace=None):
+ # args default to the system args
+ if args is None:
+ args = _sys.argv[1:]
+
+ # default Namespace built from parser defaults
+ if namespace is None:
+ namespace = Namespace()
+
+ # add any action defaults that aren't present
+ for action in self._actions:
+ if action.dest is not SUPPRESS:
+ if not hasattr(namespace, action.dest):
+ if action.default is not SUPPRESS:
+ default = action.default
+ if isinstance(action.default, basestring):
+ default = self._get_value(action, default)
+ setattr(namespace, action.dest, default)
+
+ # add any parser defaults that aren't present
+ for dest in self._defaults:
+ if not hasattr(namespace, dest):
+ setattr(namespace, dest, self._defaults[dest])
+
+ # parse the arguments and exit if there are any errors
+ try:
+ namespace, args = self._parse_known_args(args, namespace)
+ if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
+ args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
+ delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
+ return namespace, args
+ except ArgumentError:
+ err = _sys.exc_info()[1]
+ self.error(str(err))
+
+ def _parse_known_args(self, arg_strings, namespace):
+ # replace arg strings that are file references
+ if self.fromfile_prefix_chars is not None:
+ arg_strings = self._read_args_from_files(arg_strings)
+
+ # map all mutually exclusive arguments to the other arguments
+ # they can't occur with
+ action_conflicts = {}
+ for mutex_group in self._mutually_exclusive_groups:
+ group_actions = mutex_group._group_actions
+ for i, mutex_action in enumerate(mutex_group._group_actions):
+ conflicts = action_conflicts.setdefault(mutex_action, [])
+ conflicts.extend(group_actions[:i])
+ conflicts.extend(group_actions[i + 1:])
+
+ # find all option indices, and determine the arg_string_pattern
+ # which has an 'O' if there is an option at an index,
+ # an 'A' if there is an argument, or a '-' if there is a '--'
+ option_string_indices = {}
+ arg_string_pattern_parts = []
+ arg_strings_iter = iter(arg_strings)
+ for i, arg_string in enumerate(arg_strings_iter):
+
+ # all args after -- are non-options
+ if arg_string == '--':
+ arg_string_pattern_parts.append('-')
+ for arg_string in arg_strings_iter:
+ arg_string_pattern_parts.append('A')
+
+ # otherwise, add the arg to the arg strings
+ # and note the index if it was an option
+ else:
+ option_tuple = self._parse_optional(arg_string)
+ if option_tuple is None:
+ pattern = 'A'
+ else:
+ option_string_indices[i] = option_tuple
+ pattern = 'O'
+ arg_string_pattern_parts.append(pattern)
+
+ # join the pieces together to form the pattern
+ arg_strings_pattern = ''.join(arg_string_pattern_parts)
+
+ # converts arg strings to the appropriate and then takes the action
+ seen_actions = set()
+ seen_non_default_actions = set()
+
+ def take_action(action, argument_strings, option_string=None):
+ seen_actions.add(action)
+ argument_values = self._get_values(action, argument_strings)
+
+ # error if this argument is not allowed with other previously
+ # seen arguments, assuming that actions that use the default
+ # value don't really count as "present"
+ if argument_values is not action.default:
+ seen_non_default_actions.add(action)
+ for conflict_action in action_conflicts.get(action, []):
+ if conflict_action in seen_non_default_actions:
+ msg = _('not allowed with argument %s')
+ action_name = _get_action_name(conflict_action)
+ raise ArgumentError(action, msg % action_name)
+
+ # take the action if we didn't receive a SUPPRESS value
+ # (e.g. from a default)
+ if argument_values is not SUPPRESS:
+ action(self, namespace, argument_values, option_string)
+
+ # function to convert arg_strings into an optional action
+ def consume_optional(start_index):
+
+ # get the optional identified at this index
+ option_tuple = option_string_indices[start_index]
+ action, option_string, explicit_arg = option_tuple
+
+ # identify additional optionals in the same arg string
+ # (e.g. -xyz is the same as -x -y -z if no args are required)
+ match_argument = self._match_argument
+ action_tuples = []
+ while True:
+
+ # if we found no optional action, skip it
+ if action is None:
+ extras.append(arg_strings[start_index])
+ return start_index + 1
+
+ # if there is an explicit argument, try to match the
+ # optional's string arguments to only this
+ if explicit_arg is not None:
+ arg_count = match_argument(action, 'A')
+
+ # if the action is a single-dash option and takes no
+ # arguments, try to parse more single-dash options out
+ # of the tail of the option string
+ chars = self.prefix_chars
+ if arg_count == 0 and option_string[1] not in chars:
+ action_tuples.append((action, [], option_string))
+ char = option_string[0]
+ option_string = char + explicit_arg[0]
+ new_explicit_arg = explicit_arg[1:] or None
+ optionals_map = self._option_string_actions
+ if option_string in optionals_map:
+ action = optionals_map[option_string]
+ explicit_arg = new_explicit_arg
+ else:
+ msg = _('ignored explicit argument %r')
+ raise ArgumentError(action, msg % explicit_arg)
+
+ # if the action expect exactly one argument, we've
+ # successfully matched the option; exit the loop
+ elif arg_count == 1:
+ stop = start_index + 1
+ args = [explicit_arg]
+ action_tuples.append((action, args, option_string))
+ break
+
+ # error if a double-dash option did not use the
+ # explicit argument
+ else:
+ msg = _('ignored explicit argument %r')
+ raise ArgumentError(action, msg % explicit_arg)
+
+ # if there is no explicit argument, try to match the
+ # optional's string arguments with the following strings
+ # if successful, exit the loop
+ else:
+ start = start_index + 1
+ selected_patterns = arg_strings_pattern[start:]
+ arg_count = match_argument(action, selected_patterns)
+ stop = start + arg_count
+ args = arg_strings[start:stop]
+ action_tuples.append((action, args, option_string))
+ break
+
+ # add the Optional to the list and return the index at which
+ # the Optional's string args stopped
+ assert action_tuples
+ for action, args, option_string in action_tuples:
+ take_action(action, args, option_string)
+ return stop
+
+ # the list of Positionals left to be parsed; this is modified
+ # by consume_positionals()
+ positionals = self._get_positional_actions()
+
+ # function to convert arg_strings into positional actions
+ def consume_positionals(start_index):
+ # match as many Positionals as possible
+ match_partial = self._match_arguments_partial
+ selected_pattern = arg_strings_pattern[start_index:]
+ arg_counts = match_partial(positionals, selected_pattern)
+
+ # slice off the appropriate arg strings for each Positional
+ # and add the Positional and its args to the list
+ for action, arg_count in zip(positionals, arg_counts):
+ args = arg_strings[start_index: start_index + arg_count]
+ start_index += arg_count
+ take_action(action, args)
+
+ # slice off the Positionals that we just parsed and return the
+ # index at which the Positionals' string args stopped
+ positionals[:] = positionals[len(arg_counts):]
+ return start_index
+
+ # consume Positionals and Optionals alternately, until we have
+ # passed the last option string
+ extras = []
+ start_index = 0
+ if option_string_indices:
+ max_option_string_index = max(option_string_indices)
+ else:
+ max_option_string_index = -1
+ while start_index <= max_option_string_index:
+
+ # consume any Positionals preceding the next option
+ next_option_string_index = min([
+ index
+ for index in option_string_indices
+ if index >= start_index])
+ if start_index != next_option_string_index:
+ positionals_end_index = consume_positionals(start_index)
+
+ # only try to parse the next optional if we didn't consume
+ # the option string during the positionals parsing
+ if positionals_end_index > start_index:
+ start_index = positionals_end_index
+ continue
+ else:
+ start_index = positionals_end_index
+
+ # if we consumed all the positionals we could and we're not
+ # at the index of an option string, there were extra arguments
+ if start_index not in option_string_indices:
+ strings = arg_strings[start_index:next_option_string_index]
+ extras.extend(strings)
+ start_index = next_option_string_index
+
+ # consume the next optional and any arguments for it
+ start_index = consume_optional(start_index)
+
+ # consume any positionals following the last Optional
+ stop_index = consume_positionals(start_index)
+
+ # if we didn't consume all the argument strings, there were extras
+ extras.extend(arg_strings[stop_index:])
+
+ # if we didn't use all the Positional objects, there were too few
+ # arg strings supplied.
+ if positionals:
+ self.error(_('too few arguments'))
+
+ # make sure all required actions were present
+ for action in self._actions:
+ if action.required:
+ if action not in seen_actions:
+ name = _get_action_name(action)
+ self.error(_('argument %s is required') % name)
+
+ # make sure all required groups had one option present
+ for group in self._mutually_exclusive_groups:
+ if group.required:
+ for action in group._group_actions:
+ if action in seen_non_default_actions:
+ break
+
+ # if no actions were used, report the error
+ else:
+ names = [_get_action_name(action)
+ for action in group._group_actions
+ if action.help is not SUPPRESS]
+ msg = _('one of the arguments %s is required')
+ self.error(msg % ' '.join(names))
+
+ # return the updated namespace and the extra arguments
+ return namespace, extras
+
+ def _read_args_from_files(self, arg_strings):
+ # expand arguments referencing files
+ new_arg_strings = []
+ for arg_string in arg_strings:
+
+ # for regular arguments, just add them back into the list
+ if arg_string[0] not in self.fromfile_prefix_chars:
+ new_arg_strings.append(arg_string)
+
+ # replace arguments referencing files with the file content
+ else:
+ try:
+ args_file = open(arg_string[1:])
+ try:
+ arg_strings = []
+ for arg_line in args_file.read().splitlines():
+ for arg in self.convert_arg_line_to_args(arg_line):
+ arg_strings.append(arg)
+ arg_strings = self._read_args_from_files(arg_strings)
+ new_arg_strings.extend(arg_strings)
+ finally:
+ args_file.close()
+ except IOError:
+ err = _sys.exc_info()[1]
+ self.error(str(err))
+
+ # return the modified argument list
+ return new_arg_strings
+
+ def convert_arg_line_to_args(self, arg_line):
+ return [arg_line]
+
+ def _match_argument(self, action, arg_strings_pattern):
+ # match the pattern for this action to the arg strings
+ nargs_pattern = self._get_nargs_pattern(action)
+ match = _re.match(nargs_pattern, arg_strings_pattern)
+
+ # raise an exception if we weren't able to find a match
+ if match is None:
+ nargs_errors = {
+ None: _('expected one argument'),
+ OPTIONAL: _('expected at most one argument'),
+ ONE_OR_MORE: _('expected at least one argument'),
+ }
+ default = _('expected %s argument(s)') % action.nargs
+ msg = nargs_errors.get(action.nargs, default)
+ raise ArgumentError(action, msg)
+
+ # return the number of arguments matched
+ return len(match.group(1))
+
+ def _match_arguments_partial(self, actions, arg_strings_pattern):
+ # progressively shorten the actions list by slicing off the
+ # final actions until we find a match
+ result = []
+ for i in range(len(actions), 0, -1):
+ actions_slice = actions[:i]
+ pattern = ''.join([self._get_nargs_pattern(action)
+ for action in actions_slice])
+ match = _re.match(pattern, arg_strings_pattern)
+ if match is not None:
+ result.extend([len(string) for string in match.groups()])
+ break
+
+ # return the list of arg string counts
+ return result
+
+ def _parse_optional(self, arg_string):
+ # if it's an empty string, it was meant to be a positional
+ if not arg_string:
+ return None
+
+ # if it doesn't start with a prefix, it was meant to be positional
+ if not arg_string[0] in self.prefix_chars:
+ return None
+
+ # if the option string is present in the parser, return the action
+ if arg_string in self._option_string_actions:
+ action = self._option_string_actions[arg_string]
+ return action, arg_string, None
+
+ # if it's just a single character, it was meant to be positional
+ if len(arg_string) == 1:
+ return None
+
+ # if the option string before the "=" is present, return the action
+ if '=' in arg_string:
+ option_string, explicit_arg = arg_string.split('=', 1)
+ if option_string in self._option_string_actions:
+ action = self._option_string_actions[option_string]
+ return action, option_string, explicit_arg
+
+ # search through all possible prefixes of the option string
+ # and all actions in the parser for possible interpretations
+ option_tuples = self._get_option_tuples(arg_string)
+
+ # if multiple actions match, the option string was ambiguous
+ if len(option_tuples) > 1:
+ options = ', '.join([option_string
+ for action, option_string, explicit_arg in option_tuples])
+ tup = arg_string, options
+ self.error(_('ambiguous option: %s could match %s') % tup)
+
+ # if exactly one action matched, this segmentation is good,
+ # so return the parsed action
+ elif len(option_tuples) == 1:
+ option_tuple, = option_tuples
+ return option_tuple
+
+ # if it was not found as an option, but it looks like a negative
+ # number, it was meant to be positional
+ # unless there are negative-number-like options
+ if self._negative_number_matcher.match(arg_string):
+ if not self._has_negative_number_optionals:
+ return None
+
+ # if it contains a space, it was meant to be a positional
+ if ' ' in arg_string:
+ return None
+
+ # it was meant to be an optional but there is no such option
+ # in this parser (though it might be a valid option in a subparser)
+ return None, arg_string, None
+
+ def _get_option_tuples(self, option_string):
+ result = []
+
+ # option strings starting with two prefix characters are only
+ # split at the '='
+ chars = self.prefix_chars
+ if option_string[0] in chars and option_string[1] in chars:
+ if '=' in option_string:
+ option_prefix, explicit_arg = option_string.split('=', 1)
+ else:
+ option_prefix = option_string
+ explicit_arg = None
+ for option_string in self._option_string_actions:
+ if option_string.startswith(option_prefix):
+ action = self._option_string_actions[option_string]
+ tup = action, option_string, explicit_arg
+ result.append(tup)
+
+ # single character options can be concatenated with their arguments
+ # but multiple character options always have to have their argument
+ # separate
+ elif option_string[0] in chars and option_string[1] not in chars:
+ option_prefix = option_string
+ explicit_arg = None
+ short_option_prefix = option_string[:2]
+ short_explicit_arg = option_string[2:]
+
+ for option_string in self._option_string_actions:
+ if option_string == short_option_prefix:
+ action = self._option_string_actions[option_string]
+ tup = action, option_string, short_explicit_arg
+ result.append(tup)
+ elif option_string.startswith(option_prefix):
+ action = self._option_string_actions[option_string]
+ tup = action, option_string, explicit_arg
+ result.append(tup)
+
+ # shouldn't ever get here
+ else:
+ self.error(_('unexpected option string: %s') % option_string)
+
+ # return the collected option tuples
+ return result
+
+ def _get_nargs_pattern(self, action):
+ # in all examples below, we have to allow for '--' args
+ # which are represented as '-' in the pattern
+ nargs = action.nargs
+
+ # the default (None) is assumed to be a single argument
+ if nargs is None:
+ nargs_pattern = '(-*A-*)'
+
+ # allow zero or one arguments
+ elif nargs == OPTIONAL:
+ nargs_pattern = '(-*A?-*)'
+
+ # allow zero or more arguments
+ elif nargs == ZERO_OR_MORE:
+ nargs_pattern = '(-*[A-]*)'
+
+ # allow one or more arguments
+ elif nargs == ONE_OR_MORE:
+ nargs_pattern = '(-*A[A-]*)'
+
+ # allow any number of options or arguments
+ elif nargs == REMAINDER:
+ nargs_pattern = '([-AO]*)'
+
+ # allow one argument followed by any number of options or arguments
+ elif nargs == PARSER:
+ nargs_pattern = '(-*A[-AO]*)'
+
+ # all others should be integers
+ else:
+ nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
+
+ # if this is an optional action, -- is not allowed
+ if action.option_strings:
+ nargs_pattern = nargs_pattern.replace('-*', '')
+ nargs_pattern = nargs_pattern.replace('-', '')
+
+ # return the pattern
+ return nargs_pattern
+
+ # ========================
+ # Value conversion methods
+ # ========================
+ def _get_values(self, action, arg_strings):
+ # for everything but PARSER args, strip out '--'
+ if action.nargs not in [PARSER, REMAINDER]:
+ arg_strings = [s for s in arg_strings if s != '--']
+
+ # optional argument produces a default when not present
+ if not arg_strings and action.nargs == OPTIONAL:
+ if action.option_strings:
+ value = action.const
+ else:
+ value = action.default
+ if isinstance(value, basestring):
+ value = self._get_value(action, value)
+ self._check_value(action, value)
+
+ # when nargs='*' on a positional, if there were no command-line
+ # args, use the default if it is anything other than None
+ elif (not arg_strings and action.nargs == ZERO_OR_MORE and
+ not action.option_strings):
+ if action.default is not None:
+ value = action.default
+ else:
+ value = arg_strings
+ self._check_value(action, value)
+
+ # single argument or optional argument produces a single value
+ elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
+ arg_string, = arg_strings
+ value = self._get_value(action, arg_string)
+ self._check_value(action, value)
+
+ # REMAINDER arguments convert all values, checking none
+ elif action.nargs == REMAINDER:
+ value = [self._get_value(action, v) for v in arg_strings]
+
+ # PARSER arguments convert all values, but check only the first
+ elif action.nargs == PARSER:
+ value = [self._get_value(action, v) for v in arg_strings]
+ self._check_value(action, value[0])
+
+ # all other types of nargs produce a list
+ else:
+ value = [self._get_value(action, v) for v in arg_strings]
+ for v in value:
+ self._check_value(action, v)
+
+ # return the converted value
+ return value
+
+ def _get_value(self, action, arg_string):
+ type_func = self._registry_get('type', action.type, action.type)
+ if not _callable(type_func):
+ msg = _('%r is not callable')
+ raise ArgumentError(action, msg % type_func)
+
+ # convert the value to the appropriate type
+ try:
+ result = type_func(arg_string)
+
+ # ArgumentTypeErrors indicate errors
+ except ArgumentTypeError:
+ name = getattr(action.type, '__name__', repr(action.type))
+ msg = str(_sys.exc_info()[1])
+ raise ArgumentError(action, msg)
+
+ # TypeErrors or ValueErrors also indicate errors
+ except (TypeError, ValueError):
+ name = getattr(action.type, '__name__', repr(action.type))
+ msg = _('invalid %s value: %r')
+ raise ArgumentError(action, msg % (name, arg_string))
+
+ # return the converted value
+ return result
+
+ def _check_value(self, action, value):
+ # converted value must be one of the choices (if specified)
+ if action.choices is not None and value not in action.choices:
+ tup = value, ', '.join(map(repr, action.choices))
+ msg = _('invalid choice: %r (choose from %s)') % tup
+ raise ArgumentError(action, msg)
+
+ # =======================
+ # Help-formatting methods
+ # =======================
+ def format_usage(self):
+ formatter = self._get_formatter()
+ formatter.add_usage(self.usage, self._actions,
+ self._mutually_exclusive_groups)
+ return formatter.format_help()
+
+ def format_help(self):
+ formatter = self._get_formatter()
+
+ # usage
+ formatter.add_usage(self.usage, self._actions,
+ self._mutually_exclusive_groups)
+
+ # description
+ formatter.add_text(self.description)
+
+ # positionals, optionals and user-defined groups
+ for action_group in self._action_groups:
+ formatter.start_section(action_group.title)
+ formatter.add_text(action_group.description)
+ formatter.add_arguments(action_group._group_actions)
+ formatter.end_section()
+
+ # epilog
+ formatter.add_text(self.epilog)
+
+ # determine help from format above
+ return formatter.format_help()
+
+ def format_version(self):
+ import warnings
+ warnings.warn(
+ 'The format_version method is deprecated -- the "version" '
+ 'argument to ArgumentParser is no longer supported.',
+ DeprecationWarning)
+ formatter = self._get_formatter()
+ formatter.add_text(self.version)
+ return formatter.format_help()
+
+ def _get_formatter(self):
+ return self.formatter_class(prog=self.prog)
+
+ # =====================
+ # Help-printing methods
+ # =====================
+ def print_usage(self, file=None):
+ if file is None:
+ file = _sys.stdout
+ self._print_message(self.format_usage(), file)
+
+ def print_help(self, file=None):
+ if file is None:
+ file = _sys.stdout
+ self._print_message(self.format_help(), file)
+
+ def print_version(self, file=None):
+ import warnings
+ warnings.warn(
+ 'The print_version method is deprecated -- the "version" '
+ 'argument to ArgumentParser is no longer supported.',
+ DeprecationWarning)
+ self._print_message(self.format_version(), file)
+
+ def _print_message(self, message, file=None):
+ if message:
+ if file is None:
+ file = _sys.stderr
+ file.write(message)
+
+ # ===============
+ # Exiting methods
+ # ===============
+ def exit(self, status=0, message=None):
+ if message:
+ self._print_message(message, _sys.stderr)
+ _sys.exit(status)
+
+ def error(self, message):
+ """error(message: string)
+
+ Prints a usage message incorporating the message to stderr and
+ exits.
+
+ If you override this in a subclass, it should not return -- it
+ should either exit or raise an exception.
+ """
+ self.print_usage(_sys.stderr)
+ self.exit(2, _('%s: error: %s\n') % (self.prog, message))
diff --git a/lib/spack/external/functools.py b/lib/spack/external/functools.py
new file mode 100644
index 0000000000..19f0903c82
--- /dev/null
+++ b/lib/spack/external/functools.py
@@ -0,0 +1,30 @@
+#
+# Backport of Python 2.7's total_ordering.
+#
+
+def total_ordering(cls):
+ """Class decorator that fills in missing ordering methods"""
+ convert = {
+ '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
+ ('__le__', lambda self, other: self < other or self == other),
+ ('__ge__', lambda self, other: not self < other)],
+ '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
+ ('__lt__', lambda self, other: self <= other and not self == other),
+ ('__gt__', lambda self, other: not self <= other)],
+ '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
+ ('__ge__', lambda self, other: self > other or self == other),
+ ('__le__', lambda self, other: not self > other)],
+ '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
+ ('__gt__', lambda self, other: self >= other and not self == other),
+ ('__lt__', lambda self, other: not self >= other)]
+ }
+ roots = set(dir(cls)) & set(convert)
+ if not roots:
+ raise ValueError('must define at least one ordering operation: < > <= >=')
+ root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
+ for opname, opfunc in convert[root]:
+ if opname not in roots:
+ opfunc.__name__ = opname
+ opfunc.__doc__ = getattr(int, opname).__doc__
+ setattr(cls, opname, opfunc)
+ return cls
diff --git a/lib/spack/external/ordereddict.py b/lib/spack/external/ordereddict.py
new file mode 100644
index 0000000000..8ddad1477e
--- /dev/null
+++ b/lib/spack/external/ordereddict.py
@@ -0,0 +1,262 @@
+#
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+#
+# From http://code.activestate.com/recipes/576693-ordered-dictionary-for-py24/
+# This file is in the public domain, and has no particular license.
+#
+try:
+ from thread import get_ident as _get_ident
+except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+except ImportError:
+ pass
+
+
+class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+ 'od.itervalues -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+ 'od.iteritems -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running={}):
+ 'od.__repr__() <==> repr(od)'
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
diff --git a/lib/spack/external/pyqver2.py b/lib/spack/external/pyqver2.py
new file mode 100755
index 0000000000..cd45bf948f
--- /dev/null
+++ b/lib/spack/external/pyqver2.py
@@ -0,0 +1,393 @@
+#!/usr/bin/env python
+#
+# pyqver2.py
+# by Greg Hewgill
+# https://github.com/ghewgill/pyqver
+#
+# This software is provided 'as-is', without any express or implied
+# warranty. In no event will the author be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+# claim that you wrote the original software. If you use this software
+# in a product, an acknowledgment in the product documentation would be
+# appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+# misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+#
+# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com
+#
+
+import compiler
+import platform
+import sys
+
+StandardModules = {
+ "__future__": (2, 1),
+ "abc": (2, 6),
+ "argparse": (2, 7),
+ "ast": (2, 6),
+ "atexit": (2, 0),
+ "bz2": (2, 3),
+ "cgitb": (2, 2),
+ "collections": (2, 4),
+ "contextlib": (2, 5),
+ "cookielib": (2, 4),
+ "cProfile": (2, 5),
+ "csv": (2, 3),
+ "ctypes": (2, 5),
+ "datetime": (2, 3),
+ "decimal": (2, 4),
+ "difflib": (2, 1),
+ "DocXMLRPCServer": (2, 3),
+ "dummy_thread": (2, 3),
+ "dummy_threading": (2, 3),
+ "email": (2, 2),
+ "fractions": (2, 6),
+ "functools": (2, 5),
+ "future_builtins": (2, 6),
+ "hashlib": (2, 5),
+ "heapq": (2, 3),
+ "hmac": (2, 2),
+ "hotshot": (2, 2),
+ "HTMLParser": (2, 2),
+ "importlib": (2, 7),
+ "inspect": (2, 1),
+ "io": (2, 6),
+ "itertools": (2, 3),
+ "json": (2, 6),
+ "logging": (2, 3),
+ "modulefinder": (2, 3),
+ "msilib": (2, 5),
+ "multiprocessing": (2, 6),
+ "netrc": (1, 5, 2),
+ "numbers": (2, 6),
+ "optparse": (2, 3),
+ "ossaudiodev": (2, 3),
+ "pickletools": (2, 3),
+ "pkgutil": (2, 3),
+ "platform": (2, 3),
+ "pydoc": (2, 1),
+ "runpy": (2, 5),
+ "sets": (2, 3),
+ "shlex": (1, 5, 2),
+ "SimpleXMLRPCServer": (2, 2),
+ "spwd": (2, 5),
+ "sqlite3": (2, 5),
+ "ssl": (2, 6),
+ "stringprep": (2, 3),
+ "subprocess": (2, 4),
+ "sysconfig": (2, 7),
+ "tarfile": (2, 3),
+ "textwrap": (2, 3),
+ "timeit": (2, 3),
+ "unittest": (2, 1),
+ "uuid": (2, 5),
+ "warnings": (2, 1),
+ "weakref": (2, 1),
+ "winsound": (1, 5, 2),
+ "wsgiref": (2, 5),
+ "xml.dom": (2, 0),
+ "xml.dom.minidom": (2, 0),
+ "xml.dom.pulldom": (2, 0),
+ "xml.etree.ElementTree": (2, 5),
+ "xml.parsers.expat":(2, 0),
+ "xml.sax": (2, 0),
+ "xml.sax.handler": (2, 0),
+ "xml.sax.saxutils": (2, 0),
+ "xml.sax.xmlreader":(2, 0),
+ "xmlrpclib": (2, 2),
+ "zipfile": (1, 6),
+ "zipimport": (2, 3),
+ "_ast": (2, 5),
+ "_winreg": (2, 0),
+}
+
+Functions = {
+ "all": (2, 5),
+ "any": (2, 5),
+ "collections.Counter": (2, 7),
+ "collections.defaultdict": (2, 5),
+ "collections.OrderedDict": (2, 7),
+ "functools.total_ordering": (2, 7),
+ "enumerate": (2, 3),
+ "frozenset": (2, 4),
+ "itertools.compress": (2, 7),
+ "math.erf": (2, 7),
+ "math.erfc": (2, 7),
+ "math.expm1": (2, 7),
+ "math.gamma": (2, 7),
+ "math.lgamma": (2, 7),
+ "memoryview": (2, 7),
+ "next": (2, 6),
+ "os.getresgid": (2, 7),
+ "os.getresuid": (2, 7),
+ "os.initgroups": (2, 7),
+ "os.setresgid": (2, 7),
+ "os.setresuid": (2, 7),
+ "reversed": (2, 4),
+ "set": (2, 4),
+ "subprocess.check_call": (2, 5),
+ "subprocess.check_output": (2, 7),
+ "sum": (2, 3),
+ "symtable.is_declared_global": (2, 7),
+ "weakref.WeakSet": (2, 7),
+}
+
+Identifiers = {
+ "False": (2, 2),
+ "True": (2, 2),
+}
+
+def uniq(a):
+ if len(a) == 0:
+ return []
+ else:
+ return [a[0]] + uniq([x for x in a if x != a[0]])
+
+class NodeChecker(object):
+ def __init__(self):
+ self.vers = dict()
+ self.vers[(2,0)] = []
+ def add(self, node, ver, msg):
+ if ver not in self.vers:
+ self.vers[ver] = []
+ self.vers[ver].append((node.lineno, msg))
+ def default(self, node):
+ for child in node.getChildNodes():
+ self.visit(child)
+ def visitCallFunc(self, node):
+ def rollup(n):
+ if isinstance(n, compiler.ast.Name):
+ return n.name
+ elif isinstance(n, compiler.ast.Getattr):
+ r = rollup(n.expr)
+ if r:
+ return r + "." + n.attrname
+ name = rollup(node.node)
+ if name:
+ v = Functions.get(name)
+ if v is not None:
+ self.add(node, v, name)
+ self.default(node)
+ def visitClass(self, node):
+ if node.bases:
+ self.add(node, (2,2), "new-style class")
+ if node.decorators:
+ self.add(node, (2,6), "class decorator")
+ self.default(node)
+ def visitDictComp(self, node):
+ self.add(node, (2,7), "dictionary comprehension")
+ self.default(node)
+ def visitFloorDiv(self, node):
+ self.add(node, (2,2), "// operator")
+ self.default(node)
+ def visitFrom(self, node):
+ v = StandardModules.get(node.modname)
+ if v is not None:
+ self.add(node, v, node.modname)
+ for n in node.names:
+ name = node.modname + "." + n[0]
+ v = Functions.get(name)
+ if v is not None:
+ self.add(node, v, name)
+ def visitFunction(self, node):
+ if node.decorators:
+ self.add(node, (2,4), "function decorator")
+ self.default(node)
+ def visitGenExpr(self, node):
+ self.add(node, (2,4), "generator expression")
+ self.default(node)
+ def visitGetattr(self, node):
+ if (isinstance(node.expr, compiler.ast.Const)
+ and isinstance(node.expr.value, str)
+ and node.attrname == "format"):
+ self.add(node, (2,6), "string literal .format()")
+ self.default(node)
+ def visitIfExp(self, node):
+ self.add(node, (2,5), "inline if expression")
+ self.default(node)
+ def visitImport(self, node):
+ for n in node.names:
+ v = StandardModules.get(n[0])
+ if v is not None:
+ self.add(node, v, n[0])
+ self.default(node)
+ def visitName(self, node):
+ v = Identifiers.get(node.name)
+ if v is not None:
+ self.add(node, v, node.name)
+ self.default(node)
+ def visitSet(self, node):
+ self.add(node, (2,7), "set literal")
+ self.default(node)
+ def visitSetComp(self, node):
+ self.add(node, (2,7), "set comprehension")
+ self.default(node)
+ def visitTryFinally(self, node):
+ # try/finally with a suite generates a Stmt node as the body,
+ # but try/except/finally generates a TryExcept as the body
+ if isinstance(node.body, compiler.ast.TryExcept):
+ self.add(node, (2,5), "try/except/finally")
+ self.default(node)
+ def visitWith(self, node):
+ if isinstance(node.body, compiler.ast.With):
+ self.add(node, (2,7), "with statement with multiple contexts")
+ else:
+ self.add(node, (2,5), "with statement")
+ self.default(node)
+ def visitYield(self, node):
+ self.add(node, (2,2), "yield expression")
+ self.default(node)
+
+def get_versions(source):
+ """Return information about the Python versions required for specific features.
+
+ The return value is a dictionary with keys as a version number as a tuple
+ (for example Python 2.6 is (2,6)) and the value are a list of features that
+ require the indicated Python version.
+ """
+ tree = compiler.parse(source)
+ checker = compiler.walk(tree, NodeChecker())
+ return checker.vers
+
+def v27(source):
+ if sys.version_info >= (2, 7):
+ return qver(source)
+ else:
+ print >>sys.stderr, "Not all features tested, run --test with Python 2.7"
+ return (2, 7)
+
+def qver(source):
+ """Return the minimum Python version required to run a particular bit of code.
+
+ >>> qver('print "hello world"')
+ (2, 0)
+ >>> qver('class test(object): pass')
+ (2, 2)
+ >>> qver('yield 1')
+ (2, 2)
+ >>> qver('a // b')
+ (2, 2)
+ >>> qver('True')
+ (2, 2)
+ >>> qver('enumerate(a)')
+ (2, 3)
+ >>> qver('total = sum')
+ (2, 0)
+ >>> qver('sum(a)')
+ (2, 3)
+ >>> qver('(x*x for x in range(5))')
+ (2, 4)
+ >>> qver('class C:\\n @classmethod\\n def m(): pass')
+ (2, 4)
+ >>> qver('y if x else z')
+ (2, 5)
+ >>> qver('import hashlib')
+ (2, 5)
+ >>> qver('from hashlib import md5')
+ (2, 5)
+ >>> qver('import xml.etree.ElementTree')
+ (2, 5)
+ >>> qver('try:\\n try: pass;\\n except: pass;\\nfinally: pass')
+ (2, 0)
+ >>> qver('try: pass;\\nexcept: pass;\\nfinally: pass')
+ (2, 5)
+ >>> qver('from __future__ import with_statement\\nwith x: pass')
+ (2, 5)
+ >>> qver('collections.defaultdict(list)')
+ (2, 5)
+ >>> qver('from collections import defaultdict')
+ (2, 5)
+ >>> qver('"{0}".format(0)')
+ (2, 6)
+ >>> qver('memoryview(x)')
+ (2, 7)
+ >>> v27('{1, 2, 3}')
+ (2, 7)
+ >>> v27('{x for x in s}')
+ (2, 7)
+ >>> v27('{x: y for x in s}')
+ (2, 7)
+ >>> qver('from __future__ import with_statement\\nwith x:\\n with y: pass')
+ (2, 5)
+ >>> v27('from __future__ import with_statement\\nwith x, y: pass')
+ (2, 7)
+ >>> qver('@decorator\\ndef f(): pass')
+ (2, 4)
+ >>> qver('@decorator\\nclass test:\\n pass')
+ (2, 6)
+
+ #>>> qver('0o0')
+ #(2, 6)
+ #>>> qver('@foo\\nclass C: pass')
+ #(2, 6)
+ """
+ return max(get_versions(source).keys())
+
+
+if __name__ == '__main__':
+
+ Verbose = False
+ MinVersion = (2, 3)
+ Lint = False
+
+ files = []
+ i = 1
+ while i < len(sys.argv):
+ a = sys.argv[i]
+ if a == "--test":
+ import doctest
+ doctest.testmod()
+ sys.exit(0)
+ if a == "-v" or a == "--verbose":
+ Verbose = True
+ elif a == "-l" or a == "--lint":
+ Lint = True
+ elif a == "-m" or a == "--min-version":
+ i += 1
+ MinVersion = tuple(map(int, sys.argv[i].split(".")))
+ else:
+ files.append(a)
+ i += 1
+
+ if not files:
+ print >>sys.stderr, """Usage: %s [options] source ...
+
+ Report minimum Python version required to run given source files.
+
+ -m x.y or --min-version x.y (default 2.3)
+ report version triggers at or above version x.y in verbose mode
+ -v or --verbose
+ print more detailed report of version triggers for each version
+ """ % sys.argv[0]
+ sys.exit(1)
+
+ for fn in files:
+ try:
+ f = open(fn)
+ source = f.read()
+ f.close()
+ ver = get_versions(source)
+ if Verbose:
+ print fn
+ for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
+ reasons = [x for x in uniq(ver[v]) if x]
+ if reasons:
+ # each reason is (lineno, message)
+ print "\t%s\t%s" % (".".join(map(str, v)), ", ".join([x[1] for x in reasons]))
+ elif Lint:
+ for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
+ reasons = [x for x in uniq(ver[v]) if x]
+ for r in reasons:
+ # each reason is (lineno, message)
+ print "%s:%s: %s %s" % (fn, r[0], ".".join(map(str, v)), r[1])
+ else:
+ print "%s\t%s" % (".".join(map(str, max(ver.keys()))), fn)
+ except SyntaxError, x:
+ print "%s: syntax error compiling with Python %s: %s" % (fn, platform.python_version(), x)
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 50fe453cfb..bf91a885ca 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -22,20 +22,6 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
-#
-# When packages call 'from spack import *', this is what is brought in.
-#
-# Spack internal code calls 'import spack' and accesses other
-# variables (spack.db, paths, etc.) directly.
-#
-# TODO: maybe this should be separated out and should go in build_environment.py?
-# TODO: it's not clear where all the stuff that needs to be included in packages
-# should live. This file is overloaded for spack core vs. for packages.
-__all__ = ['Package', 'when', 'provides', 'depends_on', 'version',
- 'patch', 'Version', 'working_dir', 'which', 'Executable',
- 'filter_file', 'change_sed_delimiter']
-
import os
import tempfile
from llnl.util.filesystem import *
@@ -58,7 +44,6 @@ var_path = join_path(prefix, "var", "spack")
stage_path = join_path(var_path, "stage")
install_path = join_path(prefix, "opt")
share_path = join_path(prefix, "share", "spack")
-dotkit_path = join_path(share_path, "dotkit")
#
# Set up the packages database.
@@ -81,7 +66,7 @@ mock_user_config = join_path(mock_config_path, "user_spackconfig")
# stage directories.
#
from spack.directory_layout import SpecHashDirectoryLayout
-install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)
+install_layout = SpecHashDirectoryLayout(install_path)
#
# This controls how things are concretized in spack.
@@ -141,11 +126,30 @@ do_checksum = True
#
sys_type = None
+
+#
+# When packages call 'from spack import *', this extra stuff is brought in.
+#
+# Spack internal code should call 'import spack' and accesses other
+# variables (spack.db, paths, etc.) directly.
#
-# Extra imports that should be generally usable from package.py files.
+# TODO: maybe this should be separated out and should go in build_environment.py?
+# TODO: it's not clear where all the stuff that needs to be included in packages
+# should live. This file is overloaded for spack core vs. for packages.
#
-from llnl.util.filesystem import working_dir
+__all__ = ['Package', 'Version', 'when']
from spack.package import Package
-from spack.relations import *
-from spack.multimethod import when
from spack.version import Version
+from spack.multimethod import when
+
+import llnl.util.filesystem
+from llnl.util.filesystem import *
+__all__ += llnl.util.filesystem.__all__
+
+import spack.relations
+from spack.relations import *
+__all__ += spack.relations.__all__
+
+import spack.util.executable
+from spack.util.executable import *
+__all__ += spack.util.executable.__all__
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 38d5f70282..182a5629fa 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -122,7 +122,7 @@ def set_build_environment_variables(pkg):
# Prefixes of all of the package's dependencies go in
# SPACK_DEPENDENCIES
- dep_prefixes = [d.package.prefix for d in pkg.spec.dependencies.values()]
+ dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
path_set(SPACK_DEPENDENCIES, dep_prefixes)
# Install prefix
@@ -143,6 +143,10 @@ def set_build_environment_variables(pkg):
os.environ[SPACK_SPEC] = str(pkg.spec)
os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
+ # Add dependencies to CMAKE_PREFIX_PATH
+ dep_prefixes = [d.package.prefix for d in pkg.spec.dependencies.values()]
+ path_set("CMAKE_PREFIX_PATH", dep_prefixes)
+
def set_module_variables_for_package(pkg):
"""Populate the module scope of install() with some useful functions.
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index 31c908d42b..f75b68b00a 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -23,12 +23,13 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from subprocess import check_call, check_output
+from subprocess import check_call
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, mkdirp
import spack
+from spack.util.executable import which
description = "Create a new installation of spack in another prefix"
@@ -38,8 +39,10 @@ def setup_parser(subparser):
def get_origin_url():
git_dir = join_path(spack.prefix, '.git')
- origin_url = check_output(
- ['git', '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url'])
+ git = which('git', required=True)
+ origin_url = git(
+ '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
+ return_output=True)
return origin_url.strip()
@@ -49,6 +52,11 @@ def bootstrap(parser, args):
tty.msg("Fetching spack from origin: %s" % origin_url)
+ if os.path.isfile(prefix):
+ tty.die("There is already a file at %s" % prefix)
+
+ mkdirp(prefix)
+
if os.path.exists(join_path(prefix, '.git')):
tty.die("There already seems to be a git repository in %s" % prefix)
@@ -62,10 +70,11 @@ def bootstrap(parser, args):
"%s/lib/spack/..." % prefix)
os.chdir(prefix)
- check_call(['git', 'init', '--shared', '-q'])
- check_call(['git', 'remote', 'add', 'origin', origin_url])
- check_call(['git', 'fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q'])
- check_call(['git', 'reset', '--hard', 'origin/master', '-q'])
+ git = which('git', required=True)
+ git('init', '--shared', '-q')
+ git('remote', 'add', 'origin', origin_url)
+ git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
+ git('reset', '--hard', 'origin/master', '-q')
tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix)
diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py
new file mode 100644
index 0000000000..24d56db7d0
--- /dev/null
+++ b/lib/spack/spack/cmd/cd.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.cmd.location
+import spack.modules
+
+description="cd to spack directories in the shell."
+
+def setup_parser(subparser):
+ """This is for decoration -- spack cd is used through spack's
+ shell support. This allows spack cd to print a descriptive
+ help message when called with -h."""
+ spack.cmd.location.setup_parser(subparser)
+
+
+def cd(parser, args):
+ spack.modules.print_help()
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index f5cf0d0143..5a8109b70f 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -24,7 +24,7 @@
##############################################################################
import os
import re
-import argparse
+from external import argparse
import hashlib
from pprint import pprint
from subprocess import CalledProcessError
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index 6091cae6c8..1df9d87ae2 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index a4cd2df7e2..ac9c844a4c 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index 85f9642019..283bfc19b9 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-import argparse
+from external import argparse
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index db6be88d32..652f243b98 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -22,14 +22,14 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
-description = "Show dependent packages."
+description = "Show installed packages that depend on another."
def setup_parser(subparser):
subparser.add_argument(
@@ -42,5 +42,5 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
- deps = [d.format(fmt) for d in specs[0].package.installed_dependents]
- tty.msg("Dependents of %s" % specs[0].format(fmt), *deps)
+ deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
+ tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
diff --git a/lib/spack/spack/cmd/dotkit.py b/lib/spack/spack/cmd/dotkit.py
deleted file mode 100644
index 7a691ae5c0..0000000000
--- a/lib/spack/spack/cmd/dotkit.py
+++ /dev/null
@@ -1,99 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://scalability-llnl.github.io/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import sys
-import os
-import shutil
-import argparse
-
-import llnl.util.tty as tty
-from llnl.util.lang import partition_list
-from llnl.util.filesystem import mkdirp
-
-import spack.cmd
-import spack.hooks.dotkit
-from spack.spec import Spec
-
-
-description ="Find dotkits for packages if they exist."
-
-def setup_parser(subparser):
- subparser.add_argument(
- '--refresh', action='store_true', help='Regenerate all dotkits')
-
- subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='spec to find a dotkit for.')
-
-
-def dotkit_find(parser, args):
- if not args.spec:
- parser.parse_args(['dotkit', '-h'])
-
- spec = spack.cmd.parse_specs(args.spec)
- if len(spec) > 1:
- tty.die("You can only pass one spec.")
- spec = spec[0]
-
- if not spack.db.exists(spec.name):
- tty.die("No such package: %s" % spec.name)
-
- specs = [s for s in spack.db.installed_package_specs() if s.satisfies(spec)]
-
- if len(specs) == 0:
- tty.die("No installed packages match spec %s" % spec)
-
- if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % spec)
- for s in specs:
- sys.stderr.write(s.tree(color=True))
- sys.exit(1)
-
- match = specs[0]
- if not os.path.isfile(spack.hooks.dotkit.dotkit_file(match.package)):
- tty.die("No dotkit is installed for package %s." % spec)
-
- print match.format('$_$@$+$%@$=$#')
-
-
-def dotkit_refresh(parser, args):
- query_specs = spack.cmd.parse_specs(args.spec)
-
- specs = spack.db.installed_package_specs()
- if query_specs:
- specs = [s for s in specs
- if any(s.satisfies(q) for q in query_specs)]
- else:
- shutil.rmtree(spack.dotkit_path, ignore_errors=False)
- mkdirp(spack.dotkit_path)
-
- for spec in specs:
- spack.hooks.dotkit.post_install(spec.package)
-
-
-
-def dotkit(parser, args):
- if args.refresh:
- dotkit_refresh(parser, args)
- else:
- dotkit_find(parser, args)
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index 1dd8703daf..0ccebd9486 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import spack
import spack.cmd
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 7f2bce119e..2238484a21 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -24,7 +24,7 @@
##############################################################################
import sys
import collections
-import argparse
+from external import argparse
from StringIO import StringIO
import llnl.util.tty as tty
@@ -89,7 +89,7 @@ def find(parser, args):
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
- print format % (abbrv, spec.package.prefix)
+ print format % (abbrv, spec.prefix)
elif args.full_specs:
for spec in specs:
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 648dbf905a..bb147b30f5 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -37,15 +37,17 @@ def info(parser, args):
package = spack.db.get(args.name)
print "Package: ", package.name
print "Homepage: ", package.homepage
- print "Download: ", package.url
print
print "Safe versions: "
- if package.versions:
- colify(reversed(sorted(package.versions)), indent=4)
+ if not package.versions:
+ print("None.")
else:
- print "None. Use spack versions %s to get a list of downloadable versions." % package.name
+ maxlen = max(len(str(v)) for v in package.versions)
+ fmt = "%%-%ss" % maxlen
+ for v in reversed(sorted(package.versions)):
+ print " " + (fmt % v) + " " + package.url_for_version(v)
print
print "Dependencies:"
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 4570d6c40f..2374d02feb 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-import argparse
+from external import argparse
import spack
import spack.cmd
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
new file mode 100644
index 0000000000..06574d9725
--- /dev/null
+++ b/lib/spack/spack/cmd/load.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+import spack.modules
+
+description ="Add package to environment using modules."
+
+def setup_parser(subparser):
+ """Parser is only constructed so that this prints a nice help
+ message with -h. """
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.')
+
+
+def load(parser, args):
+ spack.modules.print_help()
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
new file mode 100644
index 0000000000..074d984ee6
--- /dev/null
+++ b/lib/spack/spack/cmd/location.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from external import argparse
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import join_path
+
+import spack
+import spack.cmd
+
+description="Print out locations of various diectories used by Spack"
+
+def setup_parser(subparser):
+ global directories
+ directories = subparser.add_mutually_exclusive_group()
+
+ directories.add_argument(
+ '-m', '--module-dir', action='store_true', help="Spack python module directory.")
+ directories.add_argument(
+ '-r', '--spack-root', action='store_true', help="Spack installation root.")
+
+ directories.add_argument(
+ '-i', '--install-dir', action='store_true',
+ help="Install prefix for spec (spec need not be installed).")
+ directories.add_argument(
+ '-p', '--package-dir', action='store_true',
+ help="Directory enclosing a spec's package.py file.")
+ directories.add_argument(
+ '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
+ directories.add_argument(
+ '-b', '--build-dir', action='store_true',
+ help="Expanded archive directory for a spec (requires it to be staged first).")
+
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
+
+
+def location(parser, args):
+ if args.module_dir:
+ print spack.module_path
+
+ elif args.spack_root:
+ print spack.prefix
+
+ else:
+ specs = spack.cmd.parse_specs(args.spec, concretize=True)
+ if not specs:
+ tty.die("You must supply a spec.")
+ if len(specs) != 1:
+ tty.die("Too many specs. Need only one.")
+ spec = specs[0]
+
+ if args.install_dir:
+ print spec.prefix
+
+ elif args.package_dir:
+ print join_path(spack.db.root, spec.name)
+
+ else:
+ pkg = spack.db.get(spec)
+
+ if args.stage_dir:
+ print pkg.stage.path
+
+ else: # args.build_dir is the default.
+ if not os.listdir(pkg.stage.path):
+ tty.die("Build directory does not exist yet. Run this to create it:",
+ "spack stage " + " ".join(args.spec))
+ print pkg.stage.expanded_archive_path
+
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 129ac6bd45..b42b329085 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -24,10 +24,10 @@
##############################################################################
import os
import shutil
-import argparse
from datetime import datetime
from contextlib import closing
+from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.filesystem import mkdirp, join_path
@@ -41,7 +41,7 @@ from spack.stage import Stage
from spack.util.compression import extension
-description = "Manage spack mirrors."
+description = "Manage mirrors."
def setup_parser(subparser):
subparser.add_argument(
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
new file mode 100644
index 0000000000..34f0855a50
--- /dev/null
+++ b/lib/spack/spack/cmd/module.py
@@ -0,0 +1,107 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import sys
+import os
+import shutil
+from external import argparse
+
+import llnl.util.tty as tty
+from llnl.util.lang import partition_list
+from llnl.util.filesystem import mkdirp
+
+import spack.cmd
+from spack.modules import module_types
+from spack.util.string import *
+
+from spack.spec import Spec
+
+description ="Manipulate modules and dotkits."
+
+
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
+
+ refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.')
+
+ find_parser = sp.add_parser('find', help='Find module files for packages.')
+ find_parser.add_argument(
+ 'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]")
+ find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.')
+
+
+def module_find(mtype, spec_array):
+ """Look at all installed packages and see if the spec provided
+ matches any. If it does, check whether there is a module file
+ of type <mtype> there, and print out the name that the user
+ should type to use that package's module.
+ """
+ if mtype not in module_types:
+ tty.die("Invalid module type: '%s'. Options are %s." % (mtype, comma_or(module_types)))
+
+ specs = spack.cmd.parse_specs(spec_array)
+ if len(specs) > 1:
+ tty.die("You can only pass one spec.")
+ spec = specs[0]
+
+ specs = [s for s in spack.db.installed_package_specs() if s.satisfies(spec)]
+ if len(specs) == 0:
+ tty.die("No installed packages match spec %s" % spec)
+
+ if len(specs) > 1:
+ tty.error("Multiple matches for spec %s. Choose one:" % spec)
+ for s in specs:
+ sys.stderr.write(s.tree(color=True))
+ sys.exit(1)
+
+ mt = module_types[mtype]
+ mod = mt(specs[0])
+ if not os.path.isfile(mod.file_name):
+ tty.die("No %s module is installed for %s." % (mtype, spec))
+
+ print mod.use_name
+
+
+def module_refresh():
+ """Regenerate all module files for installed packages known to
+ spack (some packages may no longer exist)."""
+ specs = [s for s in spack.db.installed_known_package_specs()]
+
+ for name, cls in module_types.items():
+ tty.msg("Regenerating %s module files." % name)
+ if os.path.isdir(cls.path):
+ shutil.rmtree(cls.path, ignore_errors=False)
+ mkdirp(cls.path)
+ for spec in specs:
+ tty.debug(" Writing file for %s." % spec)
+ cls(spec).write()
+
+
+
+def module(parser, args):
+ if args.module_command == 'refresh':
+ module_refresh()
+
+ elif args.module_command == 'find':
+ module_find(args.module_type, args.spec)
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index 2356583b07..a6556c4828 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import spack.cmd
import spack
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index 1a652c82d1..2bcdc9fba2 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import argparse
+from external import argparse
from llnl.util.tty.colify import colify
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index 641394044c..86b8c827f8 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -25,7 +25,7 @@
import os
import sys
import code
-import argparse
+from external import argparse
import platform
from contextlib import closing
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index fb5a900c3f..5fcb0a9b5a 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import spack.cmd
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index 1bf1f93c2f..c8bc473c55 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -22,8 +22,10 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+import os
+from external import argparse
+import llnl.util.tty as tty
import spack
import spack.cmd
@@ -33,18 +35,21 @@ def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
+
+ dir_parser = subparser.add_mutually_exclusive_group()
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
+ 'specs', nargs=argparse.REMAINDER, help="specs of packages to stage")
def stage(parser, args):
- if not args.packages:
+ if not args.specs:
tty.die("stage requires at least one package argument")
if args.no_checksum:
spack.do_checksum = False
- specs = spack.cmd.parse_specs(args.packages, concretize=True)
+ specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
package = spack.db.get(spec)
package.do_stage()
+
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 73c98a203b..84eb4703a6 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from external import argparse
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py
new file mode 100644
index 0000000000..6442c48cb1
--- /dev/null
+++ b/lib/spack/spack/cmd/unload.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+import spack.modules
+
+description ="Remove package from environment using module."
+
+def setup_parser(subparser):
+ """Parser is only constructed so that this prints a nice help
+ message with -h. """
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
+
+
+def unload(parser, args):
+ spack.modules.print_help()
diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py
index a31e16d11a..2a7229a3a0 100644
--- a/lib/spack/spack/cmd/unuse.py
+++ b/lib/spack/spack/cmd/unuse.py
@@ -22,15 +22,17 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
-import spack.cmd.use
+from external import argparse
+import spack.modules
description ="Remove package from environment using dotkit."
def setup_parser(subparser):
+ """Parser is only constructed so that this prints a nice help
+ message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to remove.')
+ 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
def unuse(parser, args):
- spack.cmd.use.print_help()
+ spack.modules.print_help()
diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py
index 10a0644df8..e34c194739 100644
--- a/lib/spack/spack/cmd/use.py
+++ b/lib/spack/spack/cmd/use.py
@@ -22,29 +22,17 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
-import llnl.util.tty as tty
-import spack
+from external import argparse
+import spack.modules
description ="Add package to environment using dotkit."
def setup_parser(subparser):
+ """Parser is only constructed so that this prints a nice help
+ message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to add.')
-
-
-def print_help():
- tty.msg("Spack dotkit support is not initialized.",
- "",
- "To use dotkit with Spack, you must first run the command",
- "below, which you can copy and paste:",
- "",
- "For bash:",
- " . %s/setup-env.bash" % spack.share_path,
- "",
- "ksh/csh/tcsh shells are currently unsupported",
- "")
+ 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
def use(parser, args):
- print_help()
+ spack.modules.print_help()
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 716356bdd2..582f49eaf2 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -189,7 +189,7 @@ class Compiler(object):
return None
successful = [key for key in parmap(check, checks) if key is not None]
- return { (v, p, s) : path for v, p, s, path in successful }
+ return dict(((v, p, s), path) for v, p, s, path in successful)
@classmethod
def find(cls, *path):
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index c5bfe21ed4..467472cced 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -176,7 +176,7 @@ def compilers_for_spec(compiler_spec):
config = _get_config()
def get_compiler(cspec):
- items = { k:v for k,v in config.items('compiler "%s"' % cspec) }
+ items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec))
if not all(n in items for n in _required_instance_vars):
raise InvalidCompilerConfigurationError(cspec)
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index eb497711b7..e6d1bb87d4 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -118,7 +118,7 @@ class DefaultConcretizer(object):
return
try:
- nearest = next(p for p in spec.preorder_traversal(direction='parents')
+ nearest = next(p for p in spec.traverse(direction='parents')
if p.compiler is not None).compiler
if not nearest in all_compilers:
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 5494adc324..85ee16a1c2 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -84,10 +84,9 @@ import os
import re
import inspect
import ConfigParser as cp
-from collections import OrderedDict
+from external.ordereddict import OrderedDict
from llnl.util.lang import memoized
-
import spack.error
__all__ = [
@@ -222,7 +221,6 @@ class SpackConfigParser(cp.RawConfigParser):
"""
# Slightly modify Python option expressions to allow leading whitespace
OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern)
- OPTCRE_NV = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE_NV.pattern)
def __init__(self, file_or_files):
cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
@@ -341,14 +339,13 @@ class SpackConfigParser(cp.RawConfigParser):
def _read(self, fp, fpname):
- """This is a copy of Python 2.7's _read() method, with support for
- continuation lines removed.
- """
- cursect = None # None, or a dictionary
+ """This is a copy of Python 2.6's _read() method, with support for
+ continuation lines removed."""
+ cursect = None # None, or a dictionary
optname = None
- lineno = 0
comment = 0
- e = None # None, or an exception
+ lineno = 0
+ e = None # None, or an exception
while True:
line = fp.readline()
if not line:
@@ -359,7 +356,6 @@ class SpackConfigParser(cp.RawConfigParser):
(line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")):
self._sections["comment-%d" % comment] = line
comment += 1
- continue
# a section header or option header?
else:
# is it a section header?
@@ -381,27 +377,21 @@ class SpackConfigParser(cp.RawConfigParser):
raise cp.MissingSectionHeaderError(fpname, lineno, line)
# an option line?
else:
- mo = self._optcre.match(line)
+ mo = self.OPTCRE.match(line)
if mo:
optname, vi, optval = mo.group('option', 'vi', 'value')
+ if vi in ('=', ':') and ';' in optval:
+ # ';' is a comment delimiter only if it follows
+ # a spacing character
+ pos = optval.find(';')
+ if pos != -1 and optval[pos-1].isspace():
+ optval = optval[:pos]
+ optval = optval.strip()
+ # allow empty values
+ if optval == '""':
+ optval = ''
optname = self.optionxform(optname.rstrip())
- # This check is fine because the OPTCRE cannot
- # match if it would set optval to None
- if optval is not None:
- if vi in ('=', ':') and ';' in optval:
- # ';' is a comment delimiter only if it follows
- # a spacing character
- pos = optval.find(';')
- if pos != -1 and optval[pos-1].isspace():
- optval = optval[:pos]
- optval = optval.strip()
- # allow empty values
- if optval == '""':
- optval = ''
- cursect[optname] = [optval]
- else:
- # valueless option handling
- cursect[optname] = optval
+ cursect[optname] = optval
else:
# a non-fatal parsing error occurred. set up the
# exception but keep going. the exception will be
@@ -414,23 +404,13 @@ class SpackConfigParser(cp.RawConfigParser):
if e:
raise e
- # join the multi-line values collected while reading
- all_sections = [self._defaults]
- all_sections.extend(self._sections.values())
- for options in all_sections:
- # skip comments
- if isinstance(options, basestring):
- continue
- for name, val in options.items():
- if isinstance(val, list):
- options[name] = '\n'.join(val)
def _write(self, fp):
"""Write an .ini-format representation of the configuration state.
- This is taken from the default Python 2.7 source. It writes 4
+ This is taken from the default Python 2.6 source. It writes 4
spaces at the beginning of lines instead of no leading space.
"""
if self._defaults:
@@ -449,11 +429,9 @@ class SpackConfigParser(cp.RawConfigParser):
# Allow leading whitespace
fp.write("[%s]\n" % section)
for (key, value) in self._sections[section].items():
- if key == "__name__":
- continue
- if (value is not None) or (self._optcre == self.OPTCRE):
- key = " = ".join((key, str(value).replace('\n', '\n\t')))
- fp.write(" %s\n" % (key))
+ if key != "__name__":
+ fp.write(" %s = %s\n" %
+ (key, str(value).replace('\n', '\n\t')))
class SpackConfigurationError(spack.error.SpackError):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 4fc00d536e..9b31aad5fe 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -29,7 +29,10 @@ import hashlib
import shutil
from contextlib import closing
+import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
+
+import spack
from spack.spec import Spec
from spack.error import SpackError
@@ -131,12 +134,9 @@ class SpecHashDirectoryLayout(DirectoryLayout):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
- prefix_size = kwargs.get('prefix_size', 8)
- spec_file = kwargs.get('spec_file', '.spec')
-
+ spec_file_name = kwargs.get('spec_file_name', '.spec')
super(SpecHashDirectoryLayout, self).__init__(root)
- self.prefix_size = prefix_size
- self.spec_file = spec_file
+ self.spec_file_name = spec_file_name
def relative_path_for_spec(self, spec):
@@ -154,16 +154,36 @@ class SpecHashDirectoryLayout(DirectoryLayout):
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
with closing(open(path)) as spec_file:
- string = spec_file.read().replace('\n', '')
# Specs from files are assumed normal and concrete
- return Spec(string, concrete=True)
+ spec = Spec(spec_file.read().replace('\n', ''))
+
+ # If we do not have a package on hand for this spec, we know
+ # it is concrete, and we *assume* that it is normal. This
+ # prevents us from trying to fetch a non-existing package, and
+ # allows best effort for commands like spack find.
+ if not spack.db.exists(spec.name):
+ spec._normal = True
+ spec._concrete = True
+ else:
+ spec.normalize()
+ if not spec.concrete:
+ tty.warn("Spec read from installed package is not concrete:",
+ path, spec)
+
+ return spec
+
+
+ def spec_file_path(self, spec):
+ """Gets full path to spec file"""
+ _check_concrete(spec)
+ return join_path(self.path_for_spec(spec), self.spec_file_name)
def make_path_for_spec(self, spec):
_check_concrete(spec)
path = self.path_for_spec(spec)
- spec_file_path = join_path(path, self.spec_file)
+ spec_file_path = self.spec_file_path(spec)
if os.path.isdir(path):
if not os.path.isfile(spec_file_path):
@@ -177,8 +197,7 @@ class SpecHashDirectoryLayout(DirectoryLayout):
spec_hash = self.hash_spec(spec)
installed_hash = self.hash_spec(installed_spec)
if installed_spec == spec_hash:
- raise SpecHashCollisionError(
- installed_hash, spec_hash, self.prefix_size)
+ raise SpecHashCollisionError(installed_hash, spec_hash)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match SHA-1 hash!'
@@ -195,7 +214,7 @@ class SpecHashDirectoryLayout(DirectoryLayout):
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_file_path = join_path(
- self.root, arch, compiler, last_dir, self.spec_file)
+ self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
yield spec
@@ -209,10 +228,10 @@ class DirectoryLayoutError(SpackError):
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an SpecHashDirectoryLayout."""
- def __init__(self, installed_spec, new_spec, prefix_size):
+ def __init__(self, installed_spec, new_spec):
super(SpecHashDirectoryLayout, self).__init__(
- 'Specs %s and %s have the same %d character SHA-1 prefix!'
- % prefix_size, installed_spec, new_spec)
+ 'Specs %s and %s have the same SHA-1 prefix!'
+ % installed_spec, new_spec)
class InconsistentInstallDirectoryError(DirectoryLayoutError):
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index 40e0e75fdb..8bcbd83c28 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -28,7 +28,8 @@ class SpackError(Exception):
Subclasses can be found in the modules they have to do with.
"""
def __init__(self, message, long_message=None):
- super(SpackError, self).__init__(message)
+ super(SpackError, self).__init__()
+ self.message = message
self.long_message = long_message
diff --git a/lib/spack/spack/hooks/dotkit.py b/lib/spack/spack/hooks/dotkit.py
index 10b7732353..4e748ff80a 100644
--- a/lib/spack/spack/hooks/dotkit.py
+++ b/lib/spack/spack/hooks/dotkit.py
@@ -22,62 +22,14 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
-import re
-import textwrap
-import shutil
-from contextlib import closing
-
-from llnl.util.filesystem import join_path, mkdirp
-
-import spack
-
-
-def dotkit_file(pkg):
- dk_file_name = pkg.spec.format('$_$@$%@$+$=$#') + ".dk"
- return join_path(spack.dotkit_path, dk_file_name)
+import spack.modules
def post_install(pkg):
- if not os.path.exists(spack.dotkit_path):
- mkdirp(spack.dotkit_path)
-
- alterations = []
- for var, path in [
- ('PATH', pkg.prefix.bin),
- ('MANPATH', pkg.prefix.man),
- ('MANPATH', pkg.prefix.share_man),
- ('LD_LIBRARY_PATH', pkg.prefix.lib),
- ('LD_LIBRARY_PATH', pkg.prefix.lib64)]:
-
- if os.path.isdir(path):
- alterations.append("dk_alter %s %s\n" % (var, path))
-
- if not alterations:
- return
-
- alterations.append("dk_alter CMAKE_PREFIX_PATH %s\n" % pkg.prefix)
-
- dk_file = dotkit_file(pkg)
- with closing(open(dk_file, 'w')) as dk:
- # Put everything in the spack category.
- dk.write('#c spack\n')
-
- dk.write('#d %s\n' % pkg.spec.format("$_ $@"))
-
- # Recycle the description
- if pkg.__doc__:
- doc = re.sub(r'\s+', ' ', pkg.__doc__)
- for line in textwrap.wrap(doc, 72):
- dk.write("#h %s\n" % line)
-
- # Write alterations
- for alter in alterations:
- dk.write(alter)
+ dk = spack.modules.Dotkit(pkg.spec)
+ dk.write()
def post_uninstall(pkg):
- dk_file = dotkit_file(pkg)
- if os.path.exists(dk_file):
- shutil.rmtree(dk_file, ignore_errors=True)
-
+ dk = spack.modules.Dotkit(pkg.spec)
+ dk.remove()
diff --git a/lib/spack/spack/hooks/tclmodule.py b/lib/spack/spack/hooks/tclmodule.py
new file mode 100644
index 0000000000..0b9fd5a67c
--- /dev/null
+++ b/lib/spack/spack/hooks/tclmodule.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.modules
+
+
+def post_install(pkg):
+ dk = spack.modules.TclModule(pkg.spec)
+ dk.write()
+
+
+def post_uninstall(pkg):
+ dk = spack.modules.TclModule(pkg.spec)
+ dk.remove()
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
new file mode 100644
index 0000000000..755e9ea900
--- /dev/null
+++ b/lib/spack/spack/modules.py
@@ -0,0 +1,247 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""This module contains code for creating environment modules, which
+can include dotkits, tcl modules, lmod, and others.
+
+The various types of modules are installed by post-install hooks and
+removed after an uninstall by post-uninstall hooks. This class
+consolidates the logic for creating an abstract description of the
+information that module systems need. Currently that includes a
+number of directories to be appended to paths in the user's environment:
+
+ * /bin directories to be appended to PATH
+ * /lib* directories for LD_LIBRARY_PATH
+ * /man* and /share/man* directories for MANPATH
+ * the package prefix for CMAKE_PREFIX_PATH
+
+This module also includes logic for coming up with unique names for
+the module files so that they can be found by the various
+shell-support files in $SPACK/share/spack/setup-env.*.
+
+Each hook in hooks/ implements the logic for writing its specific type
+of module file.
+"""
+__all__ = ['EnvModule', 'Dotkit', 'TclModule']
+
+import os
+import re
+import textwrap
+import shutil
+from contextlib import closing
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import join_path, mkdirp
+
+import spack
+
+"""Registry of all types of modules. Entries created by EnvModule's
+ metaclass."""
+module_types = {}
+
+
+def print_help():
+ """For use by commands to tell user how to activate shell support."""
+
+ tty.msg("This command requires spack's shell integration.",
+ "",
+ "To initialize spack's shell commands, you must run one of",
+ "the commands below. Choose the right command for your shell.",
+ "",
+ "For bash and zsh:",
+ " . %s/setup-env.sh" % spack.share_path,
+ "",
+ "For csh and tcsh:",
+ " setenv SPACK_ROOT %s" % spack.prefix,
+ " source %s/setup-env.csh" % spack.share_path,
+ "")
+
+
+class EnvModule(object):
+ name = 'env_module'
+
+ class __metaclass__(type):
+ def __init__(cls, name, bases, dict):
+ type.__init__(cls, name, bases, dict)
+ if cls.name != 'env_module':
+ module_types[cls.name] = cls
+
+
+ def __init__(self, spec=None):
+ # category in the modules system
+ # TODO: come up with smarter category names.
+ self.category = "spack"
+
+ # Descriptions for the module system's UI
+ self.short_description = ""
+ self.long_description = ""
+
+ # dict pathname -> list of directories to be prepended to in
+ # the module file.
+ self._paths = None
+ self.spec = spec
+
+
+ @property
+ def paths(self):
+ if self._paths is None:
+ self._paths = {}
+
+ def add_path(path_name, directory):
+ path = self._paths.setdefault(path_name, [])
+ path.append(directory)
+
+ # Add paths if they exist.
+ for var, directory in [
+ ('PATH', self.spec.prefix.bin),
+ ('MANPATH', self.spec.prefix.man),
+ ('MANPATH', self.spec.prefix.share_man),
+ ('LD_LIBRARY_PATH', self.spec.prefix.lib),
+ ('LD_LIBRARY_PATH', self.spec.prefix.lib64)]:
+
+ if os.path.isdir(directory):
+ add_path(var, directory)
+
+ # short description is just the package + version
+ # TODO: maybe packages can optionally provide it.
+ self.short_description = self.spec.format("$_ $@")
+
+ # long description is the docstring with reduced whitespace.
+ if self.spec.package.__doc__:
+ self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
+
+ return self._paths
+
+
+ def write(self):
+ """Write out a module file for this object."""
+ module_dir = os.path.dirname(self.file_name)
+ if not os.path.exists(module_dir):
+ mkdirp(module_dir)
+
+ # If there are no paths, no need for a dotkit.
+ if not self.paths:
+ return
+
+ with closing(open(self.file_name, 'w')) as f:
+ self._write(f)
+
+
+ def _write(self, stream):
+ """To be implemented by subclasses."""
+ raise NotImplementedError()
+
+
+ @property
+ def file_name(self):
+ """Subclasses should implement this to return the name of the file
+ where this module lives."""
+ raise NotImplementedError()
+
+
+ @property
+ def use_name(self):
+ """Subclasses should implement this to return the name the
+ module command uses to refer to the package."""
+ raise NotImplementedError()
+
+
+ def remove(self):
+ mod_file = self.file_name
+ if os.path.exists(mod_file):
+ shutil.rmtree(mod_file, ignore_errors=True)
+
+
+class Dotkit(EnvModule):
+ name = 'dotkit'
+ path = join_path(spack.share_path, "dotkit")
+
+ @property
+ def file_name(self):
+ return join_path(Dotkit.path, self.spec.architecture,
+ self.spec.format('$_$@$%@$+$#.dk'))
+
+ @property
+ def use_name(self):
+ return self.spec.format('$_$@$%@$+$#')
+
+
+ def _write(self, dk_file):
+ # Category
+ if self.category:
+ dk_file.write('#c %s\n' % self.category)
+
+ # Short description
+ if self.short_description:
+ dk_file.write('#d %s\n' % self.short_description)
+
+ # Long description
+ if self.long_description:
+ for line in textwrap.wrap(self.long_description, 72):
+ dk_file.write("#h %s\n" % line)
+
+ # Path alterations
+ for var, dirs in self.paths.items():
+ for directory in dirs:
+ dk_file.write("dk_alter %s %s\n" % (var, directory))
+
+ # Let CMake find this package.
+ dk_file.write("dk_alter CMAKE_PREFIX_PATH %s\n" % self.spec.prefix)
+
+
+class TclModule(EnvModule):
+ name = 'tcl'
+ path = join_path(spack.share_path, "modules")
+
+ @property
+ def file_name(self):
+ return join_path(TclModule.path, self.spec.architecture, self.use_name)
+
+
+ @property
+ def use_name(self):
+ return self.spec.format('$_$@$%@$+$#')
+
+
+ def _write(self, m_file):
+ # TODO: category?
+ m_file.write('#%Module1.0\n')
+
+ # Short description
+ if self.short_description:
+ m_file.write('module-whatis \"%s\"\n\n' % self.short_description)
+
+ # Long description
+ if self.long_description:
+ m_file.write('proc ModulesHelp { } {\n')
+ doc = re.sub(r'"', '\"', self.long_description)
+ m_file.write("puts stderr \"%s\"\n" % doc)
+ m_file.write('}\n\n')
+
+ # Path alterations
+ for var, dirs in self.paths.items():
+ for directory in dirs:
+ m_file.write("prepend-path %s \"%s\"\n" % (var, directory))
+
+ m_file.write("prepend-path CMAKE_PREFIX_PATH \"%s\"\n" % self.spec.prefix)
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 48074aaebb..361fda1ba6 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -360,7 +360,7 @@ class Package(object):
# Version-ize the keys in versions dict
try:
- self.versions = { Version(v):h for v,h in self.versions.items() }
+ self.versions = dict((Version(v), h) for v,h in self.versions.items())
except ValueError:
raise ValueError("Keys of versions dict in package %s must be versions!"
% self.name)
@@ -773,7 +773,7 @@ class Package(object):
' '.join(formatted_deps))
self.remove_prefix()
- tty.msg("Successfully uninstalled %s." % self.spec)
+ tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
# Once everything else is done, run post install hooks
spack.hooks.post_uninstall(self)
diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py
index 4f9f606c89..72f9403a64 100644
--- a/lib/spack/spack/packages.py
+++ b/lib/spack/spack/packages.py
@@ -70,11 +70,11 @@ class PackageDB(object):
if not spec in self.instances:
package_class = self.get_class_for_package_name(spec.name)
try:
- self.instances[spec.name] = package_class(spec)
+ self.instances[spec.copy()] = package_class(spec)
except Exception, e:
raise FailedConstructorError(spec.name, e)
- return self.instances[spec.name]
+ return self.instances[spec]
@_autospec
@@ -118,7 +118,23 @@ class PackageDB(object):
"""Read installed package names straight from the install directory
layout.
"""
- return spack.install_layout.all_specs()
+ # Get specs from the directory layout but ensure that they're
+ # all normalized properly.
+ installed = []
+ for spec in spack.install_layout.all_specs():
+ spec.normalize()
+ installed.append(spec)
+ return installed
+
+
+ def installed_known_package_specs(self):
+ """Read installed package names straight from the install
+ directory layout, but return only specs for which the
+ package is known to this version of spack.
+ """
+ for spec in spack.install_layout.all_specs():
+ if self.exists(spec.name):
+ yield spec
@memoized
@@ -182,24 +198,6 @@ class PackageDB(object):
return cls
- def compute_dependents(self):
- """Reads in all package files and sets dependence information on
- Package objects in memory.
- """
- if not hasattr(compute_dependents, index):
- compute_dependents.index = {}
-
- for pkg in all_packages():
- if pkg._dependents is None:
- pkg._dependents = []
-
- for name, dep in pkg.dependencies.iteritems():
- dpkg = self.get(name)
- if dpkg._dependents is None:
- dpkg._dependents = []
- dpkg._dependents.append(pkg.name)
-
-
def graph_dependencies(self, out=sys.stdout):
"""Print out a graph of all the dependencies between package.
Graph is in dot format."""
@@ -214,10 +212,17 @@ class PackageDB(object):
return '"%s"' % string
deps = []
- for pkg in all_packages():
+ for pkg in self.all_packages():
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
+
+ # Add edges for each depends_on in the package.
for dep_name, dep in pkg.dependencies.iteritems():
deps.append((pkg.name, dep_name))
+
+ # If the package provides something, add an edge for that.
+ for provider in set(p.name for p in pkg.provided):
+ deps.append((provider, pkg.name))
+
out.write('\n')
for pair in deps:
diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py
index a7b46cfb33..5afb7e7624 100644
--- a/lib/spack/spack/relations.py
+++ b/lib/spack/spack/relations.py
@@ -72,7 +72,6 @@ __all__ = [ 'depends_on', 'provides', 'patch', 'version' ]
import re
import inspect
-import importlib
from llnl.util.lang import *
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 45c3402617..4838fd9946 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -94,6 +94,7 @@ import sys
import itertools
import hashlib
from StringIO import StringIO
+from operator import attrgetter
import llnl.util.tty as tty
from llnl.util.lang import *
@@ -309,9 +310,8 @@ class DependencyMap(HashableMap):
def __str__(self):
- sorted_dep_names = sorted(self.keys())
return ''.join(
- ["^" + str(self[name]) for name in sorted_dep_names])
+ ["^" + str(self[name]) for name in sorted(self.keys())])
@key_ordering
@@ -352,10 +352,6 @@ class Spec(object):
self._normal = kwargs.get('normal', False)
self._concrete = kwargs.get('concrete', False)
- # Specs cannot be concrete and non-normal.
- if self._concrete:
- self._normal = True
-
# This allows users to construct a spec DAG with literals.
# Note that given two specs a and b, Spec(a) copies a, but
# Spec(a, b) will copy a but just add b as a dep.
@@ -454,10 +450,19 @@ class Spec(object):
return self._concrete
- def preorder_traversal(self, visited=None, d=0, **kwargs):
- """Generic preorder traversal of the DAG represented by this spec.
+ def traverse(self, visited=None, d=0, **kwargs):
+ """Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
+ order [=pre|post]
+ Order to traverse spec nodes. Defaults to preorder traversal.
+ Options are:
+
+ 'pre': Pre-order traversal; each node is yielded before its
+ children in the dependency DAG.
+ 'post': Post-order traversal; each node is yielded after its
+ children in the dependency DAG.
+
cover [=nodes|edges|paths]
Determines how extensively to cover the dag. Possible vlaues:
@@ -475,7 +480,7 @@ class Spec(object):
spec, but also their depth from the root in a (depth, node)
tuple.
- keyfun [=id]
+ key [=id]
Allow a custom key function to track the identity of nodes
in the traversal.
@@ -487,44 +492,57 @@ class Spec(object):
'parents', traverses upwards in the DAG towards the root.
"""
+ # get initial values for kwargs
depth = kwargs.get('depth', False)
key_fun = kwargs.get('key', id)
+ if isinstance(key_fun, basestring):
+ key_fun = attrgetter(key_fun)
yield_root = kwargs.get('root', True)
cover = kwargs.get('cover', 'nodes')
direction = kwargs.get('direction', 'children')
+ order = kwargs.get('order', 'pre')
- cover_values = ('nodes', 'edges', 'paths')
- if cover not in cover_values:
- raise ValueError("Invalid value for cover: %s. Choices are %s"
- % (cover, ",".join(cover_values)))
-
- direction_values = ('children', 'parents')
- if direction not in direction_values:
- raise ValueError("Invalid value for direction: %s. Choices are %s"
- % (direction, ",".join(direction_values)))
+ # Make sure kwargs have legal values; raise ValueError if not.
+ def validate(name, val, allowed_values):
+ if val not in allowed_values:
+ raise ValueError("Invalid value for %s: %s. Choices are %s"
+ % (name, val, ",".join(allowed_values)))
+ validate('cover', cover, ('nodes', 'edges', 'paths'))
+ validate('direction', direction, ('children', 'parents'))
+ validate('order', order, ('pre', 'post'))
if visited is None:
visited = set()
+ key = key_fun(self)
+
+ # Node traversal does not yield visited nodes.
+ if key in visited and cover == 'nodes':
+ return
+ # Determine whether and what to yield for this node.
+ yield_me = yield_root or d > 0
result = (d, self) if depth else self
- key = key_fun(self)
- if key in visited:
- if cover == 'nodes': return
- if yield_root or d > 0: yield result
- if cover == 'edges': return
- else:
- if yield_root or d > 0: yield result
+ # Preorder traversal yields before successors
+ if yield_me and order == 'pre':
+ yield result
- successors = self.dependencies
- if direction == 'parents':
- successors = self.dependents
+ # Edge traversal yields but skips children of visited nodes
+ if not (key in visited and cover == 'edges'):
+ # This code determines direction and yields the children/parents
+ successors = self.dependencies
+ if direction == 'parents':
+ successors = self.dependents
- visited.add(key)
- for name in sorted(successors):
- child = successors[name]
- for elt in child.preorder_traversal(visited, d+1, **kwargs):
- yield elt
+ visited.add(key)
+ for name in sorted(successors):
+ child = successors[name]
+ for elt in child.traverse(visited, d+1, **kwargs):
+ yield elt
+
+ # Postorder traversal yields after successors
+ if yield_me and order == 'post':
+ yield result
@property
@@ -540,13 +558,14 @@ class Spec(object):
def dep_hash(self, length=None):
- """Return a hash representing the dependencies of this spec
- This will always normalize first so that the hash is consistent.
- """
- self.normalize()
+ """Return a hash representing all dependencies of this spec
+ (direct and indirect).
+ If you want this hash to be consistent, you should
+ concretize the spec first so that it is not ambiguous.
+ """
sha = hashlib.sha1()
- sha.update(str(self.dependencies))
+ sha.update(self.dep_string())
full_hash = sha.hexdigest()
return full_hash[:length]
@@ -609,7 +628,7 @@ class Spec(object):
a problem.
"""
while True:
- virtuals =[v for v in self.preorder_traversal() if v.virtual]
+ virtuals =[v for v in self.traverse() if v.virtual]
if not virtuals:
return
@@ -620,7 +639,7 @@ class Spec(object):
spec._replace_with(concrete)
# If there are duplicate providers or duplicate provider deps, this
- # consolidates them and merges constraints.
+ # consolidates them and merges constraints.
self.normalize(force=True)
@@ -654,47 +673,51 @@ class Spec(object):
return clone
- def flat_dependencies(self):
- """Return a DependencyMap containing all of this spec's dependencies
- with their constraints merged. If there are any conflicts, throw
- an exception.
+ def flat_dependencies(self, **kwargs):
+ """Return a DependencyMap containing all of this spec's
+ dependencies with their constraints merged.
+
+ If copy is True, returns merged copies of its dependencies
+ without modifying the spec it's called on.
- This will work even on specs that are not normalized; i.e. specs
- that have two instances of the same dependency in the DAG.
- This is used as the first step of normalization.
+ If copy is False, clears this spec's dependencies and
+ returns them.
"""
- # This ensures that the package descriptions themselves are consistent
- if not self.virtual:
- self.package.validate_dependencies()
+ copy = kwargs.get('copy', True)
- # Once that is guaranteed, we know any constraint violations are due
- # to the spec -- so they're the user's fault, not Spack's.
flat_deps = DependencyMap()
try:
- for spec in self.preorder_traversal():
+ for spec in self.traverse(root=False):
if spec.name not in flat_deps:
- new_spec = spec.copy(dependencies=False)
- flat_deps[spec.name] = new_spec
-
+ if copy:
+ flat_deps[spec.name] = spec.copy(deps=False)
+ else:
+ flat_deps[spec.name] = spec
else:
flat_deps[spec.name].constrain(spec)
+ if not copy:
+ for dep in flat_deps.values():
+ dep.dependencies.clear()
+ dep.dependents.clear()
+ self.dependencies.clear()
+
+ return flat_deps
+
except UnsatisfiableSpecError, e:
- # This REALLY shouldn't happen unless something is wrong in spack.
- # It means we got a spec DAG with two instances of the same package
- # that had inconsistent constraints. There's no way for a user to
- # produce a spec like this (the parser adds all deps to the root),
- # so this means OUR code is not sane!
+ # Here, the DAG contains two instances of the same package
+ # with inconsistent constraints. Users cannot produce
+ # inconsistent specs like this on the command line: the
+ # parser doesn't allow it. Spack must be broken!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
- return flat_deps
-
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
conflict, throw an exception. """
- self.dependencies = self.flat_dependencies()
+ for dep in self.flat_dependencies(copy=False):
+ self._add_dependency(dep)
def _normalize_helper(self, visited, spec_deps, provider_index):
@@ -797,11 +820,12 @@ class Spec(object):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
- # Then ensure that the packages referenced are sane, that the
- # provided spec is sane, and that all dependency specs are in the
- # root node of the spec. flat_dependencies will do this for us.
- spec_deps = self.flat_dependencies()
- self.dependencies.clear()
+ # Ensure that the package & dep descriptions are consistent & sane
+ if not self.virtual:
+ self.package.validate_dependencies()
+
+ # Get all the dependencies into one DependencyMap
+ spec_deps = self.flat_dependencies(copy=False)
# Figure out which of the user-provided deps provide virtual deps.
# Remove virtual deps that are already provided by something in the spec
@@ -814,7 +838,7 @@ class Spec(object):
# If there are deps specified but not visited, they're not
# actually deps of this package. Raise an error.
- extra = set(spec_deps.viewkeys()).difference(visited)
+ extra = set(spec_deps.keys()).difference(visited)
# Also subtract out all the packages that provide a needed vpkg
vdeps = [v for v in self.package.virtual_dependencies()]
@@ -843,7 +867,7 @@ class Spec(object):
If they're not, it will raise either UnknownPackageError or
UnsupportedCompilerError.
"""
- for spec in self.preorder_traversal():
+ for spec in self.traverse():
# Don't get a package for a virtual name.
if not spec.virtual:
spack.db.get(spec.name)
@@ -911,17 +935,17 @@ class Spec(object):
def common_dependencies(self, other):
"""Return names of dependencies that self and other have in common."""
common = set(
- s.name for s in self.preorder_traversal(root=False))
+ s.name for s in self.traverse(root=False))
common.intersection_update(
- s.name for s in other.preorder_traversal(root=False))
+ s.name for s in other.traverse(root=False))
return common
def dep_difference(self, other):
"""Returns dependencies in self that are not in other."""
- mine = set(s.name for s in self.preorder_traversal(root=False))
+ mine = set(s.name for s in self.traverse(root=False))
mine.difference_update(
- s.name for s in other.preorder_traversal(root=False))
+ s.name for s in other.traverse(root=False))
return mine
@@ -980,8 +1004,8 @@ class Spec(object):
return False
# For virtual dependencies, we need to dig a little deeper.
- self_index = ProviderIndex(self.preorder_traversal(), restrict=True)
- other_index = ProviderIndex(other.preorder_traversal(), restrict=True)
+ self_index = ProviderIndex(self.traverse(), restrict=True)
+ other_index = ProviderIndex(other.traverse(), restrict=True)
# This handles cases where there are already providers for both vpkgs
if not self_index.satisfies(other_index):
@@ -1003,7 +1027,7 @@ class Spec(object):
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
- return [spec for spec in self.preorder_traversal() if spec.virtual]
+ return [spec for spec in self.traverse() if spec.virtual]
def _dup(self, other, **kwargs):
@@ -1018,22 +1042,29 @@ class Spec(object):
Whether deps should be copied too. Set to false to copy a
spec but not its dependencies.
"""
- # TODO: this needs to handle DAGs.
+ # Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture
- self.compiler = None
- if other.compiler:
- self.compiler = other.compiler.copy()
-
+ self.compiler = other.compiler.copy() if other.compiler else None
self.dependents = DependencyMap()
- copy_deps = kwargs.get('dependencies', True)
- if copy_deps:
- self.dependencies = other.dependencies.copy()
- else:
- self.dependencies = DependencyMap()
-
+ self.dependencies = DependencyMap()
+
+ # If we copy dependencies, preserve DAG structure in the new spec
+ if kwargs.get('deps', True):
+ # This copies the deps from other using _dup(deps=False)
+ new_nodes = other.flat_dependencies()
+ new_nodes[self.name] = self
+
+ # Hook everything up properly here by traversing.
+ for spec in other.traverse(cover='nodes'):
+ parent = new_nodes[spec.name]
+ for child in spec.dependencies:
+ if child not in parent.dependencies:
+ parent._add_dependency(new_nodes[child])
+
+ # Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
self._concrete = other._concrete
@@ -1057,7 +1088,7 @@ class Spec(object):
def __getitem__(self, name):
"""TODO: reconcile __getitem__, _add_dependency, __contains__"""
- for spec in self.preorder_traversal():
+ for spec in self.traverse():
if spec.name == name:
return spec
@@ -1068,15 +1099,82 @@ class Spec(object):
"""True if this spec has any dependency that satisfies the supplied
spec."""
spec = self._autospec(spec)
- for s in self.preorder_traversal():
+ for s in self.traverse():
if s.satisfies(spec):
return True
return False
- def _cmp_key(self):
+ def sorted_deps(self):
+ """Return a list of all dependencies sorted by name."""
+ deps = self.flat_dependencies()
+ return tuple(deps[name] for name in sorted(deps))
+
+
+ def _eq_dag(self, other, vs, vo):
+ """Recursive helper for eq_dag and ne_dag. Does the actual DAG
+ traversal."""
+ vs.add(id(self))
+ vo.add(id(other))
+
+ if self.ne_node(other):
+ return False
+
+ if len(self.dependencies) != len(other.dependencies):
+ return False
+
+ ssorted = [self.dependencies[name] for name in sorted(self.dependencies)]
+ osorted = [other.dependencies[name] for name in sorted(other.dependencies)]
+
+ for s, o in zip(ssorted, osorted):
+ visited_s = id(s) in vs
+ visited_o = id(o) in vo
+
+ # Check for duplicate or non-equal dependencies
+ if visited_s != visited_o: return False
+
+ # Skip visited nodes
+ if visited_s or visited_o: continue
+
+ # Recursive check for equality
+ if not s._eq_dag(o, vs, vo):
+ return False
+
+ return True
+
+
+ def eq_dag(self, other):
+ """True if the full dependency DAGs of specs are equal"""
+ return self._eq_dag(other, set(), set())
+
+
+ def ne_dag(self, other):
+ """True if the full dependency DAGs of specs are not equal"""
+ return not self.eq_dag(other)
+
+
+ def _cmp_node(self):
+ """Comparison key for just *this node* and not its deps."""
return (self.name, self.versions, self.variants,
- self.architecture, self.compiler, self.dependencies)
+ self.architecture, self.compiler)
+
+
+ def eq_node(self, other):
+ """Equality with another spec, not including dependencies."""
+ return self._cmp_node() == other._cmp_node()
+
+
+ def ne_node(self, other):
+ """Inequality with another spec, not including dependencies."""
+ return self._cmp_node() != other._cmp_node()
+
+
+ def _cmp_key(self):
+ """Comparison key for this node and all dependencies *without*
+ considering structure. This is the default, as
+ normalization will restore structure.
+ """
+ return self._cmp_node() + (self.sorted_deps(),)
def colorized(self):
@@ -1179,12 +1277,12 @@ class Spec(object):
return result
+ def dep_string(self):
+ return ''.join("^" + dep.format() for dep in self.sorted_deps())
+
+
def __str__(self):
- by_name = lambda d: d.name
- deps = self.preorder_traversal(key=by_name, root=False)
- sorted_deps = sorted(deps, key=by_name)
- dep_string = ''.join("^" + dep.format() for dep in sorted_deps)
- return self.format() + dep_string
+ return self.format() + self.dep_string()
def tree(self, **kwargs):
@@ -1200,7 +1298,7 @@ class Spec(object):
out = ""
cur_id = 0
ids = {}
- for d, node in self.preorder_traversal(cover=cover, depth=True):
+ for d, node in self.traverse(order='pre', cover=cover, depth=True):
out += " " * indent
if depth:
out += "%-4d" % d
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 839555d630..3dac798396 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -120,8 +120,7 @@ class Stage(object):
if spack.use_tmp_stage:
# If we're using a tmp dir, it's a link, and it points at the right spot,
# then keep it.
- if (os.path.commonprefix((real_path, real_tmp)) == real_tmp
- and os.path.exists(real_path)):
+ if (real_path.startswith(real_tmp) and os.path.exists(real_path)):
return False
else:
# otherwise, just unlink it and start over.
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 5442189c2e..8ddc7f227d 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -45,7 +45,9 @@ test_names = ['versions',
'multimethod',
'install',
'package_sanity',
- 'config']
+ 'config',
+ 'directory_layout',
+ 'python_version']
def list_tests():
@@ -70,7 +72,7 @@ def run(names, verbose=False):
runner = unittest.TextTestRunner(verbosity=verbosity)
- testsRun = errors = failures = skipped = 0
+ testsRun = errors = failures = 0
for test in names:
module = 'spack.test.' + test
print module
@@ -81,12 +83,10 @@ def run(names, verbose=False):
testsRun += result.testsRun
errors += len(result.errors)
failures += len(result.failures)
- skipped += len(result.skipped)
succeeded = not errors and not failures
tty.msg("Tests Complete.",
"%5d tests run" % testsRun,
- "%5d skipped" % skipped,
"%5d failures" % failures,
"%5d errors" % errors)
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 6ad2ef29d8..a7f4812c8c 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -134,29 +134,29 @@ class ConcretizeTest(MockPackagesTest):
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
- self.assertIn('mpi', spec.dependencies)
- self.assertNotIn('fake', spec)
+ self.assertTrue('mpi' in spec.dependencies)
+ self.assertFalse('fake' in spec)
spec.concretize()
- self.assertIn('zmpi', spec.dependencies)
- self.assertNotIn('mpi', spec)
- self.assertIn('fake', spec.dependencies['zmpi'])
+ self.assertTrue('zmpi' in spec.dependencies)
+ self.assertFalse('mpi' in spec)
+ self.assertTrue('fake' in spec.dependencies['zmpi'])
def test_virtual_is_fully_expanded_for_mpileaks(self):
spec = Spec('mpileaks ^mpi@10.0')
- self.assertIn('mpi', spec.dependencies)
- self.assertNotIn('fake', spec)
+ self.assertTrue('mpi' in spec.dependencies)
+ self.assertFalse('fake' in spec)
spec.concretize()
- self.assertIn('zmpi', spec.dependencies)
- self.assertIn('callpath', spec.dependencies)
- self.assertIn('zmpi', spec.dependencies['callpath'].dependencies)
- self.assertIn('fake', spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
+ self.assertTrue('zmpi' in spec.dependencies)
+ self.assertTrue('callpath' in spec.dependencies)
+ self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
+ self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
- self.assertNotIn('mpi', spec)
+ self.assertFalse('mpi' in spec)
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
new file mode 100644
index 0000000000..3e52954cfe
--- /dev/null
+++ b/lib/spack/spack/test/directory_layout.py
@@ -0,0 +1,155 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""\
+This test verifies that the Spack directory layout works properly.
+"""
+import unittest
+import tempfile
+import shutil
+import os
+from contextlib import closing
+
+from llnl.util.filesystem import *
+
+import spack
+from spack.spec import Spec
+from spack.packages import PackageDB
+from spack.directory_layout import SpecHashDirectoryLayout
+
+class DirectoryLayoutTest(unittest.TestCase):
+ """Tests that a directory layout works correctly and produces a
+ consistent install path."""
+
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp()
+ self.layout = SpecHashDirectoryLayout(self.tmpdir)
+
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir, ignore_errors=True)
+ self.layout = None
+
+
+ def test_read_and_write_spec(self):
+ """This goes through each package in spack and creates a directory for
+ it. It then ensures that the spec for the directory's
+ installed package can be read back in consistently, and
+ finally that the directory can be removed by the directory
+ layout.
+ """
+ for pkg in spack.db.all_packages():
+ spec = pkg.spec
+
+ # If a spec fails to concretize, just skip it. If it is a
+ # real error, it will be caught by concretization tests.
+ try:
+ spec.concretize()
+ except:
+ continue
+
+ self.layout.make_path_for_spec(spec)
+
+ install_dir = self.layout.path_for_spec(spec)
+ spec_path = self.layout.spec_file_path(spec)
+
+ # Ensure directory has been created in right place.
+ self.assertTrue(os.path.isdir(install_dir))
+ self.assertTrue(install_dir.startswith(self.tmpdir))
+
+ # Ensure spec file exists when directory is created
+ self.assertTrue(os.path.isfile(spec_path))
+ self.assertTrue(spec_path.startswith(install_dir))
+
+ # Make sure spec file can be read back in to get the original spec
+ spec_from_file = self.layout.read_spec(spec_path)
+ self.assertEqual(spec, spec_from_file)
+ self.assertTrue(spec.eq_dag, spec_from_file)
+ self.assertTrue(spec_from_file.concrete)
+
+ # Ensure that specs that come out "normal" are really normal.
+ with closing(open(spec_path)) as spec_file:
+ read_separately = Spec(spec_file.read())
+
+ read_separately.normalize()
+ self.assertEqual(read_separately, spec_from_file)
+
+ read_separately.concretize()
+ self.assertEqual(read_separately, spec_from_file)
+
+ # Make sure the dep hash of the read-in spec is the same
+ self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
+
+ # Ensure directories are properly removed
+ self.layout.remove_path_for_spec(spec)
+ self.assertFalse(os.path.isdir(install_dir))
+ self.assertFalse(os.path.exists(install_dir))
+
+
+ def test_handle_unknown_package(self):
+ """This test ensures that spack can at least do *some*
+ operations with packages that are installed but that it
+ does not know about. This is actually not such an uncommon
+ scenario with spack; it can happen when you switch from a
+ git branch where you're working on a new package.
+
+ This test ensures that the directory layout stores enough
+ information about installed packages' specs to uninstall
+ or query them again if the package goes away.
+ """
+ mock_db = PackageDB(spack.mock_packages_path)
+
+ not_in_mock = set(spack.db.all_package_names()).difference(
+ set(mock_db.all_package_names()))
+
+ # Create all the packages that are not in mock.
+ installed_specs = {}
+ for pkg_name in not_in_mock:
+ spec = spack.db.get(pkg_name).spec
+
+ # If a spec fails to concretize, just skip it. If it is a
+ # real error, it will be caught by concretization tests.
+ try:
+ spec.concretize()
+ except:
+ continue
+
+ self.layout.make_path_for_spec(spec)
+ installed_specs[spec] = self.layout.path_for_spec(spec)
+
+ tmp = spack.db
+ spack.db = mock_db
+
+ # Now check that even without the package files, we know
+ # enough to read a spec from the spec file.
+ for spec, path in installed_specs.items():
+ spec_from_file = self.layout.read_spec(join_path(path, '.spec'))
+
+ # To satisfy these conditions, directory layouts need to
+ # read in concrete specs from their install dirs somehow.
+ self.assertEqual(path, self.layout.path_for_spec(spec_from_file))
+ self.assertEqual(spec, spec_from_file)
+ self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
+
+ spack.db = tmp
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index a92bd92289..8047ab92e3 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -25,15 +25,18 @@
import os
import unittest
import shutil
+import tempfile
from contextlib import closing
from llnl.util.filesystem import *
import spack
from spack.stage import Stage
+from spack.directory_layout import SpecHashDirectoryLayout
from spack.util.executable import which
from spack.test.mock_packages_test import *
+
dir_name = 'trivial-1.0'
archive_name = 'trivial-1.0.tar.gz'
install_test_package = 'trivial_install_test_package'
@@ -66,9 +69,16 @@ class InstallTest(MockPackagesTest):
tar = which('tar')
tar('-czf', archive_name, dir_name)
- # We use a fake pacakge, so skip the checksum.
+ # We use a fake package, so skip the checksum.
spack.do_checksum = False
+ # Use a fake install directory to avoid conflicts bt/w
+ # installed pkgs and mock packages.
+ self.tmpdir = tempfile.mkdtemp()
+ self.orig_layout = spack.install_layout
+ spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)
+
+
def tearDown(self):
super(InstallTest, self).tearDown()
@@ -78,6 +88,10 @@ class InstallTest(MockPackagesTest):
# Turn checksumming back on
spack.do_checksum = True
+ # restore spack's layout.
+ spack.install_layout = self.orig_layout
+ shutil.rmtree(self.tmpdir, ignore_errors=True)
+
def test_install_and_uninstall(self):
# Get a basic concrete spec for the trivial install package.
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index adde70ff6c..e948376039 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -39,7 +39,6 @@ def set_pkg_dep(pkg, spec):
class MockPackagesTest(unittest.TestCase):
- @classmethod
def setUp(self):
# Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with
@@ -52,7 +51,7 @@ class MockPackagesTest(unittest.TestCase):
'site' : spack.mock_site_config,
'user' : spack.mock_user_config }
- @classmethod
+
def tearDown(self):
"""Restore the real packages path after any test."""
spack.db = self.real_db
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
new file mode 100644
index 0000000000..04b4eadf34
--- /dev/null
+++ b/lib/spack/spack/test/python_version.py
@@ -0,0 +1,97 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+This test ensures that all Spack files are compatible with Python 2.6 or earlier.
+
+Spack was originally 2.7, but enough systems in 2014 are still using
+2.6 on their frontend nodes that we need 2.6 to get adopted.
+"""
+import unittest
+import os
+import re
+from contextlib import closing
+
+import llnl.util.tty as tty
+
+from external import pyqver2
+import spack
+
+spack_max_version = (2,6)
+
+class PythonVersionTest(unittest.TestCase):
+
+ def spack_python_files(self):
+ # first file is the spack script.
+ yield spack.spack_file
+ yield os.path.join(spack.build_env_path, 'cc')
+
+ # Next files are all the source files and package files.
+ search_paths = [spack.lib_path, spack.var_path]
+
+ # Iterate through the whole spack source tree.
+ for path in search_paths:
+ for root, dirnames, filenames in os.walk(path):
+ for filename in filenames:
+ if re.match(r'^[^.#].*\.py$', filename):
+ yield os.path.join(root, filename)
+
+
+ def test_python_versions(self):
+ # dict version -> filename -> reasons
+ all_issues = {}
+
+ for fn in self.spack_python_files():
+ with closing(open(fn)) as pyfile:
+ versions = pyqver2.get_versions(pyfile.read())
+ for ver, reasons in versions.items():
+ if ver > spack_max_version:
+ if not ver in all_issues:
+ all_issues[ver] = {}
+ all_issues[ver][fn] = reasons
+
+ if all_issues:
+ tty.error("Spack must run on Python version %d.%d"
+ % spack_max_version)
+
+ for v in sorted(all_issues.keys(), reverse=True):
+ msgs = []
+ for fn in sorted(all_issues[v].keys()):
+ short_fn = fn
+ if fn.startswith(spack.prefix):
+ short_fn = fn[len(spack.prefix):]
+
+ reasons = [r for r in set(all_issues[v][fn]) if r]
+ for r in reasons:
+ msgs.append(("%s:%s" % ('spack' + short_fn, r[0]), r[1]))
+
+ tty.error("These files require version %d.%d:" % v)
+ maxlen = max(len(f) for f, prob in msgs)
+ fmt = "%%-%ds%%s" % (maxlen+3)
+ print fmt % ('File', 'Reason')
+ print fmt % ('-' * (maxlen), '-' * 20)
+ for msg in msgs:
+ print fmt % msg
+
+ self.assertTrue(len(all_issues) == 0)
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 0c0b214ab7..fb67aa8a8d 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -48,7 +48,7 @@ class SpecDagTest(MockPackagesTest):
spec.package.validate_dependencies)
- def test_unique_node_traversal(self):
+ def test_preorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
dag.normalize()
@@ -56,14 +56,14 @@ class SpecDagTest(MockPackagesTest):
'zmpi', 'fake']
pairs = zip([0,1,2,3,4,2,3], names)
- traversal = dag.preorder_traversal()
- self.assertListEqual([x.name for x in traversal], names)
+ traversal = dag.traverse()
+ self.assertEqual([x.name for x in traversal], names)
- traversal = dag.preorder_traversal(depth=True)
- self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+ traversal = dag.traverse(depth=True)
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
- def test_unique_edge_traversal(self):
+ def test_preorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
dag.normalize()
@@ -71,14 +71,14 @@ class SpecDagTest(MockPackagesTest):
'libelf', 'zmpi', 'fake', 'zmpi']
pairs = zip([0,1,2,3,4,3,2,3,1], names)
- traversal = dag.preorder_traversal(cover='edges')
- self.assertListEqual([x.name for x in traversal], names)
+ traversal = dag.traverse(cover='edges')
+ self.assertEqual([x.name for x in traversal], names)
- traversal = dag.preorder_traversal(cover='edges', depth=True)
- self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+ traversal = dag.traverse(cover='edges', depth=True)
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
- def test_unique_path_traversal(self):
+ def test_preorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
dag.normalize()
@@ -86,11 +86,56 @@ class SpecDagTest(MockPackagesTest):
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
pairs = zip([0,1,2,3,4,3,2,3,1,2], names)
- traversal = dag.preorder_traversal(cover='paths')
- self.assertListEqual([x.name for x in traversal], names)
+ traversal = dag.traverse(cover='paths')
+ self.assertEqual([x.name for x in traversal], names)
- traversal = dag.preorder_traversal(cover='paths', depth=True)
- self.assertListEqual([(x, y.name) for x,y in traversal], pairs)
+ traversal = dag.traverse(cover='paths', depth=True)
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
+
+
+ def test_postorder_node_traversal(self):
+ dag = Spec('mpileaks ^zmpi')
+ dag.normalize()
+
+ names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
+ 'callpath', 'mpileaks']
+ pairs = zip([4,3,2,3,2,1,0], names)
+
+ traversal = dag.traverse(order='post')
+ self.assertEqual([x.name for x in traversal], names)
+
+ traversal = dag.traverse(depth=True, order='post')
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
+
+
+ def test_postorder_edge_traversal(self):
+ dag = Spec('mpileaks ^zmpi')
+ dag.normalize()
+
+ names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
+ 'callpath', 'zmpi', 'mpileaks']
+ pairs = zip([4,3,3,2,3,2,1,1,0], names)
+
+ traversal = dag.traverse(cover='edges', order='post')
+ self.assertEqual([x.name for x in traversal], names)
+
+ traversal = dag.traverse(cover='edges', depth=True, order='post')
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
+
+
+ def test_postorder_path_traversal(self):
+ dag = Spec('mpileaks ^zmpi')
+ dag.normalize()
+
+ names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
+ 'callpath', 'fake', 'zmpi', 'mpileaks']
+ pairs = zip([4,3,3,2,3,2,1,2,1,0], names)
+
+ traversal = dag.traverse(cover='paths', order='post')
+ self.assertEqual([x.name for x in traversal], names)
+
+ traversal = dag.traverse(cover='paths', depth=True, order='post')
+ self.assertEqual([(x, y.name) for x,y in traversal], pairs)
def test_conflicting_spec_constraints(self):
@@ -142,7 +187,7 @@ class SpecDagTest(MockPackagesTest):
# make sure nothing with the same name occurs twice
counts = {}
- for spec in dag.preorder_traversal(keyfun=id):
+ for spec in dag.traverse(key=id):
if not spec.name in counts:
counts[spec.name] = 0
counts[spec.name] += 1
@@ -152,15 +197,15 @@ class SpecDagTest(MockPackagesTest):
def check_links(self, spec_to_check):
- for spec in spec_to_check.preorder_traversal():
+ for spec in spec_to_check.traverse():
for dependent in spec.dependents.values():
- self.assertIn(
- spec.name, dependent.dependencies,
+ self.assertTrue(
+ spec.name in dependent.dependencies,
"%s not in dependencies of %s" % (spec.name, dependent.name))
for dependency in spec.dependencies.values():
- self.assertIn(
- spec.name, dependency.dependents,
+ self.assertTrue(
+ spec.name in dependency.dependents,
"%s not in dependents of %s" % (spec.name, dependency.name))
@@ -221,30 +266,53 @@ class SpecDagTest(MockPackagesTest):
def test_equal(self):
- spec = Spec('mpileaks ^callpath ^libelf ^libdwarf')
- self.assertNotEqual(spec, Spec(
- 'mpileaks', Spec('callpath',
- Spec('libdwarf',
- Spec('libelf')))))
- self.assertNotEqual(spec, Spec(
- 'mpileaks', Spec('callpath',
- Spec('libelf',
- Spec('libdwarf')))))
+ # Different spec structures to test for equality
+ flat = Spec('mpileaks ^callpath ^libelf ^libdwarf')
+
+ flat_init = Spec(
+ 'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf'))
+
+ flip_flat = Spec(
+ 'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath'))
+
+ dag = Spec('mpileaks', Spec('callpath',
+ Spec('libdwarf',
+ Spec('libelf'))))
- self.assertEqual(spec, Spec(
- 'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf')))
+ flip_dag = Spec('mpileaks', Spec('callpath',
+ Spec('libelf',
+ Spec('libdwarf'))))
- self.assertEqual(spec, Spec(
- 'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath')))
+ # All these are equal to each other with regular ==
+ specs = (flat, flat_init, flip_flat, dag, flip_dag)
+ for lhs, rhs in zip(specs, specs):
+ self.assertEqual(lhs, rhs)
+ self.assertEqual(str(lhs), str(rhs))
+
+ # Same DAGs constructed different ways are equal
+ self.assertTrue(flat.eq_dag(flat_init))
+
+ # order at same level does not matter -- (dep on same parent)
+ self.assertTrue(flat.eq_dag(flip_flat))
+
+ # DAGs should be unequal if nesting is different
+ self.assertFalse(flat.eq_dag(dag))
+ self.assertFalse(flat.eq_dag(flip_dag))
+ self.assertFalse(flip_flat.eq_dag(dag))
+ self.assertFalse(flip_flat.eq_dag(flip_dag))
+ self.assertFalse(dag.eq_dag(flip_dag))
def test_normalize_mpileaks(self):
+ # Spec parsed in from a string
spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+ # What that spec should look like after parsing
expected_flat = Spec(
'mpileaks', Spec('mpich'), Spec('callpath'), Spec('dyninst'),
Spec('libelf@1.8.11'), Spec('libdwarf'))
+ # What it should look like after normalization
mpich = Spec('mpich')
libelf = Spec('libelf@1.8.11')
expected_normalized = Spec(
@@ -257,7 +325,10 @@ class SpecDagTest(MockPackagesTest):
mpich),
mpich)
- expected_non_unique_nodes = Spec(
+ # Similar to normalized spec, but now with copies of the same
+ # libelf node. Normalization should result in a single unique
+ # node for each package, so this is the wrong DAG.
+ non_unique_nodes = Spec(
'mpileaks',
Spec('callpath',
Spec('dyninst',
@@ -267,21 +338,33 @@ class SpecDagTest(MockPackagesTest):
mpich),
Spec('mpich'))
- self.assertEqual(expected_normalized, expected_non_unique_nodes)
-
- self.assertEqual(str(expected_normalized), str(expected_non_unique_nodes))
- self.assertEqual(str(spec), str(expected_non_unique_nodes))
- self.assertEqual(str(expected_normalized), str(spec))
+ # All specs here should be equal under regular equality
+ specs = (spec, expected_flat, expected_normalized, non_unique_nodes)
+ for lhs, rhs in zip(specs, specs):
+ self.assertEqual(lhs, rhs)
+ self.assertEqual(str(lhs), str(rhs))
+ # Test that equal and equal_dag are doing the right thing
self.assertEqual(spec, expected_flat)
- self.assertNotEqual(spec, expected_normalized)
- self.assertNotEqual(spec, expected_non_unique_nodes)
+ self.assertTrue(spec.eq_dag(expected_flat))
+
+ self.assertEqual(spec, expected_normalized)
+ self.assertFalse(spec.eq_dag(expected_normalized))
+
+ self.assertEqual(spec, non_unique_nodes)
+ self.assertFalse(spec.eq_dag(non_unique_nodes))
spec.normalize()
- self.assertNotEqual(spec, expected_flat)
+ # After normalizing, spec_dag_equal should match the normalized spec.
+ self.assertEqual(spec, expected_flat)
+ self.assertFalse(spec.eq_dag(expected_flat))
+
self.assertEqual(spec, expected_normalized)
- self.assertEqual(spec, expected_non_unique_nodes)
+ self.assertTrue(spec.eq_dag(expected_normalized))
+
+ self.assertEqual(spec, non_unique_nodes)
+ self.assertFalse(spec.eq_dag(non_unique_nodes))
def test_normalize_with_virtual_package(self):
@@ -302,10 +385,63 @@ class SpecDagTest(MockPackagesTest):
def test_contains(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
- self.assertIn(Spec('mpi'), spec)
- self.assertIn(Spec('libelf'), spec)
- self.assertIn(Spec('libelf@1.8.11'), spec)
- self.assertNotIn(Spec('libelf@1.8.12'), spec)
- self.assertIn(Spec('libdwarf'), spec)
- self.assertNotIn(Spec('libgoblin'), spec)
- self.assertIn(Spec('mpileaks'), spec)
+ self.assertTrue(Spec('mpi') in spec)
+ self.assertTrue(Spec('libelf') in spec)
+ self.assertTrue(Spec('libelf@1.8.11') in spec)
+ self.assertFalse(Spec('libelf@1.8.12') in spec)
+ self.assertTrue(Spec('libdwarf') in spec)
+ self.assertFalse(Spec('libgoblin') in spec)
+ self.assertTrue(Spec('mpileaks') in spec)
+
+
+ def test_copy_simple(self):
+ orig = Spec('mpileaks')
+ copy = orig.copy()
+
+ self.check_links(copy)
+
+ self.assertEqual(orig, copy)
+ self.assertTrue(orig.eq_dag(copy))
+ self.assertEqual(orig._normal, copy._normal)
+ self.assertEqual(orig._concrete, copy._concrete)
+
+ # ensure no shared nodes bt/w orig and copy.
+ orig_ids = set(id(s) for s in orig.traverse())
+ copy_ids = set(id(s) for s in copy.traverse())
+ self.assertFalse(orig_ids.intersection(copy_ids))
+
+
+ def test_copy_normalized(self):
+ orig = Spec('mpileaks')
+ orig.normalize()
+ copy = orig.copy()
+
+ self.check_links(copy)
+
+ self.assertEqual(orig, copy)
+ self.assertTrue(orig.eq_dag(copy))
+ self.assertEqual(orig._normal, copy._normal)
+ self.assertEqual(orig._concrete, copy._concrete)
+
+ # ensure no shared nodes bt/w orig and copy.
+ orig_ids = set(id(s) for s in orig.traverse())
+ copy_ids = set(id(s) for s in copy.traverse())
+ self.assertFalse(orig_ids.intersection(copy_ids))
+
+
+ def test_copy_concretized(self):
+ orig = Spec('mpileaks')
+ orig.concretize()
+ copy = orig.copy()
+
+ self.check_links(copy)
+
+ self.assertEqual(orig, copy)
+ self.assertTrue(orig.eq_dag(copy))
+ self.assertEqual(orig._normal, copy._normal)
+ self.assertEqual(orig._concrete, copy._concrete)
+
+ # ensure no shared nodes bt/w orig and copy.
+ orig_ids = set(id(s) for s in orig.traverse())
+ copy_ids = set(id(s) for s in copy.traverse())
+ self.assertFalse(orig_ids.intersection(copy_ids))
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 08899f9810..a412549dc7 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -51,28 +51,20 @@ readme_text = "hello world!\n"
stage_name = 'spack-test-stage'
-class with_tmp(object):
- """Decorator that executes a function with or without spack set to use
- a temp dir. Spack allows builds to happen directly in the
- stage directory or in a tmp dir and symlinked into the stage
- directory, so this lets us use the same test in both cases.
+@contextmanager
+def use_tmp(use_tmp):
+ """Allow some test code to be executed with spack.use_tmp_stage
+ set to a certain value. Context manager makes sure it's reset
+ on failure.
"""
- def __init__(self, use_tmp):
- self.use_tmp = use_tmp
-
- def __call__(self, fun):
- use_tmp = self.use_tmp
- def new_test_function(self):
- old_tmp = spack.use_tmp_stage
- spack.use_tmp_stage = use_tmp
- fun(self)
- spack.use_tmp_stage = old_tmp
- return new_test_function
+ old_tmp = spack.use_tmp_stage
+ spack.use_tmp_stage = use_tmp
+ yield
+ spack.use_tmp_stage = old_tmp
class StageTest(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
+ def setUp(self):
"""This sets up a mock archive to fetch, and a mock temp space for use
by the Stage class. It doesn't actually create the Stage -- that
is done by individual tests.
@@ -92,52 +84,58 @@ class StageTest(unittest.TestCase):
tar('czf', archive_name, archive_dir)
# Make spack use the test environment for tmp stuff.
- cls.old_tmp_dirs = spack.tmp_dirs
+ self.old_tmp_dirs = spack.tmp_dirs
spack.tmp_dirs = [test_tmp_path]
+ # record this since this test changes to directories that will
+ # be removed.
+ self.working_dir = os.getcwd()
+
- @classmethod
- def tearDownClass(cls):
+ def tearDown(self):
"""Blows away the test environment directory."""
shutil.rmtree(test_files_dir)
+ # chdir back to original working dir
+ os.chdir(self.working_dir)
+
# restore spack's original tmp environment
- spack.tmp_dirs = cls.old_tmp_dirs
+ spack.tmp_dirs = self.old_tmp_dirs
def get_stage_path(self, stage, stage_name):
- """Figure out based on a stage and an intended name where it should
- be living. This depends on whether it's named or not.
+ """Figure out where a stage should be living. This depends on
+ whether it's named.
"""
- if stage_name:
+ if stage_name is not None:
# If it is a named stage, we know where the stage should be
- stage_path = join_path(spack.stage_path, stage_name)
+ return join_path(spack.stage_path, stage_name)
else:
# If it's unnamed, ensure that we ran mkdtemp in the right spot.
- stage_path = stage.path
- self.assertIsNotNone(stage_path)
- self.assertEqual(
- os.path.commonprefix((stage_path, spack.stage_path)),
- spack.stage_path)
- return stage_path
+ self.assertTrue(stage.path is not None)
+ self.assertTrue(stage.path.startswith(spack.stage_path))
+ return stage.path
def check_setup(self, stage, stage_name):
"""Figure out whether a stage was set up correctly."""
stage_path = self.get_stage_path(stage, stage_name)
+
+ # Ensure stage was created in the spack stage directory
self.assertTrue(os.path.isdir(stage_path))
if spack.use_tmp_stage:
- # Make sure everything was created and linked correctly for
- # a tmp stage.
+ # Check that the stage dir is really a symlink.
self.assertTrue(os.path.islink(stage_path))
+ # Make sure it points to a valid directory
target = os.path.realpath(stage_path)
self.assertTrue(os.path.isdir(target))
self.assertFalse(os.path.islink(target))
- self.assertEqual(
- os.path.commonprefix((target, test_tmp_path)),
- test_tmp_path)
+
+ # Make sure the directory is in the place we asked it to
+ # be (see setUp and tearDown)
+ self.assertTrue(target.startswith(test_tmp_path))
else:
# Make sure the stage path is NOT a link for a non-tmp stage
@@ -146,15 +144,15 @@ class StageTest(unittest.TestCase):
def check_fetch(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
- self.assertIn(archive_name, os.listdir(stage_path))
+ self.assertTrue(archive_name in os.listdir(stage_path))
self.assertEqual(join_path(stage_path, archive_name),
stage.archive_file)
def check_expand_archive(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
- self.assertIn(archive_name, os.listdir(stage_path))
- self.assertIn(archive_dir, os.listdir(stage_path))
+ self.assertTrue(archive_name in os.listdir(stage_path))
+ self.assertTrue(archive_dir in os.listdir(stage_path))
self.assertEqual(
join_path(stage_path, archive_dir),
@@ -192,32 +190,40 @@ class StageTest(unittest.TestCase):
self.assertFalse(os.path.exists(target))
- def checkSetupAndDestroy(self, stage_name=None):
- stage = Stage(archive_url, name=stage_name)
- self.check_setup(stage, stage_name)
-
- stage.destroy()
- self.check_destroy(stage, stage_name)
-
-
- @with_tmp(True)
def test_setup_and_destroy_name_with_tmp(self):
- self.checkSetupAndDestroy(stage_name)
+ with use_tmp(True):
+ stage = Stage(archive_url, name=stage_name)
+ self.check_setup(stage, stage_name)
+
+ stage.destroy()
+ self.check_destroy(stage, stage_name)
- @with_tmp(False)
def test_setup_and_destroy_name_without_tmp(self):
- self.checkSetupAndDestroy(stage_name)
+ with use_tmp(False):
+ stage = Stage(archive_url, name=stage_name)
+ self.check_setup(stage, stage_name)
+
+ stage.destroy()
+ self.check_destroy(stage, stage_name)
- @with_tmp(True)
def test_setup_and_destroy_no_name_with_tmp(self):
- self.checkSetupAndDestroy(None)
+ with use_tmp(True):
+ stage = Stage(archive_url)
+ self.check_setup(stage, None)
+
+ stage.destroy()
+ self.check_destroy(stage, None)
- @with_tmp(False)
def test_setup_and_destroy_no_name_without_tmp(self):
- self.checkSetupAndDestroy(None)
+ with use_tmp(False):
+ stage = Stage(archive_url)
+ self.check_setup(stage, None)
+
+ stage.destroy()
+ self.check_destroy(stage, None)
def test_chdir(self):
@@ -286,7 +292,7 @@ class StageTest(unittest.TestCase):
with closing(open('foobar', 'w')) as file:
file.write("this file is to be destroyed.")
- self.assertIn('foobar', os.listdir(stage.expanded_archive_path))
+ self.assertTrue('foobar' in os.listdir(stage.expanded_archive_path))
# Make sure the file is not there after restage.
stage.restage()
@@ -295,7 +301,7 @@ class StageTest(unittest.TestCase):
stage.chdir_to_archive()
self.check_chdir_to_archive(stage, stage_name)
- self.assertNotIn('foobar', os.listdir(stage.expanded_archive_path))
+ self.assertFalse('foobar' in os.listdir(stage.expanded_archive_path))
stage.destroy()
self.check_destroy(stage, stage_name)
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 4d8681bed9..950e807596 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -35,7 +35,7 @@ _acceptable_hashes = [
hashlib.sha512 ]
"""Index for looking up hasher for a digest."""
-_size_to_hash = { h().digest_size : h for h in _acceptable_hashes }
+_size_to_hash = dict((h().digest_size, h) for h in _acceptable_hashes)
def checksum(hashlib_algo, filename, **kwargs):
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index bc27b25889..923c7c19a5 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -121,7 +121,7 @@ def which(name, **kwargs):
for dir in path:
exe = os.path.join(dir, name)
- if os.access(exe, os.X_OK):
+ if os.path.isfile(exe) and os.access(exe, os.X_OK):
return Executable(exe)
if required:
diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py
index e7a9a6862e..234163bf52 100644
--- a/lib/spack/spack/util/string.py
+++ b/lib/spack/spack/util/string.py
@@ -33,7 +33,9 @@ def comma_list(sequence, article=''):
return sequence[0]
else:
out = ', '.join(str(s) for s in sequence[:-1])
- out += ', '
+ if len(sequence) != 2:
+ out += ',' # oxford comma
+ out += ' '
if article:
out += article + ' '
out += str(sequence[-1])
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 4558f88384..fbf86db8e1 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -47,7 +47,8 @@ import os
import sys
import re
from bisect import bisect_left
-from functools import total_ordering, wraps
+from functools import wraps
+from external.functools import total_ordering
import llnl.util.compare.none_high as none_high
import llnl.util.compare.none_low as none_low