diff options
author | Todd Gamblin <tgamblin@llnl.gov> | 2014-09-18 23:54:26 -0700 |
---|---|---|
committer | Todd Gamblin <tgamblin@llnl.gov> | 2014-09-18 23:54:26 -0700 |
commit | de030c9932d16bd7204f5e4fc13741da2233948e (patch) | |
tree | 7b83da7a5e4097c13c699f1a5508d6bdada712d5 | |
parent | 5829b44648f809a09d006b044d8244254a3d224a (diff) | |
parent | fa5594e13fa045ab9de6873274f21008e2f773ad (diff) | |
download | spack-de030c9932d16bd7204f5e4fc13741da2233948e.tar.gz spack-de030c9932d16bd7204f5e4fc13741da2233948e.tar.bz2 spack-de030c9932d16bd7204f5e4fc13741da2233948e.tar.xz spack-de030c9932d16bd7204f5e4fc13741da2233948e.zip |
Merge branch 'develop'
140 files changed, 6584 insertions, 941 deletions
diff --git a/.gitignore b/.gitignore index d6e0d3e60c..828fb04e7d 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ .idea /etc/spackconfig /share/spack/dotkit +/share/spack/modules @@ -35,4 +35,20 @@ for Spack is also available. Authors ---------------- Spack was written by Todd Gamblin, tgamblin@llnl.gov. -LLNL-CODE-647188 + +Significant contributions were also made by the following awesome +people: + + * David Beckingsale + * David Boehme + * Luc Jaulmes + * Matt Legendre + * Greg Lee + * Adam Moody + +Release +---------------- +Spack is released under an LGPL license. For more details see the +LICENSE file. + +``LLNL-CODE-647188`` @@ -24,11 +24,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys -if not sys.version_info[:2] >= (2,7): - sys.exit("Spack requires Python 2.7. Version was %s." % sys.version_info) +if not sys.version_info[:2] >= (2,6): + sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info) import os -import argparse # Find spack's location and its prefix. SPACK_FILE = os.path.realpath(os.path.expanduser(__file__)) @@ -51,6 +50,7 @@ del SPACK_FILE, SPACK_PREFIX, SPACK_LIB_PATH import llnl.util.tty as tty import spack from spack.error import SpackError +from external import argparse # Command parsing parser = argparse.ArgumentParser( @@ -75,6 +75,13 @@ for cmd in spack.cmd.commands: module = spack.cmd.get_module(cmd) subparser = subparsers.add_parser(cmd, help=module.description) module.setup_parser(subparser) + +# Just print help and exit if run with no arguments at all +if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + +# actually parse the args. args = parser.parse_args() # Set up environment based on args. 
diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 9e71d25caf..266e41cb48 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -1,12 +1,11 @@ #!/usr/bin/env python import sys -if not sys.version_info[:2] >= (2,7): - sys.exit("Spack requires Python 2.7. Version was %s." % sys.version_info) +if not sys.version_info[:2] >= (2,6): + sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info) import os import re import subprocess -import argparse from contextlib import closing # Import spack parameters through the build environment. @@ -18,6 +17,7 @@ if not spack_lib: # Grab a minimal set of spack packages sys.path.append(spack_lib) from spack.compilation import * +from external import argparse import llnl.util.tty as tty spack_prefix = get_env_var("SPACK_PREFIX") diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py new file mode 100644 index 0000000000..1cc981930a --- /dev/null +++ b/lib/spack/external/__init__.py @@ -0,0 +1,33 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +This module contains external, potentially separately licensed, +packages that are included in spack. + +So far: + argparse: We include our own version to be Python 2.6 compatible. + pyqver2: External script to query required python version of python source code. + Used for ensuring 2.6 compatibility. +""" diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py new file mode 100644 index 0000000000..c8dfdd3bed --- /dev/null +++ b/lib/spack/external/argparse.py @@ -0,0 +1,2382 @@ +# argparse is (c) 2006-2009 Steven J. Bethard <steven.bethard@gmail.com>. +# +# The argparse module was contributed to Python as of Python 2.7 and thus +# was licensed under the Python license. Same license applies to all files in +# the argparse package project. +# +# For details about the Python License, please see doc/Python-License.txt. +# +# History +# ------- +# +# Before (and including) argparse 1.1, the argparse package was licensed under +# Apache License v2.0. +# +# After argparse 1.1, all project files from the argparse project were deleted +# due to license compatibility issues between Apache License 2.0 and GNU GPL v2. +# +# The project repository then had a clean start with some files taken from +# Python 2.7.1, so definitely all files are under Python License now. +# +# Author: Steven J. Bethard <steven.bethard@gmail.com>. 
+# +"""Command-line parsing library + +This module is an optparse-inspired command-line parsing library that: + + - handles both optional and positional arguments + - produces highly informative usage messages + - supports parsers that dispatch to sub-parsers + +The following is a simple usage example that sums integers from the +command-line and writes the result to a file:: + + parser = argparse.ArgumentParser( + description='sum the integers at the command line') + parser.add_argument( + 'integers', metavar='int', nargs='+', type=int, + help='an integer to be summed') + parser.add_argument( + '--log', default=sys.stdout, type=argparse.FileType('w'), + help='the file where the sum should be written') + args = parser.parse_args() + args.log.write('%s' % sum(args.integers)) + args.log.close() + +The module contains the following public classes: + + - ArgumentParser -- The main entry point for command-line parsing. As the + example above shows, the add_argument() method is used to populate + the parser with actions for optional and positional arguments. Then + the parse_args() method is invoked to convert the args at the + command-line into an object with attributes. + + - ArgumentError -- The exception raised by ArgumentParser objects when + there are errors with the parser's actions. Errors raised while + parsing the command-line are caught by ArgumentParser and emitted + as command-line messages. + + - FileType -- A factory for defining types of files to be created. As the + example above shows, instances of FileType are typically passed as + the type= argument of add_argument() calls. + + - Action -- The base class for parser actions. Typically actions are + selected by passing strings like 'store_true' or 'append_const' to + the action= argument of add_argument(). However, for greater + customization of ArgumentParser actions, subclasses of Action may + be defined and passed as the action= argument. 
+ + - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, + ArgumentDefaultsHelpFormatter -- Formatter classes which + may be passed as the formatter_class= argument to the + ArgumentParser constructor. HelpFormatter is the default, + RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser + not to change the formatting for help text, and + ArgumentDefaultsHelpFormatter adds information about argument defaults + to the help. + +All other classes in this module are considered implementation details. +(Also note that HelpFormatter and RawDescriptionHelpFormatter are only +considered public as object names -- the API of the formatter objects is +still considered an implementation detail.) +""" + +__version__ = '1.2.1' +__all__ = [ + 'ArgumentParser', + 'ArgumentError', + 'ArgumentTypeError', + 'FileType', + 'HelpFormatter', + 'ArgumentDefaultsHelpFormatter', + 'RawDescriptionHelpFormatter', + 'RawTextHelpFormatter', + 'Namespace', + 'Action', + 'ONE_OR_MORE', + 'OPTIONAL', + 'PARSER', + 'REMAINDER', + 'SUPPRESS', + 'ZERO_OR_MORE', +] + + +import copy as _copy +import os as _os +import re as _re +import sys as _sys +import textwrap as _textwrap + +from gettext import gettext as _ + +try: + set +except NameError: + # for python < 2.4 compatibility (sets module is there since 2.3): + from sets import Set as set + +try: + basestring +except NameError: + basestring = str + +try: + sorted +except NameError: + # for python < 2.4 compatibility: + def sorted(iterable, reverse=False): + result = list(iterable) + result.sort() + if reverse: + result.reverse() + return result + + +def _callable(obj): + return hasattr(obj, '__call__') or hasattr(obj, '__bases__') + + +SUPPRESS = '==SUPPRESS==' + +OPTIONAL = '?' +ZERO_OR_MORE = '*' +ONE_OR_MORE = '+' +PARSER = 'A...' +REMAINDER = '...' 
+_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args' + +# ============================= +# Utility functions and classes +# ============================= + +class _AttributeHolder(object): + """Abstract base class that provides __repr__. + + The __repr__ method returns a string in the format:: + ClassName(attr=name, attr=name, ...) + The attributes are determined either by a class-level attribute, + '_kwarg_names', or by inspecting the instance __dict__. + """ + + def __repr__(self): + type_name = type(self).__name__ + arg_strings = [] + for arg in self._get_args(): + arg_strings.append(repr(arg)) + for name, value in self._get_kwargs(): + arg_strings.append('%s=%r' % (name, value)) + return '%s(%s)' % (type_name, ', '.join(arg_strings)) + + def _get_kwargs(self): + return sorted(self.__dict__.items()) + + def _get_args(self): + return [] + + +def _ensure_value(namespace, name, value): + if getattr(namespace, name, None) is None: + setattr(namespace, name, value) + return getattr(namespace, name) + + +# =============== +# Formatting Help +# =============== + +class HelpFormatter(object): + """Formatter for generating usage messages and argument help strings. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. 
+ """ + + def __init__(self, + prog, + indent_increment=2, + max_help_position=24, + width=None): + + # default setting for width + if width is None: + try: + width = int(_os.environ['COLUMNS']) + except (KeyError, ValueError): + width = 80 + width -= 2 + + self._prog = prog + self._indent_increment = indent_increment + self._max_help_position = max_help_position + self._width = width + + self._current_indent = 0 + self._level = 0 + self._action_max_length = 0 + + self._root_section = self._Section(self, None) + self._current_section = self._root_section + + self._whitespace_matcher = _re.compile(r'\s+') + self._long_break_matcher = _re.compile(r'\n\n\n+') + + # =============================== + # Section and indentation methods + # =============================== + def _indent(self): + self._current_indent += self._indent_increment + self._level += 1 + + def _dedent(self): + self._current_indent -= self._indent_increment + assert self._current_indent >= 0, 'Indent decreased below 0.' + self._level -= 1 + + class _Section(object): + + def __init__(self, formatter, parent, heading=None): + self.formatter = formatter + self.parent = parent + self.heading = heading + self.items = [] + + def format_help(self): + # format the indented section + if self.parent is not None: + self.formatter._indent() + join = self.formatter._join_parts + for func, args in self.items: + func(*args) + item_help = join([func(*args) for func, args in self.items]) + if self.parent is not None: + self.formatter._dedent() + + # return nothing if the section was empty + if not item_help: + return '' + + # add the heading if the section was non-empty + if self.heading is not SUPPRESS and self.heading is not None: + current_indent = self.formatter._current_indent + heading = '%*s%s:\n' % (current_indent, '', self.heading) + else: + heading = '' + + # join the section-initial newline, the heading and the help + return join(['\n', heading, item_help, '\n']) + + def _add_item(self, func, args): + 
self._current_section.items.append((func, args)) + + # ======================== + # Message building methods + # ======================== + def start_section(self, heading): + self._indent() + section = self._Section(self, self._current_section, heading) + self._add_item(section.format_help, []) + self._current_section = section + + def end_section(self): + self._current_section = self._current_section.parent + self._dedent() + + def add_text(self, text): + if text is not SUPPRESS and text is not None: + self._add_item(self._format_text, [text]) + + def add_usage(self, usage, actions, groups, prefix=None): + if usage is not SUPPRESS: + args = usage, actions, groups, prefix + self._add_item(self._format_usage, args) + + def add_argument(self, action): + if action.help is not SUPPRESS: + + # find all invocations + get_invocation = self._format_action_invocation + invocations = [get_invocation(action)] + for subaction in self._iter_indented_subactions(action): + invocations.append(get_invocation(subaction)) + + # update the maximum item length + invocation_length = max([len(s) for s in invocations]) + action_length = invocation_length + self._current_indent + self._action_max_length = max(self._action_max_length, + action_length) + + # add the item to the list + self._add_item(self._format_action, [action]) + + def add_arguments(self, actions): + for action in actions: + self.add_argument(action) + + # ======================= + # Help-formatting methods + # ======================= + def format_help(self): + help = self._root_section.format_help() + if help: + help = self._long_break_matcher.sub('\n\n', help) + help = help.strip('\n') + '\n' + return help + + def _join_parts(self, part_strings): + return ''.join([part + for part in part_strings + if part and part is not SUPPRESS]) + + def _format_usage(self, usage, actions, groups, prefix): + if prefix is None: + prefix = _('usage: ') + + # if usage is specified, use that + if usage is not None: + usage = usage % 
dict(prog=self._prog) + + # if no optionals or positionals are available, usage is just prog + elif usage is None and not actions: + usage = '%(prog)s' % dict(prog=self._prog) + + # if optionals and positionals are available, calculate usage + elif usage is None: + prog = '%(prog)s' % dict(prog=self._prog) + + # split optionals from positionals + optionals = [] + positionals = [] + for action in actions: + if action.option_strings: + optionals.append(action) + else: + positionals.append(action) + + # build full usage string + format = self._format_actions_usage + action_usage = format(optionals + positionals, groups) + usage = ' '.join([s for s in [prog, action_usage] if s]) + + # wrap the usage parts if it's too long + text_width = self._width - self._current_indent + if len(prefix) + len(usage) > text_width: + + # break usage into wrappable parts + part_regexp = r'\(.*?\)+|\[.*?\]+|\S+' + opt_usage = format(optionals, groups) + pos_usage = format(positionals, groups) + opt_parts = _re.findall(part_regexp, opt_usage) + pos_parts = _re.findall(part_regexp, pos_usage) + assert ' '.join(opt_parts) == opt_usage + assert ' '.join(pos_parts) == pos_usage + + # helper for wrapping lines + def get_lines(parts, indent, prefix=None): + lines = [] + line = [] + if prefix is not None: + line_len = len(prefix) - 1 + else: + line_len = len(indent) - 1 + for part in parts: + if line_len + 1 + len(part) > text_width: + lines.append(indent + ' '.join(line)) + line = [] + line_len = len(indent) - 1 + line.append(part) + line_len += len(part) + 1 + if line: + lines.append(indent + ' '.join(line)) + if prefix is not None: + lines[0] = lines[0][len(indent):] + return lines + + # if prog is short, follow it with optionals or positionals + if len(prefix) + len(prog) <= 0.75 * text_width: + indent = ' ' * (len(prefix) + len(prog) + 1) + if opt_parts: + lines = get_lines([prog] + opt_parts, indent, prefix) + lines.extend(get_lines(pos_parts, indent)) + elif pos_parts: + lines = 
get_lines([prog] + pos_parts, indent, prefix) + else: + lines = [prog] + + # if prog is long, put it on its own line + else: + indent = ' ' * len(prefix) + parts = opt_parts + pos_parts + lines = get_lines(parts, indent) + if len(lines) > 1: + lines = [] + lines.extend(get_lines(opt_parts, indent)) + lines.extend(get_lines(pos_parts, indent)) + lines = [prog] + lines + + # join lines into usage + usage = '\n'.join(lines) + + # prefix with 'usage:' + return '%s%s\n\n' % (prefix, usage) + + def _format_actions_usage(self, actions, groups): + # find group indices and identify actions in groups + group_actions = set() + inserts = {} + for group in groups: + try: + start = actions.index(group._group_actions[0]) + except ValueError: + continue + else: + end = start + len(group._group_actions) + if actions[start:end] == group._group_actions: + for action in group._group_actions: + group_actions.add(action) + if not group.required: + if start in inserts: + inserts[start] += ' [' + else: + inserts[start] = '[' + inserts[end] = ']' + else: + if start in inserts: + inserts[start] += ' (' + else: + inserts[start] = '(' + inserts[end] = ')' + for i in range(start + 1, end): + inserts[i] = '|' + + # collect all actions format strings + parts = [] + for i, action in enumerate(actions): + + # suppressed arguments are marked with None + # remove | separators for suppressed arguments + if action.help is SUPPRESS: + parts.append(None) + if inserts.get(i) == '|': + inserts.pop(i) + elif inserts.get(i + 1) == '|': + inserts.pop(i + 1) + + # produce all arg strings + elif not action.option_strings: + part = self._format_args(action, action.dest) + + # if it's in a group, strip the outer [] + if action in group_actions: + if part[0] == '[' and part[-1] == ']': + part = part[1:-1] + + # add the action string to the list + parts.append(part) + + # produce the first way to invoke the option in brackets + else: + option_string = action.option_strings[0] + + # if the Optional doesn't take a 
value, format is: + # -s or --long + if action.nargs == 0: + part = '%s' % option_string + + # if the Optional takes a value, format is: + # -s ARGS or --long ARGS + else: + default = action.dest.upper() + args_string = self._format_args(action, default) + part = '%s %s' % (option_string, args_string) + + # make it look optional if it's not required or in a group + if not action.required and action not in group_actions: + part = '[%s]' % part + + # add the action string to the list + parts.append(part) + + # insert things at the necessary indices + for i in sorted(inserts, reverse=True): + parts[i:i] = [inserts[i]] + + # join all the action items with spaces + text = ' '.join([item for item in parts if item is not None]) + + # clean up separators for mutually exclusive groups + open = r'[\[(]' + close = r'[\])]' + text = _re.sub(r'(%s) ' % open, r'\1', text) + text = _re.sub(r' (%s)' % close, r'\1', text) + text = _re.sub(r'%s *%s' % (open, close), r'', text) + text = _re.sub(r'\(([^|]*)\)', r'\1', text) + text = text.strip() + + # return the text + return text + + def _format_text(self, text): + if '%(prog)' in text: + text = text % dict(prog=self._prog) + text_width = self._width - self._current_indent + indent = ' ' * self._current_indent + return self._fill_text(text, text_width, indent) + '\n\n' + + def _format_action(self, action): + # determine the required width and the entry label + help_position = min(self._action_max_length + 2, + self._max_help_position) + help_width = self._width - help_position + action_width = help_position - self._current_indent - 2 + action_header = self._format_action_invocation(action) + + # ho nelp; start on same line and add a final newline + if not action.help: + tup = self._current_indent, '', action_header + action_header = '%*s%s\n' % tup + + # short action name; start on the same line and pad two spaces + elif len(action_header) <= action_width: + tup = self._current_indent, '', action_width, action_header + action_header 
= '%*s%-*s ' % tup + indent_first = 0 + + # long action name; start on the next line + else: + tup = self._current_indent, '', action_header + action_header = '%*s%s\n' % tup + indent_first = help_position + + # collect the pieces of the action help + parts = [action_header] + + # if there was help for the action, add lines of help text + if action.help: + help_text = self._expand_help(action) + help_lines = self._split_lines(help_text, help_width) + parts.append('%*s%s\n' % (indent_first, '', help_lines[0])) + for line in help_lines[1:]: + parts.append('%*s%s\n' % (help_position, '', line)) + + # or add a newline if the description doesn't end with one + elif not action_header.endswith('\n'): + parts.append('\n') + + # if there are any sub-actions, add their help as well + for subaction in self._iter_indented_subactions(action): + parts.append(self._format_action(subaction)) + + # return a single string + return self._join_parts(parts) + + def _format_action_invocation(self, action): + if not action.option_strings: + metavar, = self._metavar_formatter(action, action.dest)(1) + return metavar + + else: + parts = [] + + # if the Optional doesn't take a value, format is: + # -s, --long + if action.nargs == 0: + parts.extend(action.option_strings) + + # if the Optional takes a value, format is: + # -s ARGS, --long ARGS + else: + default = action.dest.upper() + args_string = self._format_args(action, default) + for option_string in action.option_strings: + parts.append('%s %s' % (option_string, args_string)) + + return ', '.join(parts) + + def _metavar_formatter(self, action, default_metavar): + if action.metavar is not None: + result = action.metavar + elif action.choices is not None: + choice_strs = [str(choice) for choice in action.choices] + result = '{%s}' % ','.join(choice_strs) + else: + result = default_metavar + + def format(tuple_size): + if isinstance(result, tuple): + return result + else: + return (result, ) * tuple_size + return format + + def 
_format_args(self, action, default_metavar): + get_metavar = self._metavar_formatter(action, default_metavar) + if action.nargs is None: + result = '%s' % get_metavar(1) + elif action.nargs == OPTIONAL: + result = '[%s]' % get_metavar(1) + elif action.nargs == ZERO_OR_MORE: + result = '[%s [%s ...]]' % get_metavar(2) + elif action.nargs == ONE_OR_MORE: + result = '%s [%s ...]' % get_metavar(2) + elif action.nargs == REMAINDER: + result = '...' + elif action.nargs == PARSER: + result = '%s ...' % get_metavar(1) + else: + formats = ['%s' for _ in range(action.nargs)] + result = ' '.join(formats) % get_metavar(action.nargs) + return result + + def _expand_help(self, action): + params = dict(vars(action), prog=self._prog) + for name in list(params): + if params[name] is SUPPRESS: + del params[name] + for name in list(params): + if hasattr(params[name], '__name__'): + params[name] = params[name].__name__ + if params.get('choices') is not None: + choices_str = ', '.join([str(c) for c in params['choices']]) + params['choices'] = choices_str + return self._get_help_string(action) % params + + def _iter_indented_subactions(self, action): + try: + get_subactions = action._get_subactions + except AttributeError: + pass + else: + self._indent() + for subaction in get_subactions(): + yield subaction + self._dedent() + + def _split_lines(self, text, width): + text = self._whitespace_matcher.sub(' ', text).strip() + return _textwrap.wrap(text, width) + + def _fill_text(self, text, width, indent): + text = self._whitespace_matcher.sub(' ', text).strip() + return _textwrap.fill(text, width, initial_indent=indent, + subsequent_indent=indent) + + def _get_help_string(self, action): + return action.help + + +class RawDescriptionHelpFormatter(HelpFormatter): + """Help message formatter which retains any formatting in descriptions. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. 
+ """ + + def _fill_text(self, text, width, indent): + return ''.join([indent + line for line in text.splitlines(True)]) + + +class RawTextHelpFormatter(RawDescriptionHelpFormatter): + """Help message formatter which retains formatting of all help text. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + + def _split_lines(self, text, width): + return text.splitlines() + + +class ArgumentDefaultsHelpFormatter(HelpFormatter): + """Help message formatter which adds default values to argument help. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + + def _get_help_string(self, action): + help = action.help + if '%(default)' not in action.help: + if action.default is not SUPPRESS: + defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] + if action.option_strings or action.nargs in defaulting_nargs: + help += ' (default: %(default)s)' + return help + + +# ===================== +# Options and Arguments +# ===================== + +def _get_action_name(argument): + if argument is None: + return None + elif argument.option_strings: + return '/'.join(argument.option_strings) + elif argument.metavar not in (None, SUPPRESS): + return argument.metavar + elif argument.dest not in (None, SUPPRESS): + return argument.dest + else: + return None + + +class ArgumentError(Exception): + """An error from creating or using an argument (optional or positional). + + The string value of this exception is the message, augmented with + information about the argument that caused it. 
+ """ + + def __init__(self, argument, message): + self.argument_name = _get_action_name(argument) + self.message = message + + def __str__(self): + if self.argument_name is None: + format = '%(message)s' + else: + format = 'argument %(argument_name)s: %(message)s' + return format % dict(message=self.message, + argument_name=self.argument_name) + + +class ArgumentTypeError(Exception): + """An error from trying to convert a command line string to a type.""" + pass + + +# ============== +# Action classes +# ============== + +class Action(_AttributeHolder): + """Information about how to convert command line strings to Python objects. + + Action objects are used by an ArgumentParser to represent the information + needed to parse a single argument from one or more strings from the + command line. The keyword arguments to the Action constructor are also + all attributes of Action instances. + + Keyword Arguments: + + - option_strings -- A list of command-line option strings which + should be associated with this action. + + - dest -- The name of the attribute to hold the created object(s) + + - nargs -- The number of command-line arguments that should be + consumed. By default, one argument will be consumed and a single + value will be produced. Other values include: + - N (an integer) consumes N arguments (and produces a list) + - '?' consumes zero or one arguments + - '*' consumes zero or more arguments (and produces a list) + - '+' consumes one or more arguments (and produces a list) + Note that the difference between the default and nargs=1 is that + with the default, a single value will be produced, while with + nargs=1, a list containing a single value will be produced. + + - const -- The value to be produced if the option is specified and the + option uses an action that takes no values. + + - default -- The value to be produced if the option is not specified. 
+ + - type -- The type which the command-line arguments should be converted + to, should be one of 'string', 'int', 'float', 'complex' or a + callable object that accepts a single string argument. If None, + 'string' is assumed. + + - choices -- A container of values that should be allowed. If not None, + after a command-line argument has been converted to the appropriate + type, an exception will be raised if it is not a member of this + collection. + + - required -- True if the action must always be specified at the + command line. This is only meaningful for optional command-line + arguments. + + - help -- The help string describing the argument. + + - metavar -- The name to be used for the option's argument with the + help string. If None, the 'dest' value will be used as the name. + """ + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + self.option_strings = option_strings + self.dest = dest + self.nargs = nargs + self.const = const + self.default = default + self.type = type + self.choices = choices + self.required = required + self.help = help + self.metavar = metavar + + def _get_kwargs(self): + names = [ + 'option_strings', + 'dest', + 'nargs', + 'const', + 'default', + 'type', + 'choices', + 'help', + 'metavar', + ] + return [(name, getattr(self, name)) for name in names] + + def __call__(self, parser, namespace, values, option_string=None): + raise NotImplementedError(_('.__call__() not defined')) + + +class _StoreAction(Action): + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + if nargs == 0: + raise ValueError('nargs for store actions must be > 0; if you ' + 'have nothing to store, actions such as store ' + 'true or store const may be more appropriate') + if const is not None and nargs != OPTIONAL: + raise 
ValueError('nargs must be %r to supply const' % OPTIONAL) + super(_StoreAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=nargs, + const=const, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, values) + + +class _StoreConstAction(Action): + + def __init__(self, + option_strings, + dest, + const, + default=None, + required=False, + help=None, + metavar=None): + super(_StoreConstAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=const, + default=default, + required=required, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + +class _StoreTrueAction(_StoreConstAction): + + def __init__(self, + option_strings, + dest, + default=False, + required=False, + help=None): + super(_StoreTrueAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + required=required, + help=help) + + +class _StoreFalseAction(_StoreConstAction): + + def __init__(self, + option_strings, + dest, + default=True, + required=False, + help=None): + super(_StoreFalseAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=False, + default=default, + required=required, + help=help) + + +class _AppendAction(Action): + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + if nargs == 0: + raise ValueError('nargs for append actions must be > 0; if arg ' + 'strings are not supplying the value to append, ' + 'the append const action may be more appropriate') + if const is not None and nargs != OPTIONAL: + raise ValueError('nargs must be %r to supply const' % OPTIONAL) + super(_AppendAction, self).__init__( + 
option_strings=option_strings, + dest=dest, + nargs=nargs, + const=const, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + items = _copy.copy(_ensure_value(namespace, self.dest, [])) + items.append(values) + setattr(namespace, self.dest, items) + + +class _AppendConstAction(Action): + + def __init__(self, + option_strings, + dest, + const, + default=None, + required=False, + help=None, + metavar=None): + super(_AppendConstAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=const, + default=default, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + items = _copy.copy(_ensure_value(namespace, self.dest, [])) + items.append(self.const) + setattr(namespace, self.dest, items) + + +class _CountAction(Action): + + def __init__(self, + option_strings, + dest, + default=None, + required=False, + help=None): + super(_CountAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = _ensure_value(namespace, self.dest, 0) + 1 + setattr(namespace, self.dest, new_count) + + +class _HelpAction(Action): + + def __init__(self, + option_strings, + dest=SUPPRESS, + default=SUPPRESS, + help=None): + super(_HelpAction, self).__init__( + option_strings=option_strings, + dest=dest, + default=default, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + parser.print_help() + parser.exit() + + +class _VersionAction(Action): + + def __init__(self, + option_strings, + version=None, + dest=SUPPRESS, + default=SUPPRESS, + help="show program's version number and exit"): + super(_VersionAction, self).__init__( + option_strings=option_strings, + dest=dest, + 
default=default, + nargs=0, + help=help) + self.version = version + + def __call__(self, parser, namespace, values, option_string=None): + version = self.version + if version is None: + version = parser.version + formatter = parser._get_formatter() + formatter.add_text(version) + parser.exit(message=formatter.format_help()) + + +class _SubParsersAction(Action): + + class _ChoicesPseudoAction(Action): + + def __init__(self, name, help): + sup = super(_SubParsersAction._ChoicesPseudoAction, self) + sup.__init__(option_strings=[], dest=name, help=help) + + def __init__(self, + option_strings, + prog, + parser_class, + dest=SUPPRESS, + help=None, + metavar=None): + + self._prog_prefix = prog + self._parser_class = parser_class + self._name_parser_map = {} + self._choices_actions = [] + + super(_SubParsersAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=PARSER, + choices=self._name_parser_map, + help=help, + metavar=metavar) + + def add_parser(self, name, **kwargs): + # set prog from the existing prefix + if kwargs.get('prog') is None: + kwargs['prog'] = '%s %s' % (self._prog_prefix, name) + + # create a pseudo-action to hold the choice help + if 'help' in kwargs: + help = kwargs.pop('help') + choice_action = self._ChoicesPseudoAction(name, help) + self._choices_actions.append(choice_action) + + # create the parser and add it to the map + parser = self._parser_class(**kwargs) + self._name_parser_map[name] = parser + return parser + + def _get_subactions(self): + return self._choices_actions + + def __call__(self, parser, namespace, values, option_string=None): + parser_name = values[0] + arg_strings = values[1:] + + # set the parser name if requested + if self.dest is not SUPPRESS: + setattr(namespace, self.dest, parser_name) + + # select the parser + try: + parser = self._name_parser_map[parser_name] + except KeyError: + tup = parser_name, ', '.join(self._name_parser_map) + msg = _('unknown parser %r (choices: %s)' % tup) + raise 
ArgumentError(self, msg) + + # parse all the remaining options into the namespace + # store any unrecognized options on the object, so that the top + # level parser can decide what to do with them + namespace, arg_strings = parser.parse_known_args(arg_strings, namespace) + if arg_strings: + vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, []) + getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings) + + +# ============== +# Type classes +# ============== + +class FileType(object): + """Factory for creating file object types + + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + """ + + def __init__(self, mode='r', bufsize=None): + self._mode = mode + self._bufsize = bufsize + + def __call__(self, string): + # the special argument "-" means sys.std{in,out} + if string == '-': + if 'r' in self._mode: + return _sys.stdin + elif 'w' in self._mode: + return _sys.stdout + else: + msg = _('argument "-" with mode %r' % self._mode) + raise ValueError(msg) + + # all other arguments are used as file names + if self._bufsize: + return open(string, self._mode, self._bufsize) + else: + return open(string, self._mode) + + def __repr__(self): + args = [self._mode, self._bufsize] + args_str = ', '.join([repr(arg) for arg in args if arg is not None]) + return '%s(%s)' % (type(self).__name__, args_str) + +# =========================== +# Optional and Positional Parsing +# =========================== + +class Namespace(_AttributeHolder): + """Simple object for storing attributes. + + Implements equality by attribute names and values, and provides a simple + string representation. 
+ """ + + def __init__(self, **kwargs): + for name in kwargs: + setattr(self, name, kwargs[name]) + + __hash__ = None + + def __eq__(self, other): + return vars(self) == vars(other) + + def __ne__(self, other): + return not (self == other) + + def __contains__(self, key): + return key in self.__dict__ + + +class _ActionsContainer(object): + + def __init__(self, + description, + prefix_chars, + argument_default, + conflict_handler): + super(_ActionsContainer, self).__init__() + + self.description = description + self.argument_default = argument_default + self.prefix_chars = prefix_chars + self.conflict_handler = conflict_handler + + # set up registries + self._registries = {} + + # register actions + self.register('action', None, _StoreAction) + self.register('action', 'store', _StoreAction) + self.register('action', 'store_const', _StoreConstAction) + self.register('action', 'store_true', _StoreTrueAction) + self.register('action', 'store_false', _StoreFalseAction) + self.register('action', 'append', _AppendAction) + self.register('action', 'append_const', _AppendConstAction) + self.register('action', 'count', _CountAction) + self.register('action', 'help', _HelpAction) + self.register('action', 'version', _VersionAction) + self.register('action', 'parsers', _SubParsersAction) + + # raise an exception if the conflict handler is invalid + self._get_handler() + + # action storage + self._actions = [] + self._option_string_actions = {} + + # groups + self._action_groups = [] + self._mutually_exclusive_groups = [] + + # defaults storage + self._defaults = {} + + # determines whether an "option" looks like a negative number + self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$') + + # whether or not there are any optionals that look like negative + # numbers -- uses a list so it can be shared and edited + self._has_negative_number_optionals = [] + + # ==================== + # Registration methods + # ==================== + def register(self, registry_name, 
value, object): + registry = self._registries.setdefault(registry_name, {}) + registry[value] = object + + def _registry_get(self, registry_name, value, default=None): + return self._registries[registry_name].get(value, default) + + # ================================== + # Namespace default accessor methods + # ================================== + def set_defaults(self, **kwargs): + self._defaults.update(kwargs) + + # if these defaults match any existing arguments, replace + # the previous default on the object with the new one + for action in self._actions: + if action.dest in kwargs: + action.default = kwargs[action.dest] + + def get_default(self, dest): + for action in self._actions: + if action.dest == dest and action.default is not None: + return action.default + return self._defaults.get(dest, None) + + + # ======================= + # Adding argument actions + # ======================= + def add_argument(self, *args, **kwargs): + """ + add_argument(dest, ..., name=value, ...) + add_argument(option_string, option_string, ..., name=value, ...) 
+ """ + + # if no positional args are supplied or only one is supplied and + # it doesn't look like an option string, parse a positional + # argument + chars = self.prefix_chars + if not args or len(args) == 1 and args[0][0] not in chars: + if args and 'dest' in kwargs: + raise ValueError('dest supplied twice for positional argument') + kwargs = self._get_positional_kwargs(*args, **kwargs) + + # otherwise, we're adding an optional argument + else: + kwargs = self._get_optional_kwargs(*args, **kwargs) + + # if no default was supplied, use the parser-level default + if 'default' not in kwargs: + dest = kwargs['dest'] + if dest in self._defaults: + kwargs['default'] = self._defaults[dest] + elif self.argument_default is not None: + kwargs['default'] = self.argument_default + + # create the action object, and add it to the parser + action_class = self._pop_action_class(kwargs) + if not _callable(action_class): + raise ValueError('unknown action "%s"' % action_class) + action = action_class(**kwargs) + + # raise an error if the action type is not callable + type_func = self._registry_get('type', action.type, action.type) + if not _callable(type_func): + raise ValueError('%r is not callable' % type_func) + + return self._add_action(action) + + def add_argument_group(self, *args, **kwargs): + group = _ArgumentGroup(self, *args, **kwargs) + self._action_groups.append(group) + return group + + def add_mutually_exclusive_group(self, **kwargs): + group = _MutuallyExclusiveGroup(self, **kwargs) + self._mutually_exclusive_groups.append(group) + return group + + def _add_action(self, action): + # resolve any conflicts + self._check_conflict(action) + + # add to actions list + self._actions.append(action) + action.container = self + + # index the action by any option strings it has + for option_string in action.option_strings: + self._option_string_actions[option_string] = action + + # set the flag if any option strings look like negative numbers + for option_string in 
action.option_strings: + if self._negative_number_matcher.match(option_string): + if not self._has_negative_number_optionals: + self._has_negative_number_optionals.append(True) + + # return the created action + return action + + def _remove_action(self, action): + self._actions.remove(action) + + def _add_container_actions(self, container): + # collect groups by titles + title_group_map = {} + for group in self._action_groups: + if group.title in title_group_map: + msg = _('cannot merge actions - two groups are named %r') + raise ValueError(msg % (group.title)) + title_group_map[group.title] = group + + # map each action to its group + group_map = {} + for group in container._action_groups: + + # if a group with the title exists, use that, otherwise + # create a new group matching the container's group + if group.title not in title_group_map: + title_group_map[group.title] = self.add_argument_group( + title=group.title, + description=group.description, + conflict_handler=group.conflict_handler) + + # map the actions to their new group + for action in group._group_actions: + group_map[action] = title_group_map[group.title] + + # add container's mutually exclusive groups + # NOTE: if add_mutually_exclusive_group ever gains title= and + # description= then this code will need to be expanded as above + for group in container._mutually_exclusive_groups: + mutex_group = self.add_mutually_exclusive_group( + required=group.required) + + # map the actions to their new mutex group + for action in group._group_actions: + group_map[action] = mutex_group + + # add all actions to this container or their group + for action in container._actions: + group_map.get(action, self)._add_action(action) + + def _get_positional_kwargs(self, dest, **kwargs): + # make sure required is not specified + if 'required' in kwargs: + msg = _("'required' is an invalid argument for positionals") + raise TypeError(msg) + + # mark positional arguments as required if at least one is + # always required 
+ if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]: + kwargs['required'] = True + if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs: + kwargs['required'] = True + + # return the keyword arguments with no option strings + return dict(kwargs, dest=dest, option_strings=[]) + + def _get_optional_kwargs(self, *args, **kwargs): + # determine short and long option strings + option_strings = [] + long_option_strings = [] + for option_string in args: + # error on strings that don't start with an appropriate prefix + if not option_string[0] in self.prefix_chars: + msg = _('invalid option string %r: ' + 'must start with a character %r') + tup = option_string, self.prefix_chars + raise ValueError(msg % tup) + + # strings starting with two prefix characters are long options + option_strings.append(option_string) + if option_string[0] in self.prefix_chars: + if len(option_string) > 1: + if option_string[1] in self.prefix_chars: + long_option_strings.append(option_string) + + # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x' + dest = kwargs.pop('dest', None) + if dest is None: + if long_option_strings: + dest_option_string = long_option_strings[0] + else: + dest_option_string = option_strings[0] + dest = dest_option_string.lstrip(self.prefix_chars) + if not dest: + msg = _('dest= is required for options like %r') + raise ValueError(msg % option_string) + dest = dest.replace('-', '_') + + # return the updated keyword arguments + return dict(kwargs, dest=dest, option_strings=option_strings) + + def _pop_action_class(self, kwargs, default=None): + action = kwargs.pop('action', default) + return self._registry_get('action', action, action) + + def _get_handler(self): + # determine function from conflict handler string + handler_func_name = '_handle_conflict_%s' % self.conflict_handler + try: + return getattr(self, handler_func_name) + except AttributeError: + msg = _('invalid conflict_resolution value: %r') + raise ValueError(msg % 
self.conflict_handler) + + def _check_conflict(self, action): + + # find all options that conflict with this option + confl_optionals = [] + for option_string in action.option_strings: + if option_string in self._option_string_actions: + confl_optional = self._option_string_actions[option_string] + confl_optionals.append((option_string, confl_optional)) + + # resolve any conflicts + if confl_optionals: + conflict_handler = self._get_handler() + conflict_handler(action, confl_optionals) + + def _handle_conflict_error(self, action, conflicting_actions): + message = _('conflicting option string(s): %s') + conflict_string = ', '.join([option_string + for option_string, action + in conflicting_actions]) + raise ArgumentError(action, message % conflict_string) + + def _handle_conflict_resolve(self, action, conflicting_actions): + + # remove all conflicting options + for option_string, action in conflicting_actions: + + # remove the conflicting option + action.option_strings.remove(option_string) + self._option_string_actions.pop(option_string, None) + + # if the option now has no option string, remove it from the + # container holding it + if not action.option_strings: + action.container._remove_action(action) + + +class _ArgumentGroup(_ActionsContainer): + + def __init__(self, container, title=None, description=None, **kwargs): + # add any missing keyword arguments by checking the container + update = kwargs.setdefault + update('conflict_handler', container.conflict_handler) + update('prefix_chars', container.prefix_chars) + update('argument_default', container.argument_default) + super_init = super(_ArgumentGroup, self).__init__ + super_init(description=description, **kwargs) + + # group attributes + self.title = title + self._group_actions = [] + + # share most attributes with the container + self._registries = container._registries + self._actions = container._actions + self._option_string_actions = container._option_string_actions + self._defaults = 
container._defaults + self._has_negative_number_optionals = \ + container._has_negative_number_optionals + + def _add_action(self, action): + action = super(_ArgumentGroup, self)._add_action(action) + self._group_actions.append(action) + return action + + def _remove_action(self, action): + super(_ArgumentGroup, self)._remove_action(action) + self._group_actions.remove(action) + + +class _MutuallyExclusiveGroup(_ArgumentGroup): + + def __init__(self, container, required=False): + super(_MutuallyExclusiveGroup, self).__init__(container) + self.required = required + self._container = container + + def _add_action(self, action): + if action.required: + msg = _('mutually exclusive arguments must be optional') + raise ValueError(msg) + action = self._container._add_action(action) + self._group_actions.append(action) + return action + + def _remove_action(self, action): + self._container._remove_action(action) + self._group_actions.remove(action) + + +class ArgumentParser(_AttributeHolder, _ActionsContainer): + """Object for parsing command line strings into Python objects. 
+ + Keyword Arguments: + - prog -- The name of the program (default: sys.argv[0]) + - usage -- A usage message (default: auto-generated from arguments) + - description -- A description of what the program does + - epilog -- Text following the argument descriptions + - parents -- Parsers whose arguments should be copied into this one + - formatter_class -- HelpFormatter class for printing help messages + - prefix_chars -- Characters that prefix optional arguments + - fromfile_prefix_chars -- Characters that prefix files containing + additional arguments + - argument_default -- The default value for all arguments + - conflict_handler -- String indicating how to handle conflicts + - add_help -- Add a -h/-help option + """ + + def __init__(self, + prog=None, + usage=None, + description=None, + epilog=None, + version=None, + parents=[], + formatter_class=HelpFormatter, + prefix_chars='-', + fromfile_prefix_chars=None, + argument_default=None, + conflict_handler='error', + add_help=True): + + if version is not None: + import warnings + warnings.warn( + """The "version" argument to ArgumentParser is deprecated. 
""" + """Please use """ + """"add_argument(..., action='version', version="N", ...)" """ + """instead""", DeprecationWarning) + + superinit = super(ArgumentParser, self).__init__ + superinit(description=description, + prefix_chars=prefix_chars, + argument_default=argument_default, + conflict_handler=conflict_handler) + + # default setting for prog + if prog is None: + prog = _os.path.basename(_sys.argv[0]) + + self.prog = prog + self.usage = usage + self.epilog = epilog + self.version = version + self.formatter_class = formatter_class + self.fromfile_prefix_chars = fromfile_prefix_chars + self.add_help = add_help + + add_group = self.add_argument_group + self._positionals = add_group(_('positional arguments')) + self._optionals = add_group(_('optional arguments')) + self._subparsers = None + + # register types + def identity(string): + return string + self.register('type', None, identity) + + # add help and version arguments if necessary + # (using explicit default to override global argument_default) + if '-' in prefix_chars: + default_prefix = '-' + else: + default_prefix = prefix_chars[0] + if self.add_help: + self.add_argument( + default_prefix+'h', default_prefix*2+'help', + action='help', default=SUPPRESS, + help=_('show this help message and exit')) + if self.version: + self.add_argument( + default_prefix+'v', default_prefix*2+'version', + action='version', default=SUPPRESS, + version=self.version, + help=_("show program's version number and exit")) + + # add parent arguments and defaults + for parent in parents: + self._add_container_actions(parent) + try: + defaults = parent._defaults + except AttributeError: + pass + else: + self._defaults.update(defaults) + + # ======================= + # Pretty __repr__ methods + # ======================= + def _get_kwargs(self): + names = [ + 'prog', + 'usage', + 'description', + 'version', + 'formatter_class', + 'conflict_handler', + 'add_help', + ] + return [(name, getattr(self, name)) for name in names] + + # 
================================== + # Optional/Positional adding methods + # ================================== + def add_subparsers(self, **kwargs): + if self._subparsers is not None: + self.error(_('cannot have multiple subparser arguments')) + + # add the parser class to the arguments if it's not present + kwargs.setdefault('parser_class', type(self)) + + if 'title' in kwargs or 'description' in kwargs: + title = _(kwargs.pop('title', 'subcommands')) + description = _(kwargs.pop('description', None)) + self._subparsers = self.add_argument_group(title, description) + else: + self._subparsers = self._positionals + + # prog defaults to the usage message of this parser, skipping + # optional arguments and with no "usage:" prefix + if kwargs.get('prog') is None: + formatter = self._get_formatter() + positionals = self._get_positional_actions() + groups = self._mutually_exclusive_groups + formatter.add_usage(self.usage, positionals, groups, '') + kwargs['prog'] = formatter.format_help().strip() + + # create the parsers action and add it to the positionals list + parsers_class = self._pop_action_class(kwargs, 'parsers') + action = parsers_class(option_strings=[], **kwargs) + self._subparsers._add_action(action) + + # return the created parsers action + return action + + def _add_action(self, action): + if action.option_strings: + self._optionals._add_action(action) + else: + self._positionals._add_action(action) + return action + + def _get_optional_actions(self): + return [action + for action in self._actions + if action.option_strings] + + def _get_positional_actions(self): + return [action + for action in self._actions + if not action.option_strings] + + # ===================================== + # Command line argument parsing methods + # ===================================== + def parse_args(self, args=None, namespace=None): + args, argv = self.parse_known_args(args, namespace) + if argv: + msg = _('unrecognized arguments: %s') + self.error(msg % ' '.join(argv)) + 
return args + + def parse_known_args(self, args=None, namespace=None): + # args default to the system args + if args is None: + args = _sys.argv[1:] + + # default Namespace built from parser defaults + if namespace is None: + namespace = Namespace() + + # add any action defaults that aren't present + for action in self._actions: + if action.dest is not SUPPRESS: + if not hasattr(namespace, action.dest): + if action.default is not SUPPRESS: + default = action.default + if isinstance(action.default, basestring): + default = self._get_value(action, default) + setattr(namespace, action.dest, default) + + # add any parser defaults that aren't present + for dest in self._defaults: + if not hasattr(namespace, dest): + setattr(namespace, dest, self._defaults[dest]) + + # parse the arguments and exit if there are any errors + try: + namespace, args = self._parse_known_args(args, namespace) + if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR): + args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) + delattr(namespace, _UNRECOGNIZED_ARGS_ATTR) + return namespace, args + except ArgumentError: + err = _sys.exc_info()[1] + self.error(str(err)) + + def _parse_known_args(self, arg_strings, namespace): + # replace arg strings that are file references + if self.fromfile_prefix_chars is not None: + arg_strings = self._read_args_from_files(arg_strings) + + # map all mutually exclusive arguments to the other arguments + # they can't occur with + action_conflicts = {} + for mutex_group in self._mutually_exclusive_groups: + group_actions = mutex_group._group_actions + for i, mutex_action in enumerate(mutex_group._group_actions): + conflicts = action_conflicts.setdefault(mutex_action, []) + conflicts.extend(group_actions[:i]) + conflicts.extend(group_actions[i + 1:]) + + # find all option indices, and determine the arg_string_pattern + # which has an 'O' if there is an option at an index, + # an 'A' if there is an argument, or a '-' if there is a '--' + option_string_indices = {} + 
arg_string_pattern_parts = [] + arg_strings_iter = iter(arg_strings) + for i, arg_string in enumerate(arg_strings_iter): + + # all args after -- are non-options + if arg_string == '--': + arg_string_pattern_parts.append('-') + for arg_string in arg_strings_iter: + arg_string_pattern_parts.append('A') + + # otherwise, add the arg to the arg strings + # and note the index if it was an option + else: + option_tuple = self._parse_optional(arg_string) + if option_tuple is None: + pattern = 'A' + else: + option_string_indices[i] = option_tuple + pattern = 'O' + arg_string_pattern_parts.append(pattern) + + # join the pieces together to form the pattern + arg_strings_pattern = ''.join(arg_string_pattern_parts) + + # converts arg strings to the appropriate and then takes the action + seen_actions = set() + seen_non_default_actions = set() + + def take_action(action, argument_strings, option_string=None): + seen_actions.add(action) + argument_values = self._get_values(action, argument_strings) + + # error if this argument is not allowed with other previously + # seen arguments, assuming that actions that use the default + # value don't really count as "present" + if argument_values is not action.default: + seen_non_default_actions.add(action) + for conflict_action in action_conflicts.get(action, []): + if conflict_action in seen_non_default_actions: + msg = _('not allowed with argument %s') + action_name = _get_action_name(conflict_action) + raise ArgumentError(action, msg % action_name) + + # take the action if we didn't receive a SUPPRESS value + # (e.g. 
from a default) + if argument_values is not SUPPRESS: + action(self, namespace, argument_values, option_string) + + # function to convert arg_strings into an optional action + def consume_optional(start_index): + + # get the optional identified at this index + option_tuple = option_string_indices[start_index] + action, option_string, explicit_arg = option_tuple + + # identify additional optionals in the same arg string + # (e.g. -xyz is the same as -x -y -z if no args are required) + match_argument = self._match_argument + action_tuples = [] + while True: + + # if we found no optional action, skip it + if action is None: + extras.append(arg_strings[start_index]) + return start_index + 1 + + # if there is an explicit argument, try to match the + # optional's string arguments to only this + if explicit_arg is not None: + arg_count = match_argument(action, 'A') + + # if the action is a single-dash option and takes no + # arguments, try to parse more single-dash options out + # of the tail of the option string + chars = self.prefix_chars + if arg_count == 0 and option_string[1] not in chars: + action_tuples.append((action, [], option_string)) + char = option_string[0] + option_string = char + explicit_arg[0] + new_explicit_arg = explicit_arg[1:] or None + optionals_map = self._option_string_actions + if option_string in optionals_map: + action = optionals_map[option_string] + explicit_arg = new_explicit_arg + else: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) + + # if the action expect exactly one argument, we've + # successfully matched the option; exit the loop + elif arg_count == 1: + stop = start_index + 1 + args = [explicit_arg] + action_tuples.append((action, args, option_string)) + break + + # error if a double-dash option did not use the + # explicit argument + else: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) + + # if there is no explicit argument, try to match the + 
# optional's string arguments with the following strings + # if successful, exit the loop + else: + start = start_index + 1 + selected_patterns = arg_strings_pattern[start:] + arg_count = match_argument(action, selected_patterns) + stop = start + arg_count + args = arg_strings[start:stop] + action_tuples.append((action, args, option_string)) + break + + # add the Optional to the list and return the index at which + # the Optional's string args stopped + assert action_tuples + for action, args, option_string in action_tuples: + take_action(action, args, option_string) + return stop + + # the list of Positionals left to be parsed; this is modified + # by consume_positionals() + positionals = self._get_positional_actions() + + # function to convert arg_strings into positional actions + def consume_positionals(start_index): + # match as many Positionals as possible + match_partial = self._match_arguments_partial + selected_pattern = arg_strings_pattern[start_index:] + arg_counts = match_partial(positionals, selected_pattern) + + # slice off the appropriate arg strings for each Positional + # and add the Positional and its args to the list + for action, arg_count in zip(positionals, arg_counts): + args = arg_strings[start_index: start_index + arg_count] + start_index += arg_count + take_action(action, args) + + # slice off the Positionals that we just parsed and return the + # index at which the Positionals' string args stopped + positionals[:] = positionals[len(arg_counts):] + return start_index + + # consume Positionals and Optionals alternately, until we have + # passed the last option string + extras = [] + start_index = 0 + if option_string_indices: + max_option_string_index = max(option_string_indices) + else: + max_option_string_index = -1 + while start_index <= max_option_string_index: + + # consume any Positionals preceding the next option + next_option_string_index = min([ + index + for index in option_string_indices + if index >= start_index]) + if 
start_index != next_option_string_index: + positionals_end_index = consume_positionals(start_index) + + # only try to parse the next optional if we didn't consume + # the option string during the positionals parsing + if positionals_end_index > start_index: + start_index = positionals_end_index + continue + else: + start_index = positionals_end_index + + # if we consumed all the positionals we could and we're not + # at the index of an option string, there were extra arguments + if start_index not in option_string_indices: + strings = arg_strings[start_index:next_option_string_index] + extras.extend(strings) + start_index = next_option_string_index + + # consume the next optional and any arguments for it + start_index = consume_optional(start_index) + + # consume any positionals following the last Optional + stop_index = consume_positionals(start_index) + + # if we didn't consume all the argument strings, there were extras + extras.extend(arg_strings[stop_index:]) + + # if we didn't use all the Positional objects, there were too few + # arg strings supplied. 
+ if positionals: + self.error(_('too few arguments')) + + # make sure all required actions were present + for action in self._actions: + if action.required: + if action not in seen_actions: + name = _get_action_name(action) + self.error(_('argument %s is required') % name) + + # make sure all required groups had one option present + for group in self._mutually_exclusive_groups: + if group.required: + for action in group._group_actions: + if action in seen_non_default_actions: + break + + # if no actions were used, report the error + else: + names = [_get_action_name(action) + for action in group._group_actions + if action.help is not SUPPRESS] + msg = _('one of the arguments %s is required') + self.error(msg % ' '.join(names)) + + # return the updated namespace and the extra arguments + return namespace, extras + + def _read_args_from_files(self, arg_strings): + # expand arguments referencing files + new_arg_strings = [] + for arg_string in arg_strings: + + # for regular arguments, just add them back into the list + if arg_string[0] not in self.fromfile_prefix_chars: + new_arg_strings.append(arg_string) + + # replace arguments referencing files with the file content + else: + try: + args_file = open(arg_string[1:]) + try: + arg_strings = [] + for arg_line in args_file.read().splitlines(): + for arg in self.convert_arg_line_to_args(arg_line): + arg_strings.append(arg) + arg_strings = self._read_args_from_files(arg_strings) + new_arg_strings.extend(arg_strings) + finally: + args_file.close() + except IOError: + err = _sys.exc_info()[1] + self.error(str(err)) + + # return the modified argument list + return new_arg_strings + + def convert_arg_line_to_args(self, arg_line): + return [arg_line] + + def _match_argument(self, action, arg_strings_pattern): + # match the pattern for this action to the arg strings + nargs_pattern = self._get_nargs_pattern(action) + match = _re.match(nargs_pattern, arg_strings_pattern) + + # raise an exception if we weren't able to find a 
match + if match is None: + nargs_errors = { + None: _('expected one argument'), + OPTIONAL: _('expected at most one argument'), + ONE_OR_MORE: _('expected at least one argument'), + } + default = _('expected %s argument(s)') % action.nargs + msg = nargs_errors.get(action.nargs, default) + raise ArgumentError(action, msg) + + # return the number of arguments matched + return len(match.group(1)) + + def _match_arguments_partial(self, actions, arg_strings_pattern): + # progressively shorten the actions list by slicing off the + # final actions until we find a match + result = [] + for i in range(len(actions), 0, -1): + actions_slice = actions[:i] + pattern = ''.join([self._get_nargs_pattern(action) + for action in actions_slice]) + match = _re.match(pattern, arg_strings_pattern) + if match is not None: + result.extend([len(string) for string in match.groups()]) + break + + # return the list of arg string counts + return result + + def _parse_optional(self, arg_string): + # if it's an empty string, it was meant to be a positional + if not arg_string: + return None + + # if it doesn't start with a prefix, it was meant to be positional + if not arg_string[0] in self.prefix_chars: + return None + + # if the option string is present in the parser, return the action + if arg_string in self._option_string_actions: + action = self._option_string_actions[arg_string] + return action, arg_string, None + + # if it's just a single character, it was meant to be positional + if len(arg_string) == 1: + return None + + # if the option string before the "=" is present, return the action + if '=' in arg_string: + option_string, explicit_arg = arg_string.split('=', 1) + if option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, explicit_arg + + # search through all possible prefixes of the option string + # and all actions in the parser for possible interpretations + option_tuples = 
self._get_option_tuples(arg_string) + + # if multiple actions match, the option string was ambiguous + if len(option_tuples) > 1: + options = ', '.join([option_string + for action, option_string, explicit_arg in option_tuples]) + tup = arg_string, options + self.error(_('ambiguous option: %s could match %s') % tup) + + # if exactly one action matched, this segmentation is good, + # so return the parsed action + elif len(option_tuples) == 1: + option_tuple, = option_tuples + return option_tuple + + # if it was not found as an option, but it looks like a negative + # number, it was meant to be positional + # unless there are negative-number-like options + if self._negative_number_matcher.match(arg_string): + if not self._has_negative_number_optionals: + return None + + # if it contains a space, it was meant to be a positional + if ' ' in arg_string: + return None + + # it was meant to be an optional but there is no such option + # in this parser (though it might be a valid option in a subparser) + return None, arg_string, None + + def _get_option_tuples(self, option_string): + result = [] + + # option strings starting with two prefix characters are only + # split at the '=' + chars = self.prefix_chars + if option_string[0] in chars and option_string[1] in chars: + if '=' in option_string: + option_prefix, explicit_arg = option_string.split('=', 1) + else: + option_prefix = option_string + explicit_arg = None + for option_string in self._option_string_actions: + if option_string.startswith(option_prefix): + action = self._option_string_actions[option_string] + tup = action, option_string, explicit_arg + result.append(tup) + + # single character options can be concatenated with their arguments + # but multiple character options always have to have their argument + # separate + elif option_string[0] in chars and option_string[1] not in chars: + option_prefix = option_string + explicit_arg = None + short_option_prefix = option_string[:2] + short_explicit_arg = 
option_string[2:] + + for option_string in self._option_string_actions: + if option_string == short_option_prefix: + action = self._option_string_actions[option_string] + tup = action, option_string, short_explicit_arg + result.append(tup) + elif option_string.startswith(option_prefix): + action = self._option_string_actions[option_string] + tup = action, option_string, explicit_arg + result.append(tup) + + # shouldn't ever get here + else: + self.error(_('unexpected option string: %s') % option_string) + + # return the collected option tuples + return result + + def _get_nargs_pattern(self, action): + # in all examples below, we have to allow for '--' args + # which are represented as '-' in the pattern + nargs = action.nargs + + # the default (None) is assumed to be a single argument + if nargs is None: + nargs_pattern = '(-*A-*)' + + # allow zero or one arguments + elif nargs == OPTIONAL: + nargs_pattern = '(-*A?-*)' + + # allow zero or more arguments + elif nargs == ZERO_OR_MORE: + nargs_pattern = '(-*[A-]*)' + + # allow one or more arguments + elif nargs == ONE_OR_MORE: + nargs_pattern = '(-*A[A-]*)' + + # allow any number of options or arguments + elif nargs == REMAINDER: + nargs_pattern = '([-AO]*)' + + # allow one argument followed by any number of options or arguments + elif nargs == PARSER: + nargs_pattern = '(-*A[-AO]*)' + + # all others should be integers + else: + nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) + + # if this is an optional action, -- is not allowed + if action.option_strings: + nargs_pattern = nargs_pattern.replace('-*', '') + nargs_pattern = nargs_pattern.replace('-', '') + + # return the pattern + return nargs_pattern + + # ======================== + # Value conversion methods + # ======================== + def _get_values(self, action, arg_strings): + # for everything but PARSER args, strip out '--' + if action.nargs not in [PARSER, REMAINDER]: + arg_strings = [s for s in arg_strings if s != '--'] + + # optional argument 
produces a default when not present + if not arg_strings and action.nargs == OPTIONAL: + if action.option_strings: + value = action.const + else: + value = action.default + if isinstance(value, basestring): + value = self._get_value(action, value) + self._check_value(action, value) + + # when nargs='*' on a positional, if there were no command-line + # args, use the default if it is anything other than None + elif (not arg_strings and action.nargs == ZERO_OR_MORE and + not action.option_strings): + if action.default is not None: + value = action.default + else: + value = arg_strings + self._check_value(action, value) + + # single argument or optional argument produces a single value + elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: + arg_string, = arg_strings + value = self._get_value(action, arg_string) + self._check_value(action, value) + + # REMAINDER arguments convert all values, checking none + elif action.nargs == REMAINDER: + value = [self._get_value(action, v) for v in arg_strings] + + # PARSER arguments convert all values, but check only the first + elif action.nargs == PARSER: + value = [self._get_value(action, v) for v in arg_strings] + self._check_value(action, value[0]) + + # all other types of nargs produce a list + else: + value = [self._get_value(action, v) for v in arg_strings] + for v in value: + self._check_value(action, v) + + # return the converted value + return value + + def _get_value(self, action, arg_string): + type_func = self._registry_get('type', action.type, action.type) + if not _callable(type_func): + msg = _('%r is not callable') + raise ArgumentError(action, msg % type_func) + + # convert the value to the appropriate type + try: + result = type_func(arg_string) + + # ArgumentTypeErrors indicate errors + except ArgumentTypeError: + name = getattr(action.type, '__name__', repr(action.type)) + msg = str(_sys.exc_info()[1]) + raise ArgumentError(action, msg) + + # TypeErrors or ValueErrors also indicate errors + except 
(TypeError, ValueError): + name = getattr(action.type, '__name__', repr(action.type)) + msg = _('invalid %s value: %r') + raise ArgumentError(action, msg % (name, arg_string)) + + # return the converted value + return result + + def _check_value(self, action, value): + # converted value must be one of the choices (if specified) + if action.choices is not None and value not in action.choices: + tup = value, ', '.join(map(repr, action.choices)) + msg = _('invalid choice: %r (choose from %s)') % tup + raise ArgumentError(action, msg) + + # ======================= + # Help-formatting methods + # ======================= + def format_usage(self): + formatter = self._get_formatter() + formatter.add_usage(self.usage, self._actions, + self._mutually_exclusive_groups) + return formatter.format_help() + + def format_help(self): + formatter = self._get_formatter() + + # usage + formatter.add_usage(self.usage, self._actions, + self._mutually_exclusive_groups) + + # description + formatter.add_text(self.description) + + # positionals, optionals and user-defined groups + for action_group in self._action_groups: + formatter.start_section(action_group.title) + formatter.add_text(action_group.description) + formatter.add_arguments(action_group._group_actions) + formatter.end_section() + + # epilog + formatter.add_text(self.epilog) + + # determine help from format above + return formatter.format_help() + + def format_version(self): + import warnings + warnings.warn( + 'The format_version method is deprecated -- the "version" ' + 'argument to ArgumentParser is no longer supported.', + DeprecationWarning) + formatter = self._get_formatter() + formatter.add_text(self.version) + return formatter.format_help() + + def _get_formatter(self): + return self.formatter_class(prog=self.prog) + + # ===================== + # Help-printing methods + # ===================== + def print_usage(self, file=None): + if file is None: + file = _sys.stdout + self._print_message(self.format_usage(), file) + 
+ def print_help(self, file=None): + if file is None: + file = _sys.stdout + self._print_message(self.format_help(), file) + + def print_version(self, file=None): + import warnings + warnings.warn( + 'The print_version method is deprecated -- the "version" ' + 'argument to ArgumentParser is no longer supported.', + DeprecationWarning) + self._print_message(self.format_version(), file) + + def _print_message(self, message, file=None): + if message: + if file is None: + file = _sys.stderr + file.write(message) + + # =============== + # Exiting methods + # =============== + def exit(self, status=0, message=None): + if message: + self._print_message(message, _sys.stderr) + _sys.exit(status) + + def error(self, message): + """error(message: string) + + Prints a usage message incorporating the message to stderr and + exits. + + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + self.print_usage(_sys.stderr) + self.exit(2, _('%s: error: %s\n') % (self.prog, message)) diff --git a/lib/spack/external/functools.py b/lib/spack/external/functools.py new file mode 100644 index 0000000000..19f0903c82 --- /dev/null +++ b/lib/spack/external/functools.py @@ -0,0 +1,30 @@ +# +# Backport of Python 2.7's total_ordering. 
+# + +def total_ordering(cls): + """Class decorator that fills in missing ordering methods""" + convert = { + '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)), + ('__le__', lambda self, other: self < other or self == other), + ('__ge__', lambda self, other: not self < other)], + '__le__': [('__ge__', lambda self, other: not self <= other or self == other), + ('__lt__', lambda self, other: self <= other and not self == other), + ('__gt__', lambda self, other: not self <= other)], + '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)), + ('__ge__', lambda self, other: self > other or self == other), + ('__le__', lambda self, other: not self > other)], + '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other), + ('__gt__', lambda self, other: self >= other and not self == other), + ('__lt__', lambda self, other: not self >= other)] + } + roots = set(dir(cls)) & set(convert) + if not roots: + raise ValueError('must define at least one ordering operation: < > <= >=') + root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ + for opname, opfunc in convert[root]: + if opname not in roots: + opfunc.__name__ = opname + opfunc.__doc__ = getattr(int, opname).__doc__ + setattr(cls, opname, opfunc) + return cls diff --git a/lib/spack/external/ordereddict.py b/lib/spack/external/ordereddict.py new file mode 100644 index 0000000000..8ddad1477e --- /dev/null +++ b/lib/spack/external/ordereddict.py @@ -0,0 +1,262 @@ +# +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. +# +# From http://code.activestate.com/recipes/576693-ordered-dictionary-for-py24/ +# This file is in the public domain, and has no particular license. 
+# +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. 
+ dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> 
None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' 
+ _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) diff --git a/lib/spack/external/pyqver2.py b/lib/spack/external/pyqver2.py new file mode 100755 index 0000000000..cd45bf948f --- /dev/null +++ b/lib/spack/external/pyqver2.py @@ -0,0 +1,393 @@ +#!/usr/bin/env python +# +# pyqver2.py +# by Greg Hewgill +# https://github.com/ghewgill/pyqver +# +# This software is provided 'as-is', without any express or implied +# warranty. 
In no event will the author be held liable for any damages +# arising from the use of this software. +# +# Permission is granted to anyone to use this software for any purpose, +# including commercial applications, and to alter it and redistribute it +# freely, subject to the following restrictions: +# +# 1. The origin of this software must not be misrepresented; you must not +# claim that you wrote the original software. If you use this software +# in a product, an acknowledgment in the product documentation would be +# appreciated but is not required. +# 2. Altered source versions must be plainly marked as such, and must not be +# misrepresented as being the original software. +# 3. This notice may not be removed or altered from any source distribution. +# +# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com +# + +import compiler +import platform +import sys + +StandardModules = { + "__future__": (2, 1), + "abc": (2, 6), + "argparse": (2, 7), + "ast": (2, 6), + "atexit": (2, 0), + "bz2": (2, 3), + "cgitb": (2, 2), + "collections": (2, 4), + "contextlib": (2, 5), + "cookielib": (2, 4), + "cProfile": (2, 5), + "csv": (2, 3), + "ctypes": (2, 5), + "datetime": (2, 3), + "decimal": (2, 4), + "difflib": (2, 1), + "DocXMLRPCServer": (2, 3), + "dummy_thread": (2, 3), + "dummy_threading": (2, 3), + "email": (2, 2), + "fractions": (2, 6), + "functools": (2, 5), + "future_builtins": (2, 6), + "hashlib": (2, 5), + "heapq": (2, 3), + "hmac": (2, 2), + "hotshot": (2, 2), + "HTMLParser": (2, 2), + "importlib": (2, 7), + "inspect": (2, 1), + "io": (2, 6), + "itertools": (2, 3), + "json": (2, 6), + "logging": (2, 3), + "modulefinder": (2, 3), + "msilib": (2, 5), + "multiprocessing": (2, 6), + "netrc": (1, 5, 2), + "numbers": (2, 6), + "optparse": (2, 3), + "ossaudiodev": (2, 3), + "pickletools": (2, 3), + "pkgutil": (2, 3), + "platform": (2, 3), + "pydoc": (2, 1), + "runpy": (2, 5), + "sets": (2, 3), + "shlex": (1, 5, 2), + "SimpleXMLRPCServer": (2, 2), + "spwd": (2, 5), + 
"sqlite3": (2, 5), + "ssl": (2, 6), + "stringprep": (2, 3), + "subprocess": (2, 4), + "sysconfig": (2, 7), + "tarfile": (2, 3), + "textwrap": (2, 3), + "timeit": (2, 3), + "unittest": (2, 1), + "uuid": (2, 5), + "warnings": (2, 1), + "weakref": (2, 1), + "winsound": (1, 5, 2), + "wsgiref": (2, 5), + "xml.dom": (2, 0), + "xml.dom.minidom": (2, 0), + "xml.dom.pulldom": (2, 0), + "xml.etree.ElementTree": (2, 5), + "xml.parsers.expat":(2, 0), + "xml.sax": (2, 0), + "xml.sax.handler": (2, 0), + "xml.sax.saxutils": (2, 0), + "xml.sax.xmlreader":(2, 0), + "xmlrpclib": (2, 2), + "zipfile": (1, 6), + "zipimport": (2, 3), + "_ast": (2, 5), + "_winreg": (2, 0), +} + +Functions = { + "all": (2, 5), + "any": (2, 5), + "collections.Counter": (2, 7), + "collections.defaultdict": (2, 5), + "collections.OrderedDict": (2, 7), + "functools.total_ordering": (2, 7), + "enumerate": (2, 3), + "frozenset": (2, 4), + "itertools.compress": (2, 7), + "math.erf": (2, 7), + "math.erfc": (2, 7), + "math.expm1": (2, 7), + "math.gamma": (2, 7), + "math.lgamma": (2, 7), + "memoryview": (2, 7), + "next": (2, 6), + "os.getresgid": (2, 7), + "os.getresuid": (2, 7), + "os.initgroups": (2, 7), + "os.setresgid": (2, 7), + "os.setresuid": (2, 7), + "reversed": (2, 4), + "set": (2, 4), + "subprocess.check_call": (2, 5), + "subprocess.check_output": (2, 7), + "sum": (2, 3), + "symtable.is_declared_global": (2, 7), + "weakref.WeakSet": (2, 7), +} + +Identifiers = { + "False": (2, 2), + "True": (2, 2), +} + +def uniq(a): + if len(a) == 0: + return [] + else: + return [a[0]] + uniq([x for x in a if x != a[0]]) + +class NodeChecker(object): + def __init__(self): + self.vers = dict() + self.vers[(2,0)] = [] + def add(self, node, ver, msg): + if ver not in self.vers: + self.vers[ver] = [] + self.vers[ver].append((node.lineno, msg)) + def default(self, node): + for child in node.getChildNodes(): + self.visit(child) + def visitCallFunc(self, node): + def rollup(n): + if isinstance(n, compiler.ast.Name): + return 
n.name + elif isinstance(n, compiler.ast.Getattr): + r = rollup(n.expr) + if r: + return r + "." + n.attrname + name = rollup(node.node) + if name: + v = Functions.get(name) + if v is not None: + self.add(node, v, name) + self.default(node) + def visitClass(self, node): + if node.bases: + self.add(node, (2,2), "new-style class") + if node.decorators: + self.add(node, (2,6), "class decorator") + self.default(node) + def visitDictComp(self, node): + self.add(node, (2,7), "dictionary comprehension") + self.default(node) + def visitFloorDiv(self, node): + self.add(node, (2,2), "// operator") + self.default(node) + def visitFrom(self, node): + v = StandardModules.get(node.modname) + if v is not None: + self.add(node, v, node.modname) + for n in node.names: + name = node.modname + "." + n[0] + v = Functions.get(name) + if v is not None: + self.add(node, v, name) + def visitFunction(self, node): + if node.decorators: + self.add(node, (2,4), "function decorator") + self.default(node) + def visitGenExpr(self, node): + self.add(node, (2,4), "generator expression") + self.default(node) + def visitGetattr(self, node): + if (isinstance(node.expr, compiler.ast.Const) + and isinstance(node.expr.value, str) + and node.attrname == "format"): + self.add(node, (2,6), "string literal .format()") + self.default(node) + def visitIfExp(self, node): + self.add(node, (2,5), "inline if expression") + self.default(node) + def visitImport(self, node): + for n in node.names: + v = StandardModules.get(n[0]) + if v is not None: + self.add(node, v, n[0]) + self.default(node) + def visitName(self, node): + v = Identifiers.get(node.name) + if v is not None: + self.add(node, v, node.name) + self.default(node) + def visitSet(self, node): + self.add(node, (2,7), "set literal") + self.default(node) + def visitSetComp(self, node): + self.add(node, (2,7), "set comprehension") + self.default(node) + def visitTryFinally(self, node): + # try/finally with a suite generates a Stmt node as the body, + # but 
try/except/finally generates a TryExcept as the body + if isinstance(node.body, compiler.ast.TryExcept): + self.add(node, (2,5), "try/except/finally") + self.default(node) + def visitWith(self, node): + if isinstance(node.body, compiler.ast.With): + self.add(node, (2,7), "with statement with multiple contexts") + else: + self.add(node, (2,5), "with statement") + self.default(node) + def visitYield(self, node): + self.add(node, (2,2), "yield expression") + self.default(node) + +def get_versions(source): + """Return information about the Python versions required for specific features. + + The return value is a dictionary with keys as a version number as a tuple + (for example Python 2.6 is (2,6)) and the value are a list of features that + require the indicated Python version. + """ + tree = compiler.parse(source) + checker = compiler.walk(tree, NodeChecker()) + return checker.vers + +def v27(source): + if sys.version_info >= (2, 7): + return qver(source) + else: + print >>sys.stderr, "Not all features tested, run --test with Python 2.7" + return (2, 7) + +def qver(source): + """Return the minimum Python version required to run a particular bit of code. 
+ + >>> qver('print "hello world"') + (2, 0) + >>> qver('class test(object): pass') + (2, 2) + >>> qver('yield 1') + (2, 2) + >>> qver('a // b') + (2, 2) + >>> qver('True') + (2, 2) + >>> qver('enumerate(a)') + (2, 3) + >>> qver('total = sum') + (2, 0) + >>> qver('sum(a)') + (2, 3) + >>> qver('(x*x for x in range(5))') + (2, 4) + >>> qver('class C:\\n @classmethod\\n def m(): pass') + (2, 4) + >>> qver('y if x else z') + (2, 5) + >>> qver('import hashlib') + (2, 5) + >>> qver('from hashlib import md5') + (2, 5) + >>> qver('import xml.etree.ElementTree') + (2, 5) + >>> qver('try:\\n try: pass;\\n except: pass;\\nfinally: pass') + (2, 0) + >>> qver('try: pass;\\nexcept: pass;\\nfinally: pass') + (2, 5) + >>> qver('from __future__ import with_statement\\nwith x: pass') + (2, 5) + >>> qver('collections.defaultdict(list)') + (2, 5) + >>> qver('from collections import defaultdict') + (2, 5) + >>> qver('"{0}".format(0)') + (2, 6) + >>> qver('memoryview(x)') + (2, 7) + >>> v27('{1, 2, 3}') + (2, 7) + >>> v27('{x for x in s}') + (2, 7) + >>> v27('{x: y for x in s}') + (2, 7) + >>> qver('from __future__ import with_statement\\nwith x:\\n with y: pass') + (2, 5) + >>> v27('from __future__ import with_statement\\nwith x, y: pass') + (2, 7) + >>> qver('@decorator\\ndef f(): pass') + (2, 4) + >>> qver('@decorator\\nclass test:\\n pass') + (2, 6) + + #>>> qver('0o0') + #(2, 6) + #>>> qver('@foo\\nclass C: pass') + #(2, 6) + """ + return max(get_versions(source).keys()) + + +if __name__ == '__main__': + + Verbose = False + MinVersion = (2, 3) + Lint = False + + files = [] + i = 1 + while i < len(sys.argv): + a = sys.argv[i] + if a == "--test": + import doctest + doctest.testmod() + sys.exit(0) + if a == "-v" or a == "--verbose": + Verbose = True + elif a == "-l" or a == "--lint": + Lint = True + elif a == "-m" or a == "--min-version": + i += 1 + MinVersion = tuple(map(int, sys.argv[i].split("."))) + else: + files.append(a) + i += 1 + + if not files: + print >>sys.stderr, """Usage: 
%s [options] source ... + + Report minimum Python version required to run given source files. + + -m x.y or --min-version x.y (default 2.3) + report version triggers at or above version x.y in verbose mode + -v or --verbose + print more detailed report of version triggers for each version + """ % sys.argv[0] + sys.exit(1) + + for fn in files: + try: + f = open(fn) + source = f.read() + f.close() + ver = get_versions(source) + if Verbose: + print fn + for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True): + reasons = [x for x in uniq(ver[v]) if x] + if reasons: + # each reason is (lineno, message) + print "\t%s\t%s" % (".".join(map(str, v)), ", ".join([x[1] for x in reasons])) + elif Lint: + for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True): + reasons = [x for x in uniq(ver[v]) if x] + for r in reasons: + # each reason is (lineno, message) + print "%s:%s: %s %s" % (fn, r[0], ".".join(map(str, v)), r[1]) + else: + print "%s\t%s" % (".".join(map(str, max(ver.keys()))), fn) + except SyntaxError, x: + print "%s: syntax error compiling with Python %s: %s" % (fn, platform.python_version(), x) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 11c3dee604..3782aefcce 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -124,8 +124,19 @@ def expand_user(path): return path.replace('%u', username) +def mkdirp(*paths): + for path in paths: + if not os.path.exists(path): + os.makedirs(path) + elif not os.path.isdir(path): + raise OSError(errno.EEXIST, "File alredy exists", path) + + @contextmanager -def working_dir(dirname): +def working_dir(dirname, **kwargs): + if kwargs.get('create', False): + mkdirp(dirname) + orig_dir = os.getcwd() os.chdir(dirname) yield @@ -137,14 +148,6 @@ def touch(path): os.utime(path, None) -def mkdirp(*paths): - for path in paths: - if not os.path.exists(path): - os.makedirs(path) - elif not os.path.isdir(path): - raise 
OSError(errno.EEXIST, "File alredy exists", path) - - def join_path(prefix, *args): path = str(prefix) for elt in args: diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 7590fb1298..ce7d0197f0 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -119,9 +119,8 @@ def caller_locals(): def get_calling_package_name(): - """Make sure that the caller is a class definition, and return - the module's name. This is useful for getting the name of - spack packages from inside a relation function. + """Make sure that the caller is a class definition, and return the + module's name. """ stack = inspect.stack() try: @@ -144,8 +143,9 @@ def get_calling_package_name(): def attr_required(obj, attr_name): """Ensure that a class has a required attribute.""" if not hasattr(obj, attr_name): - tty.die("No required attribute '%s' in class '%s'" - % (attr_name, obj.__class__.__name__)) + raise RequiredAttributeError( + "No required attribute '%s' in class '%s'" + % (attr_name, obj.__class__.__name__)) def attr_setdefault(obj, name, value): @@ -259,3 +259,8 @@ def in_function(function_name): return False finally: del stack + + +class RequiredAttributeError(ValueError): + def __init__(self, message): + super(RequiredAttributeError, self).__init__(message) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index d0cf8804ba..bf91a885ca 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -22,20 +22,6 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - -# -# When packages call 'from spack import *', this is what is brought in. -# -# Spack internal code calls 'import spack' and accesses other -# variables (spack.db, paths, etc.) directly. -# -# TODO: maybe this should be separated out and should go in build_environment.py? 
-# TODO: it's not clear where all the stuff that needs to be included in packages -# should live. This file is overloaded for spack core vs. for packages. -__all__ = ['Package', 'when', 'provides', 'depends_on', - 'patch', 'Version', 'working_dir', 'which', 'Executable', - 'filter_file', 'change_sed_delimiter'] - import os import tempfile from llnl.util.filesystem import * @@ -58,7 +44,6 @@ var_path = join_path(prefix, "var", "spack") stage_path = join_path(var_path, "stage") install_path = join_path(prefix, "opt") share_path = join_path(prefix, "share", "spack") -dotkit_path = join_path(share_path, "dotkit") # # Set up the packages database. @@ -81,7 +66,7 @@ mock_user_config = join_path(mock_config_path, "user_spackconfig") # stage directories. # from spack.directory_layout import SpecHashDirectoryLayout -install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6) +install_layout = SpecHashDirectoryLayout(install_path) # # This controls how things are concretized in spack. @@ -141,11 +126,30 @@ do_checksum = True # sys_type = None + +# +# When packages call 'from spack import *', this extra stuff is brought in. # -# Extra imports that should be generally usable from package.py files. +# Spack internal code should call 'import spack' and accesses other +# variables (spack.db, paths, etc.) directly. +# +# TODO: maybe this should be separated out and should go in build_environment.py? +# TODO: it's not clear where all the stuff that needs to be included in packages +# should live. This file is overloaded for spack core vs. for packages. 
# -from llnl.util.filesystem import working_dir +__all__ = ['Package', 'Version', 'when'] from spack.package import Package -from spack.relations import depends_on, provides, patch -from spack.multimethod import when from spack.version import Version +from spack.multimethod import when + +import llnl.util.filesystem +from llnl.util.filesystem import * +__all__ += llnl.util.filesystem.__all__ + +import spack.relations +from spack.relations import * +__all__ += spack.relations.__all__ + +import spack.util.executable +from spack.util.executable import * +__all__ += spack.util.executable.__all__ diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 38d5f70282..b71c543e5d 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -84,7 +84,7 @@ class MakeExecutable(Executable): def set_compiler_environment_variables(pkg): assert(pkg.spec.concrete) - compiler = compilers.compiler_for_spec(pkg.spec.compiler) + compiler = pkg.compiler # Set compiler variables used by CMake and autotools os.environ['CC'] = 'cc' @@ -122,7 +122,7 @@ def set_build_environment_variables(pkg): # Prefixes of all of the package's dependencies go in # SPACK_DEPENDENCIES - dep_prefixes = [d.package.prefix for d in pkg.spec.dependencies.values()] + dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)] path_set(SPACK_DEPENDENCIES, dep_prefixes) # Install prefix @@ -143,6 +143,18 @@ def set_build_environment_variables(pkg): os.environ[SPACK_SPEC] = str(pkg.spec) os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir + # Add dependencies to CMAKE_PREFIX_PATH + path_set("CMAKE_PREFIX_PATH", dep_prefixes) + + # Add any pkgconfig directories to PKG_CONFIG_PATH + pkg_config_dirs = [] + for p in dep_prefixes: + for libdir in ('lib', 'lib64'): + pcdir = join_path(p, libdir, 'pkgconfig') + if os.path.isdir(pcdir): + pkg_config_dirs.append(pcdir) + path_set("PKG_CONFIG_PATH", pkg_config_dirs) + def 
set_module_variables_for_package(pkg): """Populate the module scope of install() with some useful functions. @@ -153,6 +165,9 @@ def set_module_variables_for_package(pkg): m.make = MakeExecutable('make', pkg.parallel) m.gmake = MakeExecutable('gmake', pkg.parallel) + # easy shortcut to os.environ + m.env = os.environ + # number of jobs spack prefers to build with. m.make_jobs = multiprocessing.cpu_count() @@ -168,7 +183,7 @@ def set_module_variables_for_package(pkg): # standard CMake arguments m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix, - '-DCMAKE_BUILD_TYPE=None'] + '-DCMAKE_BUILD_TYPE=RelWithDebInfo'] if platform.mac_ver()[0]: m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST') diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index 31c908d42b..f75b68b00a 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -23,12 +23,13 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -from subprocess import check_call, check_output +from subprocess import check_call import llnl.util.tty as tty -from llnl.util.filesystem import join_path +from llnl.util.filesystem import join_path, mkdirp import spack +from spack.util.executable import which description = "Create a new installation of spack in another prefix" @@ -38,8 +39,10 @@ def setup_parser(subparser): def get_origin_url(): git_dir = join_path(spack.prefix, '.git') - origin_url = check_output( - ['git', '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url']) + git = which('git', required=True) + origin_url = git( + '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url', + return_output=True) return origin_url.strip() @@ -49,6 +52,11 @@ def bootstrap(parser, args): tty.msg("Fetching spack from origin: %s" % origin_url) + if os.path.isfile(prefix): + tty.die("There is already a file at %s" % prefix) + + mkdirp(prefix) 
+ if os.path.exists(join_path(prefix, '.git')): tty.die("There already seems to be a git repository in %s" % prefix) @@ -62,10 +70,11 @@ def bootstrap(parser, args): "%s/lib/spack/..." % prefix) os.chdir(prefix) - check_call(['git', 'init', '--shared', '-q']) - check_call(['git', 'remote', 'add', 'origin', origin_url]) - check_call(['git', 'fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q']) - check_call(['git', 'reset', '--hard', 'origin/master', '-q']) + git = which('git', required=True) + git('init', '--shared', '-q') + git('remote', 'add', 'origin', origin_url) + git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q') + git('reset', '--hard', 'origin/master', '-q') tty.msg("Successfully created a new spack in %s" % prefix, "Run %s/bin/spack to use this installation." % prefix) diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py new file mode 100644 index 0000000000..24d56db7d0 --- /dev/null +++ b/lib/spack/spack/cmd/cd.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import spack.cmd.location +import spack.modules + +description="cd to spack directories in the shell." + +def setup_parser(subparser): + """This is for decoration -- spack cd is used through spack's + shell support. This allows spack cd to print a descriptive + help message when called with -h.""" + spack.cmd.location.setup_parser(subparser) + + +def cd(parser, args): + spack.modules.print_help() diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index f5cf0d0143..5a8109b70f 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -24,7 +24,7 @@ ############################################################################## import os import re -import argparse +from external import argparse import hashlib from pprint import pprint from subprocess import CalledProcessError diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 6091cae6c8..1df9d87ae2 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -22,7 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse +from external import argparse import llnl.util.tty as tty diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index a4cd2df7e2..ac9c844a4c 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -22,7 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 
############################################################################## -import argparse +from external import argparse import llnl.util.tty as tty from llnl.util.tty.colify import colify diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index 85f9642019..283bfc19b9 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -23,7 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys -import argparse +from external import argparse import llnl.util.tty as tty diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index cc9a1342e7..c98162aca7 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -70,7 +70,7 @@ class ${class_name}(Package): homepage = "http://www.example.com" url = "${url}" - versions = ${versions} +${versions} def install(self, spec, prefix): # FIXME: Modify the configure line to suit your build system here. 
@@ -88,6 +88,9 @@ def setup_parser(subparser): '--keep-stage', action='store_true', dest='keep_stage', help="Don't clean up staging area when command completes.") subparser.add_argument( + '-n', '--name', dest='alternate_name', default=None, + help="Override the autodetected name for the created package.") + subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="Overwrite any existing package file with the same name.") @@ -114,25 +117,11 @@ class ConfigureGuesser(object): self.configure = '%s\n # %s' % (autotools, cmake) -def make_version_dict(ver_hash_tuples): - max_len = max(len(str(v)) for v,hfg in ver_hash_tuples) - width = max_len + 2 - format = "%-" + str(width) + "s : '%s'," - sep = '\n ' - return '{ ' + sep.join(format % ("'%s'" % v, h) - for v, h in ver_hash_tuples) + ' }' - - -def get_name(): - """Prompt user to input a package name.""" - name = "" - while not name: - new_name = raw_input("Name: ") - if spack.db.valid_name(name): - name = new_name - else: - print "Package name can only contain A-Z, a-z, 0-9, '_' and '-'" - return name +def make_version_calls(ver_hash_tuples): + """Adds a version() call to the package for each version found.""" + max_len = max(len(str(v)) for v, h in ver_hash_tuples) + format = " version(%%-%ds, '%%s')" % (max_len + 2) + return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples) def create(parser, args): @@ -140,13 +129,22 @@ def create(parser, args): # Try to deduce name and version of the new package from the URL name, version = spack.url.parse_name_and_version(url) - if not name: - tty.msg("Couldn't guess a name for this package.") - name = get_name() + + # Use a user-supplied name if one is present + name = kwargs.get(args, 'alternate_name', False) + if args.name: + name = args.name if not version: tty.die("Couldn't guess a version string from %s." % url) + if not name: + tty.die("Couldn't guess a name for this package. 
Try running:", "", + "spack create --name <name> <url>") + + if not spack.db.valid_name(name): + tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'") + tty.msg("This looks like a URL for %s version %s." % (name, version)) tty.msg("Creating template for package %s" % name) @@ -195,7 +193,7 @@ def create(parser, args): configure=guesser.configure, class_name=mod_to_class(name), url=url, - versions=make_version_dict(ver_hash_tuples))) + versions=make_version_calls(ver_hash_tuples))) # If everything checks out, go ahead and edit. spack.editor(pkg_path) diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py index db6be88d32..652f243b98 100644 --- a/lib/spack/spack/cmd/dependents.py +++ b/lib/spack/spack/cmd/dependents.py @@ -22,14 +22,14 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse +from external import argparse import llnl.util.tty as tty import spack import spack.cmd -description = "Show dependent packages." +description = "Show installed packages that depend on another." 
def setup_parser(subparser): subparser.add_argument( @@ -42,5 +42,5 @@ def dependents(parser, args): tty.die("spack dependents takes only one spec.") fmt = '$_$@$%@$+$=$#' - deps = [d.format(fmt) for d in specs[0].package.installed_dependents] - tty.msg("Dependents of %s" % specs[0].format(fmt), *deps) + deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents] + tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps) diff --git a/lib/spack/spack/cmd/dotkit.py b/lib/spack/spack/cmd/dotkit.py deleted file mode 100644 index 7a691ae5c0..0000000000 --- a/lib/spack/spack/cmd/dotkit.py +++ /dev/null @@ -1,99 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -import sys -import os -import shutil -import argparse - -import llnl.util.tty as tty -from llnl.util.lang import partition_list -from llnl.util.filesystem import mkdirp - -import spack.cmd -import spack.hooks.dotkit -from spack.spec import Spec - - -description ="Find dotkits for packages if they exist." - -def setup_parser(subparser): - subparser.add_argument( - '--refresh', action='store_true', help='Regenerate all dotkits') - - subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='spec to find a dotkit for.') - - -def dotkit_find(parser, args): - if not args.spec: - parser.parse_args(['dotkit', '-h']) - - spec = spack.cmd.parse_specs(args.spec) - if len(spec) > 1: - tty.die("You can only pass one spec.") - spec = spec[0] - - if not spack.db.exists(spec.name): - tty.die("No such package: %s" % spec.name) - - specs = [s for s in spack.db.installed_package_specs() if s.satisfies(spec)] - - if len(specs) == 0: - tty.die("No installed packages match spec %s" % spec) - - if len(specs) > 1: - tty.error("Multiple matches for spec %s. Choose one:" % spec) - for s in specs: - sys.stderr.write(s.tree(color=True)) - sys.exit(1) - - match = specs[0] - if not os.path.isfile(spack.hooks.dotkit.dotkit_file(match.package)): - tty.die("No dotkit is installed for package %s." 
% spec) - - print match.format('$_$@$+$%@$=$#') - - -def dotkit_refresh(parser, args): - query_specs = spack.cmd.parse_specs(args.spec) - - specs = spack.db.installed_package_specs() - if query_specs: - specs = [s for s in specs - if any(s.satisfies(q) for q in query_specs)] - else: - shutil.rmtree(spack.dotkit_path, ignore_errors=False) - mkdirp(spack.dotkit_path) - - for spec in specs: - spack.hooks.dotkit.post_install(spec.package) - - - -def dotkit(parser, args): - if args.refresh: - dotkit_refresh(parser, args) - else: - dotkit_find(parser, args) diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index c96cf75c9b..3647186a3c 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -44,7 +44,7 @@ class ${class_name}(Package): homepage = "http://www.example.com" url = "http://www.example.com/${name}-1.0.tar.gz" - versions = { '1.0' : '0123456789abcdef0123456789abcdef' } + version('1.0', '0123456789abcdef0123456789abcdef') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py index 1dd8703daf..0ccebd9486 100644 --- a/lib/spack/spack/cmd/fetch.py +++ b/lib/spack/spack/cmd/fetch.py @@ -22,7 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse +from external import argparse import spack import spack.cmd diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 7f2bce119e..2238484a21 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -24,7 +24,7 @@ ############################################################################## import sys import collections -import argparse +from external import argparse from StringIO import StringIO import llnl.util.tty as tty @@ -89,7 +89,7 @@ def find(parser, args): format = " 
%-{}s%s".format(width) for abbrv, spec in zip(abbreviated, specs): - print format % (abbrv, spec.package.prefix) + print format % (abbrv, spec.prefix) elif args.full_specs: for spec in specs: diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 648dbf905a..bb147b30f5 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -37,15 +37,17 @@ def info(parser, args): package = spack.db.get(args.name) print "Package: ", package.name print "Homepage: ", package.homepage - print "Download: ", package.url print print "Safe versions: " - if package.versions: - colify(reversed(sorted(package.versions)), indent=4) + if not package.versions: + print("None.") else: - print "None. Use spack versions %s to get a list of downloadable versions." % package.name + maxlen = max(len(str(v)) for v in package.versions) + fmt = "%%-%ss" % maxlen + for v in reversed(sorted(package.versions)): + print " " + (fmt % v) + " " + package.url_for_version(v) print print "Dependencies:" diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 4570d6c40f..2374d02feb 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -23,7 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys -import argparse +from external import argparse import spack import spack.cmd diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py new file mode 100644 index 0000000000..06574d9725 --- /dev/null +++ b/lib/spack/spack/cmd/load.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by David Beckingsale, david@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import spack.modules + +description ="Add package to environment using modules." + +def setup_parser(subparser): + """Parser is only constructed so that this prints a nice help + message with -h. """ + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.') + + +def load(parser, args): + spack.modules.print_help() diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py new file mode 100644 index 0000000000..074d984ee6 --- /dev/null +++ b/lib/spack/spack/cmd/location.py @@ -0,0 +1,93 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +from external import argparse + +import llnl.util.tty as tty +from llnl.util.filesystem import join_path + +import spack +import spack.cmd + +description="Print out locations of various diectories used by Spack" + +def setup_parser(subparser): + global directories + directories = subparser.add_mutually_exclusive_group() + + directories.add_argument( + '-m', '--module-dir', action='store_true', help="Spack python module directory.") + directories.add_argument( + '-r', '--spack-root', action='store_true', help="Spack installation root.") + + directories.add_argument( + '-i', '--install-dir', action='store_true', + help="Install prefix for spec (spec need not be installed).") + directories.add_argument( + '-p', '--package-dir', action='store_true', + help="Directory enclosing a spec's package.py file.") + directories.add_argument( + '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.") + directories.add_argument( + '-b', '--build-dir', action='store_true', + help="Expanded archive directory for a spec (requires it to be staged first).") + + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.") + + +def 
location(parser, args): + if args.module_dir: + print spack.module_path + + elif args.spack_root: + print spack.prefix + + else: + specs = spack.cmd.parse_specs(args.spec, concretize=True) + if not specs: + tty.die("You must supply a spec.") + if len(specs) != 1: + tty.die("Too many specs. Need only one.") + spec = specs[0] + + if args.install_dir: + print spec.prefix + + elif args.package_dir: + print join_path(spack.db.root, spec.name) + + else: + pkg = spack.db.get(spec) + + if args.stage_dir: + print pkg.stage.path + + else: # args.build_dir is the default. + if not os.listdir(pkg.stage.path): + tty.die("Build directory does not exist yet. Run this to create it:", + "spack stage " + " ".join(args.spec)) + print pkg.stage.expanded_archive_path + diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 129ac6bd45..b42b329085 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -24,10 +24,10 @@ ############################################################################## import os import shutil -import argparse from datetime import datetime from contextlib import closing +from external import argparse import llnl.util.tty as tty from llnl.util.tty.colify import colify from llnl.util.filesystem import mkdirp, join_path @@ -41,7 +41,7 @@ from spack.stage import Stage from spack.util.compression import extension -description = "Manage spack mirrors." +description = "Manage mirrors." def setup_parser(subparser): subparser.add_argument( diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py new file mode 100644 index 0000000000..34f0855a50 --- /dev/null +++ b/lib/spack/spack/cmd/module.py @@ -0,0 +1,107 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys +import os +import shutil +from external import argparse + +import llnl.util.tty as tty +from llnl.util.lang import partition_list +from llnl.util.filesystem import mkdirp + +import spack.cmd +from spack.modules import module_types +from spack.util.string import * + +from spack.spec import Spec + +description ="Manipulate modules and dotkits." + + +def setup_parser(subparser): + sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command') + + refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.') + + find_parser = sp.add_parser('find', help='Find module files for packages.') + find_parser.add_argument( + 'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]") + find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.') + + +def module_find(mtype, spec_array): + """Look at all installed packages and see if the spec provided + matches any. 
If it does, check whether there is a module file + of type <mtype> there, and print out the name that the user + should type to use that package's module. + """ + if mtype not in module_types: + tty.die("Invalid module type: '%s'. Options are %s." % (mtype, comma_or(module_types))) + + specs = spack.cmd.parse_specs(spec_array) + if len(specs) > 1: + tty.die("You can only pass one spec.") + spec = specs[0] + + specs = [s for s in spack.db.installed_package_specs() if s.satisfies(spec)] + if len(specs) == 0: + tty.die("No installed packages match spec %s" % spec) + + if len(specs) > 1: + tty.error("Multiple matches for spec %s. Choose one:" % spec) + for s in specs: + sys.stderr.write(s.tree(color=True)) + sys.exit(1) + + mt = module_types[mtype] + mod = mt(specs[0]) + if not os.path.isfile(mod.file_name): + tty.die("No %s module is installed for %s." % (mtype, spec)) + + print mod.use_name + + +def module_refresh(): + """Regenerate all module files for installed packages known to + spack (some packages may no longer exist).""" + specs = [s for s in spack.db.installed_known_package_specs()] + + for name, cls in module_types.items(): + tty.msg("Regenerating %s module files." % name) + if os.path.isdir(cls.path): + shutil.rmtree(cls.path, ignore_errors=False) + mkdirp(cls.path) + for spec in specs: + tty.debug(" Writing file for %s." 
% spec) + cls(spec).write() + + + +def module(parser, args): + if args.module_command == 'refresh': + module_refresh() + + elif args.module_command == 'find': + module_find(args.module_type, args.spec) diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index 2356583b07..a6556c4828 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -22,7 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse +from external import argparse import spack.cmd import spack diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py new file mode 100644 index 0000000000..82ebd13ff9 --- /dev/null +++ b/lib/spack/spack/cmd/pkg.py @@ -0,0 +1,124 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
import os

from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack
from spack.util.executable import *

description = "Query packages associated with particular git revisions in spack."


def setup_parser(subparser):
    """Build the `spack pkg` subcommand parsers."""
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='pkg_command')

    list_parser = sp.add_parser('list', help=pkg_list.__doc__)
    list_parser.add_argument('rev', default='HEAD', nargs='?',
                             help="Revision to list packages for.")

    # diff/added/removed all take the same pair of optional revisions.
    for cmd_name, cmd_doc in (('diff', pkg_diff.__doc__),
                              ('added', pkg_added.__doc__),
                              ('removed', pkg_removed.__doc__)):
        rev_parser = sp.add_parser(cmd_name, help=cmd_doc)
        rev_parser.add_argument('rev1', nargs='?', default='HEAD^',
                                help="Revision to compare against.")
        rev_parser.add_argument('rev2', nargs='?', default='HEAD',
                                help="Revision to compare to rev1 (default is HEAD).")


def get_git():
    """Return a git Executable for the spack repo.

    Side effect: chdir's to spack.prefix so subsequent git commands run
    inside the repository.  Dies if this spack is not a git checkout.
    """
    os.chdir(spack.prefix)
    if not os.path.isdir('.git'):
        tty.die("No git repo in %s.  Can't use 'spack pkg'" % spack.prefix)
    return which("git", required=True)


def list_packages(rev):
    """Return the sorted package names present at git revision `rev`."""
    git = get_git()
    # Path of the packages directory, relative to the repository root.
    relpath = spack.packages_path[len(spack.prefix + os.path.sep):] + os.path.sep
    output = git('ls-tree', '--full-tree', '--name-only', rev, relpath,
                 return_output=True)
    return sorted(line[len(relpath):] for line in output.split('\n') if line)


def pkg_list(args):
    """List packages associated with a particular spack git revision."""
    colify(list_packages(args.rev))


def diff_packages(rev1, rev2):
    """Return (only-in-rev1, only-in-rev2) sets of package names."""
    names1 = set(list_packages(rev1))
    names2 = set(list_packages(rev2))
    return names1 - names2, names2 - names1


def pkg_diff(args):
    """Compare packages available in two different git revisions."""
    only1, only2 = diff_packages(args.rev1, args.rev2)

    if only1:
        print("%s:" % args.rev1)
        colify(sorted(only1), indent=4)
        if only1:
            print('')

    if only2:
        print("%s:" % args.rev2)
        colify(sorted(only2), indent=4)


def pkg_removed(args):
    """Show packages removed since a commit."""
    only1, _ = diff_packages(args.rev1, args.rev2)
    if only1:
        colify(sorted(only1))


def pkg_added(args):
    """Show packages added since a commit."""
    _, only2 = diff_packages(args.rev1, args.rev2)
    if only2:
        colify(sorted(only2))


def pkg(parser, args):
    """Dispatch to the handler for args.pkg_command."""
    subcommands = {'diff': pkg_diff,
                   'list': pkg_list,
                   'removed': pkg_removed,
                   'added': pkg_added}
    subcommands[args.pkg_command](args)
def stage(parser, args):
    """Fetch and expand the source archive for each requested spec.

    Dies if no specs were given; honors --no-checksum by disabling
    spack's download verification before staging.
    """
    if not args.specs:
        tty.die("stage requires at least one package argument")

    if args.no_checksum:
        # Global flag consulted by the fetch/checksum machinery.
        spack.do_checksum = False

    for spec in spack.cmd.parse_specs(args.specs, concretize=True):
        spack.db.get(spec).do_stage()
from external import argparse
import spack.modules

description ="Remove package from environment using module."


def setup_parser(subparser):
    """Parser is only constructed so that this prints a nice help
       message with -h."""
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        help='Spec of package to unload with modules.')


def unload(parser, args):
    """`spack unload` works via shell integration; tell the user how to set it up."""
    spack.modules.print_help()
""" subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to remove.') + 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.') def unuse(parser, args): - spack.cmd.use.print_help() + spack.modules.print_help() diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py index 10a0644df8..e34c194739 100644 --- a/lib/spack/spack/cmd/use.py +++ b/lib/spack/spack/cmd/use.py @@ -22,29 +22,17 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse -import llnl.util.tty as tty -import spack +from external import argparse +import spack.modules description ="Add package to environment using dotkit." def setup_parser(subparser): + """Parser is only constructed so that this prints a nice help + message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to add.') - - -def print_help(): - tty.msg("Spack dotkit support is not initialized.", - "", - "To use dotkit with Spack, you must first run the command", - "below, which you can copy and paste:", - "", - "For bash:", - " . 
%s/setup-env.bash" % spack.share_path, - "", - "ksh/csh/tcsh shells are currently unsupported", - "") + 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.') def use(parser, args): - print_help() + spack.modules.print_help() diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 716356bdd2..90fbf08241 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -94,6 +94,9 @@ class Compiler(object): # Names of generic arguments used by this compiler arg_rpath = '-Wl,-rpath,%s' + # argument used to get C++11 options + cxx11_flag = "-std=c++11" + def __init__(self, cspec, cc, cxx, f77, fc): def check(exe): @@ -189,7 +192,7 @@ class Compiler(object): return None successful = [key for key in parmap(check, checks) if key is not None] - return { (v, p, s) : path for v, p, s, path in successful } + return dict(((v, p, s), path) for v, p, s, path in successful) @classmethod def find(cls, *path): diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index c5bfe21ed4..467472cced 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -176,7 +176,7 @@ def compilers_for_spec(compiler_spec): config = _get_config() def get_compiler(cspec): - items = { k:v for k,v in config.items('compiler "%s"' % cspec) } + items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec)) if not all(n in items for n in _required_instance_vars): raise InvalidCompilerConfigurationError(cspec) diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index cc3c52ca61..097b24bb87 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -22,7 +22,9 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import llnl.util.tty as tty from 
spack.compiler import * +from spack.version import ver class Gcc(Compiler): # Subclasses use possible names of C compiler @@ -40,6 +42,15 @@ class Gcc(Compiler): # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes. suffixes = [r'-mp-\d\.\d'] + @property + def cxx11_flag(self): + if self.version < ver('4.3'): + tty.die("Only gcc 4.3 and above support c++11.") + elif self.version < ver('4.7'): + return "-std=gnu++0x" + else: + return "-std=gnu++11" + @classmethod def fc_version(cls, fc): return get_compiler_version( diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index 02e3b96b19..2a72c4eaea 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -37,6 +37,15 @@ class Intel(Compiler): # Subclasses use possible names of Fortran 90 compiler fc_names = ['ifort'] + @property + def cxx11_flag(self): + if self.version < ver('11.1'): + tty.die("Only intel 11.1 and above support c++11.") + elif self.version < ver('13'): + return "-std=c++0x" + else: + return "-std=c++11" + @classmethod def default_version(cls, comp): diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index f5775ef1bf..e6d1bb87d4 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -72,7 +72,7 @@ class DefaultConcretizer(object): if valid_versions: spec.versions = ver([valid_versions[-1]]) else: - spec.versions = ver([pkg.default_version]) + raise NoValidVerionError(spec) def concretize_architecture(self, spec): @@ -118,7 +118,7 @@ class DefaultConcretizer(object): return try: - nearest = next(p for p in spec.preorder_traversal(direction='parents') + nearest = next(p for p in spec.traverse(direction='parents') if p.compiler is not None).compiler if not nearest in all_compilers: @@ -158,3 +158,11 @@ class UnavailableCompilerVersionError(spack.error.SpackError): super(UnavailableCompilerVersionError, self).__init__( "No available compiler version matches '%s'" % 
class NoValidVerionError(spack.error.SpackError):
    """Raised when there is no available version for a package that
       satisfies a spec.

    NOTE(review): the class name misspells "Version".  It is kept as-is
    because concretize_version() raises it under this name; a correctly
    spelled alias is provided below for new code.
    """
    def __init__(self, spec):
        super(NoValidVerionError, self).__init__(
            "No available version of %s matches '%s'"
            % (spec.name, spec.versions))


# Correctly spelled, backward-compatible alias; prefer this in new code.
NoValidVersionError = NoValidVerionError
@@ -381,27 +377,21 @@ class SpackConfigParser(cp.RawConfigParser): raise cp.MissingSectionHeaderError(fpname, lineno, line) # an option line? else: - mo = self._optcre.match(line) + mo = self.OPTCRE.match(line) if mo: optname, vi, optval = mo.group('option', 'vi', 'value') + if vi in ('=', ':') and ';' in optval: + # ';' is a comment delimiter only if it follows + # a spacing character + pos = optval.find(';') + if pos != -1 and optval[pos-1].isspace(): + optval = optval[:pos] + optval = optval.strip() + # allow empty values + if optval == '""': + optval = '' optname = self.optionxform(optname.rstrip()) - # This check is fine because the OPTCRE cannot - # match if it would set optval to None - if optval is not None: - if vi in ('=', ':') and ';' in optval: - # ';' is a comment delimiter only if it follows - # a spacing character - pos = optval.find(';') - if pos != -1 and optval[pos-1].isspace(): - optval = optval[:pos] - optval = optval.strip() - # allow empty values - if optval == '""': - optval = '' - cursect[optname] = [optval] - else: - # valueless option handling - cursect[optname] = optval + cursect[optname] = optval else: # a non-fatal parsing error occurred. set up the # exception but keep going. the exception will be @@ -414,23 +404,13 @@ class SpackConfigParser(cp.RawConfigParser): if e: raise e - # join the multi-line values collected while reading - all_sections = [self._defaults] - all_sections.extend(self._sections.values()) - for options in all_sections: - # skip comments - if isinstance(options, basestring): - continue - for name, val in options.items(): - if isinstance(val, list): - options[name] = '\n'.join(val) def _write(self, fp): """Write an .ini-format representation of the configuration state. - This is taken from the default Python 2.7 source. It writes 4 + This is taken from the default Python 2.6 source. It writes 4 spaces at the beginning of lines instead of no leading space. 
""" if self._defaults: @@ -449,11 +429,9 @@ class SpackConfigParser(cp.RawConfigParser): # Allow leading whitespace fp.write("[%s]\n" % section) for (key, value) in self._sections[section].items(): - if key == "__name__": - continue - if (value is not None) or (self._optcre == self.OPTCRE): - key = " = ".join((key, str(value).replace('\n', '\n\t'))) - fp.write(" %s\n" % (key)) + if key != "__name__": + fp.write(" %s = %s\n" % + (key, str(value).replace('\n', '\n\t'))) class SpackConfigurationError(spack.error.SpackError): diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 28719521d2..9b31aad5fe 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -29,7 +29,10 @@ import hashlib import shutil from contextlib import closing +import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp + +import spack from spack.spec import Spec from spack.error import SpackError @@ -131,12 +134,9 @@ class SpecHashDirectoryLayout(DirectoryLayout): """Prefix size is number of characters in the SHA-1 prefix to use to make each hash unique. """ - prefix_size = kwargs.get('prefix_size', 8) - spec_file = kwargs.get('spec_file', '.spec') - + spec_file_name = kwargs.get('spec_file_name', '.spec') super(SpecHashDirectoryLayout, self).__init__(root) - self.prefix_size = prefix_size - self.spec_file = spec_file + self.spec_file_name = spec_file_name def relative_path_for_spec(self, spec): @@ -154,15 +154,36 @@ class SpecHashDirectoryLayout(DirectoryLayout): def read_spec(self, path): """Read the contents of a file and parse them as a spec""" with closing(open(path)) as spec_file: - string = spec_file.read().replace('\n', '') - return Spec(string) + # Specs from files are assumed normal and concrete + spec = Spec(spec_file.read().replace('\n', '')) + + # If we do not have a package on hand for this spec, we know + # it is concrete, and we *assume* that it is normal. 
This + # prevents us from trying to fetch a non-existing package, and + # allows best effort for commands like spack find. + if not spack.db.exists(spec.name): + spec._normal = True + spec._concrete = True + else: + spec.normalize() + if not spec.concrete: + tty.warn("Spec read from installed package is not concrete:", + path, spec) + + return spec + + + def spec_file_path(self, spec): + """Gets full path to spec file""" + _check_concrete(spec) + return join_path(self.path_for_spec(spec), self.spec_file_name) def make_path_for_spec(self, spec): _check_concrete(spec) path = self.path_for_spec(spec) - spec_file_path = join_path(path, self.spec_file) + spec_file_path = self.spec_file_path(spec) if os.path.isdir(path): if not os.path.isfile(spec_file_path): @@ -176,8 +197,7 @@ class SpecHashDirectoryLayout(DirectoryLayout): spec_hash = self.hash_spec(spec) installed_hash = self.hash_spec(installed_spec) if installed_spec == spec_hash: - raise SpecHashCollisionError( - installed_hash, spec_hash, self.prefix_size) + raise SpecHashCollisionError(installed_hash, spec_hash) else: raise InconsistentInstallDirectoryError( 'Spec file in %s does not match SHA-1 hash!' @@ -194,7 +214,7 @@ class SpecHashDirectoryLayout(DirectoryLayout): for path in traverse_dirs_at_depth(self.root, 3): arch, compiler, last_dir = path spec_file_path = join_path( - self.root, arch, compiler, last_dir, self.spec_file) + self.root, arch, compiler, last_dir, self.spec_file_name) if os.path.exists(spec_file_path): spec = self.read_spec(spec_file_path) yield spec @@ -208,10 +228,10 @@ class DirectoryLayoutError(SpackError): class SpecHashCollisionError(DirectoryLayoutError): """Raised when there is a hash collision in an SpecHashDirectoryLayout.""" - def __init__(self, installed_spec, new_spec, prefix_size): + def __init__(self, installed_spec, new_spec): super(SpecHashDirectoryLayout, self).__init__( - 'Specs %s and %s have the same %d character SHA-1 prefix!' 
- % prefix_size, installed_spec, new_spec) + 'Specs %s and %s have the same SHA-1 prefix!' + % installed_spec, new_spec) class InconsistentInstallDirectoryError(DirectoryLayoutError): diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 40e0e75fdb..8bcbd83c28 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -28,7 +28,8 @@ class SpackError(Exception): Subclasses can be found in the modules they have to do with. """ def __init__(self, message, long_message=None): - super(SpackError, self).__init__(message) + super(SpackError, self).__init__() + self.message = message self.long_message = long_message diff --git a/lib/spack/spack/hooks/dotkit.py b/lib/spack/spack/hooks/dotkit.py index 10b7732353..4e748ff80a 100644 --- a/lib/spack/spack/hooks/dotkit.py +++ b/lib/spack/spack/hooks/dotkit.py @@ -22,62 +22,14 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import re -import textwrap -import shutil -from contextlib import closing - -from llnl.util.filesystem import join_path, mkdirp - -import spack - - -def dotkit_file(pkg): - dk_file_name = pkg.spec.format('$_$@$%@$+$=$#') + ".dk" - return join_path(spack.dotkit_path, dk_file_name) +import spack.modules def post_install(pkg): - if not os.path.exists(spack.dotkit_path): - mkdirp(spack.dotkit_path) - - alterations = [] - for var, path in [ - ('PATH', pkg.prefix.bin), - ('MANPATH', pkg.prefix.man), - ('MANPATH', pkg.prefix.share_man), - ('LD_LIBRARY_PATH', pkg.prefix.lib), - ('LD_LIBRARY_PATH', pkg.prefix.lib64)]: - - if os.path.isdir(path): - alterations.append("dk_alter %s %s\n" % (var, path)) - - if not alterations: - return - - alterations.append("dk_alter CMAKE_PREFIX_PATH %s\n" % pkg.prefix) - - dk_file = dotkit_file(pkg) - with closing(open(dk_file, 'w')) as dk: - # Put everything in the spack 
import spack.modules


def post_install(pkg):
    """Install hook: generate a dotkit file for the freshly installed package."""
    spack.modules.Dotkit(pkg.spec).write()


def post_uninstall(pkg):
    """Uninstall hook: remove the package's dotkit file."""
    spack.modules.Dotkit(pkg.spec).remove()
import spack.modules


def post_install(pkg):
    """Install hook: generate a tcl module file for the freshly installed package."""
    spack.modules.TclModule(pkg.spec).write()


def post_uninstall(pkg):
    """Uninstall hook: remove the package's tcl module file."""
    spack.modules.TclModule(pkg.spec).remove()
"""This module contains code for creating environment modules, which
can include dotkits, tcl modules, lmod, and others.

The various types of modules are installed by post-install hooks and
removed after an uninstall by post-uninstall hooks.  This class
consolidates the logic for creating an abstract description of the
information that module systems need.  Currently that includes a
number of directories to be appended to paths in the user's
environment:

  * /bin directories to be appended to PATH
  * /lib* directories for LD_LIBRARY_PATH
  * /man* and /share/man* directories for MANPATH
  * the package prefix for CMAKE_PREFIX_PATH

This module also includes logic for coming up with unique names for
the module files so that they can be found by the various
shell-support files in $SPACK/share/spack/setup-env.*.

Each hook in hooks/ implements the logic for writing its specific type
of module file.
"""
__all__ = ['EnvModule', 'Dotkit', 'TclModule']

import os
import re
import shutil
import textwrap
from contextlib import closing

import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp

import spack

# Registry of all types of modules.  Entries are created by EnvModule's
# metaclass as subclasses are defined.
module_types = {}


def print_help():
    """For use by commands to tell user how to activate shell support."""
    tty.msg("This command requires spack's shell integration.",
            "",
            "To initialize spack's shell commands, you must run one of",
            "the commands below. Choose the right command for your shell.",
            "",
            "For bash and zsh:",
            "    . %s/setup-env.sh" % spack.share_path,
            "",
            "For csh and tcsh:",
            "    setenv SPACK_ROOT %s" % spack.prefix,
            "    source %s/setup-env.csh" % spack.share_path,
            "")


class EnvModule(object):
    """Abstract description of one package's module file.

    Subclasses supply file_name/use_name and _write(); this base class
    computes the PATH-like alterations shared by all module systems.
    """
    name = 'env_module'

    class __metaclass__(type):
        def __init__(cls, name, bases, dict):
            type.__init__(cls, name, bases, dict)
            # Register every concrete subclass by its `name` attribute.
            if cls.name != 'env_module':
                module_types[cls.name] = cls

    def __init__(self, spec=None):
        # category in the modules system
        # TODO: come up with smarter category names.
        self.category = "spack"

        # Descriptions for the module system's UI.
        self.short_description = ""
        self.long_description = ""

        # dict: env var name -> list of directories to prepend in the
        # module file.  Computed lazily by the `paths` property.
        self._paths = None
        self.spec = spec

    @property
    def paths(self):
        """Environment alterations for this spec (computed once, cached)."""
        if self._paths is None:
            self._paths = {}

            def add_path(path_name, directory):
                path = self._paths.setdefault(path_name, [])
                path.append(directory)

            # Add paths if they exist.
            for var, directory in [
                    ('PATH', self.spec.prefix.bin),
                    ('MANPATH', self.spec.prefix.man),
                    ('MANPATH', self.spec.prefix.share_man),
                    ('LD_LIBRARY_PATH', self.spec.prefix.lib),
                    ('LD_LIBRARY_PATH', self.spec.prefix.lib64)]:
                if os.path.isdir(directory):
                    add_path(var, directory)

            # short description is just the package + version
            # TODO: maybe packages can optionally provide it.
            self.short_description = self.spec.format("$_ $@")

            # long description is the docstring with reduced whitespace.
            if self.spec.package.__doc__:
                self.long_description = re.sub(
                    r'\s+', ' ', self.spec.package.__doc__)

        return self._paths

    def write(self):
        """Write out a module file for this object."""
        module_dir = os.path.dirname(self.file_name)
        if not os.path.exists(module_dir):
            mkdirp(module_dir)

        # If there are no paths, no need for a module file.
        if not self.paths:
            return

        with closing(open(self.file_name, 'w')) as f:
            self._write(f)

    def _write(self, stream):
        """To be implemented by subclasses."""
        raise NotImplementedError()

    @property
    def file_name(self):
        """Subclasses should implement this to return the name of the file
           where this module lives."""
        raise NotImplementedError()

    @property
    def use_name(self):
        """Subclasses should implement this to return the name the
           module command uses to refer to the package."""
        raise NotImplementedError()

    def remove(self):
        """Delete this spec's module file, if present.

        BUG FIX: this used shutil.rmtree(mod_file, ignore_errors=True),
        but module files are regular files and rmtree only removes
        directories -- with errors ignored, stale module files were
        silently left behind after uninstall.
        """
        mod_file = self.file_name
        if os.path.exists(mod_file):
            try:
                os.remove(mod_file)
            except OSError as e:
                # Best effort, as the rmtree version was: warn, don't die.
                tty.warn("Could not remove module file %s: %s" % (mod_file, e))


class Dotkit(EnvModule):
    """Writes dotkit (.dk) files for use with the `use` command."""
    name = 'dotkit'
    path = join_path(spack.share_path, "dotkit")

    @property
    def file_name(self):
        return join_path(Dotkit.path, self.spec.architecture,
                         self.spec.format('$_$@$%@$+$#.dk'))

    @property
    def use_name(self):
        return self.spec.format('$_$@$%@$+$#')

    def _write(self, dk_file):
        # Category
        if self.category:
            dk_file.write('#c %s\n' % self.category)

        # Short description
        if self.short_description:
            dk_file.write('#d %s\n' % self.short_description)

        # Long description
        if self.long_description:
            for line in textwrap.wrap(self.long_description, 72):
                dk_file.write("#h %s\n" % line)

        # Path alterations
        for var, dirs in self.paths.items():
            for directory in dirs:
                dk_file.write("dk_alter %s %s\n" % (var, directory))

        # Let CMake find this package.
        dk_file.write("dk_alter CMAKE_PREFIX_PATH %s\n" % self.spec.prefix)


class TclModule(EnvModule):
    """Writes tcl module files for use with environment-modules."""
    name = 'tcl'
    path = join_path(spack.share_path, "modules")

    @property
    def file_name(self):
        return join_path(TclModule.path, self.spec.architecture, self.use_name)

    @property
    def use_name(self):
        return self.spec.format('$_$@$%@$+$#')

    def _write(self, m_file):
        # TODO: category?
        m_file.write('#%Module1.0\n')

        # Short description
        if self.short_description:
            m_file.write('module-whatis \"%s\"\n\n' % self.short_description)

        # Long description
        if self.long_description:
            m_file.write('proc ModulesHelp { } {\n')
            # BUG FIX: the original re.sub(r'"', '\"', ...) was a no-op
            # ('\"' is the same string as '"'); backslash-escape the
            # quotes so the generated Tcl string stays well-formed.
            doc = re.sub(r'"', '\\"', self.long_description)
            m_file.write("puts stderr \"%s\"\n" % doc)
            m_file.write('}\n\n')

        # Path alterations
        for var, dirs in self.paths.items():
            for directory in dirs:
                m_file.write("prepend-path %s \"%s\"\n" % (var, directory))

        m_file.write("prepend-path CMAKE_PREFIX_PATH \"%s\"\n" % self.spec.prefix)
- """ - force_url = False - def __init__(self, spec): - # These attributes are required for all packages. - attr_required(self.__class__, 'homepage') - attr_required(self.__class__, 'url') - # this determines how the package should be built. self.spec = spec @@ -337,28 +332,36 @@ class Package(object): if '.' in self.name: self.name = self.name[self.name.rindex('.') + 1:] - # Make sure URL is an allowed type - validate_package_url(self.url) - - # patch up the URL with a new version if the spec version is concrete - if self.spec.versions.concrete: - self.url = self.url_for_version(self.spec.version) - # This is set by scraping a web page. self._available_versions = None - # versions should be a dict from version to checksum, for safe versions - # of this package. If it's not present, make it an empty dict. - if not hasattr(self, 'versions'): - self.versions = {} - - if not isinstance(self.versions, dict): - raise ValueError("versions attribute of package %s must be a dict!" - % self.name) + # Sanity check some required variables that could be + # overridden by package authors. + def sanity_check_dict(attr_name): + if not hasattr(self, attr_name): + raise PackageError("Package %s must define %s" % attr_name) + + attr = getattr(self, attr_name) + if not isinstance(attr, dict): + raise PackageError("Package %s has non-dict %s attribute!" + % (self.name, attr_name)) + sanity_check_dict('versions') + sanity_check_dict('dependencies') + sanity_check_dict('conflicted') + sanity_check_dict('patches') + + # Check versions in the versions dict. 
+ for v in self.versions: + assert(isinstance(v, Version)) + + # Check version descriptors + for v in sorted(self.versions): + vdesc = self.versions[v] + assert(isinstance(vdesc, spack.relations.VersionDescriptor)) # Version-ize the keys in versions dict try: - self.versions = { Version(v):h for v,h in self.versions.items() } + self.versions = dict((Version(v), h) for v,h in self.versions.items()) except ValueError: raise ValueError("Keys of versions dict in package %s must be versions!" % self.name) @@ -366,6 +369,10 @@ class Package(object): # stage used to build this package. self._stage = None + # patch up self.url based on the actual version + if self.spec.concrete: + self.url = self.url_for_version(self.version) + # Set a default list URL (place to find available versions) if not hasattr(self, 'list_url'): self.list_url = None @@ -375,18 +382,6 @@ class Package(object): @property - def default_version(self): - """Get the version in the default URL for this package, - or fails.""" - try: - return url.parse_version(self.__class__.url) - except UndetectableVersionError: - raise PackageError( - "Couldn't extract a default version from %s." % self.url, - " You must specify it explicitly in the package file.") - - - @property def version(self): if not self.spec.concrete: raise ValueError("Can only get version of concrete package.") @@ -399,9 +394,13 @@ class Package(object): raise ValueError("Can only get a stage for a concrete package.") if self._stage is None: + if not self.url: + raise PackageVersionError(self.version) + # TODO: move this logic into a mirror module. 
mirror_path = "%s/%s" % (self.name, "%s-%s.%s" % ( self.name, self.version, extension(self.url))) + self._stage = Stage( self.url, mirror_path=mirror_path, name=self.spec.short_spec) return self._stage @@ -496,7 +495,7 @@ class Package(object): on this one.""" dependents = [] for spec in spack.db.installed_package_specs(): - if self.spec != spec and self.spec in spec: + if self.name != spec.name and self.spec in spec: dependents.append(spec) return dependents @@ -507,6 +506,14 @@ class Package(object): return self.spec.prefix + @property + def compiler(self): + """Get the spack.compiler.Compiler object used to build this package.""" + if not self.spec.concrete: + raise ValueError("Can only get a compiler for a concrete package.") + return spack.compilers.compiler_for_spec(self.spec.compiler) + + def url_version(self, version): """Given a version, this returns a string that should be substituted into the package's URL to download that version. @@ -514,16 +521,48 @@ class Package(object): override this, e.g. for boost versions where you need to ensure that there are _'s in the download URL. """ - if self.force_url: - return self.default_version return str(version) def url_for_version(self, version): - """Gives a URL that you can download a new version of this package from.""" - if self.force_url: - return self.url - return url.substitute_version(self.__class__.url, self.url_version(version)) + """Returns a URL that you can download a new version of this package from.""" + if not isinstance(version, Version): + version = Version(version) + + def nearest_url(version): + """Finds the URL for the next lowest version with a URL. + If there is no lower version with a URL, uses the + package url property. If that isn't there, uses a + *higher* URL, and if that isn't there raises an error. 
+ """ + url = getattr(self, 'url', None) + for v in sorted(self.versions): + if v > version and url: + break + if self.versions[v].url: + url = self.versions[v].url + return url + + if version in self.versions: + vdesc = self.versions[version] + if not vdesc.url: + base_url = nearest_url(version) + vdesc.url = url.substitute_version( + base_url, self.url_version(version)) + return vdesc.url + else: + return nearest_url(version) + + + @property + def default_url(self): + if self.concrete: + return self.url_for_version(self.version) + else: + url = getattr(self, 'url', None) + if url: + return url + def remove_prefix(self): @@ -548,7 +587,7 @@ class Package(object): self.stage.fetch() if spack.do_checksum and self.version in self.versions: - digest = self.versions[self.version] + digest = self.versions[self.version].checksum self.stage.check(digest) tty.msg("Checksum passed for %s@%s" % (self.name, self.version)) @@ -743,7 +782,7 @@ class Package(object): ' '.join(formatted_deps)) self.remove_prefix() - tty.msg("Successfully uninstalled %s." % self.spec) + tty.msg("Successfully uninstalled %s." 
% self.spec.short_spec) # Once everything else is done, run post install hooks spack.hooks.post_uninstall(self) @@ -779,6 +818,9 @@ class Package(object): def fetch_available_versions(self): + if not hasattr(self, 'url'): + raise VersionFetchError(self.__class__) + # If not, then try to fetch using list_url if not self._available_versions: try: @@ -865,7 +907,6 @@ def print_pkg(message): print message - class FetchError(spack.error.SpackError): """Raised when something goes wrong during fetch.""" def __init__(self, message, long_msg=None): @@ -889,3 +930,19 @@ class InvalidPackageDependencyError(PackageError): its dependencies.""" def __init__(self, message): super(InvalidPackageDependencyError, self).__init__(message) + + +class PackageVersionError(PackageError): + """Raised when a version URL cannot automatically be determined.""" + def __init__(self, version): + super(PackageVersionError, self).__init__( + "Cannot determine a URL automatically for version %s." % version, + "Please provide a url for this version in the package.py file.") + + +class VersionFetchError(PackageError): + """Raised when a version URL cannot automatically be determined.""" + def __init__(self, cls): + super(VersionFetchError, self).__init__( + "Cannot fetch version for package %s " % cls.__name__ + + "because it does not define a default url.") diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 00834c95d5..72f9403a64 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -69,9 +69,12 @@ class PackageDB(object): if not spec in self.instances: package_class = self.get_class_for_package_name(spec.name) - self.instances[spec.name] = package_class(spec) + try: + self.instances[spec.copy()] = package_class(spec) + except Exception, e: + raise FailedConstructorError(spec.name, e) - return self.instances[spec.name] + return self.instances[spec] @_autospec @@ -115,7 +118,23 @@ class PackageDB(object): """Read installed package names straight from 
the install directory layout. """ - return spack.install_layout.all_specs() + # Get specs from the directory layout but ensure that they're + # all normalized properly. + installed = [] + for spec in spack.install_layout.all_specs(): + spec.normalize() + installed.append(spec) + return installed + + + def installed_known_package_specs(self): + """Read installed package names straight from the install + directory layout, but return only specs for which the + package is known to this version of spack. + """ + for spec in spack.install_layout.all_specs(): + if self.exists(spec.name): + yield spec @memoized @@ -179,24 +198,6 @@ class PackageDB(object): return cls - def compute_dependents(self): - """Reads in all package files and sets dependence information on - Package objects in memory. - """ - if not hasattr(compute_dependents, index): - compute_dependents.index = {} - - for pkg in all_packages(): - if pkg._dependents is None: - pkg._dependents = [] - - for name, dep in pkg.dependencies.iteritems(): - dpkg = self.get(name) - if dpkg._dependents is None: - dpkg._dependents = [] - dpkg._dependents.append(pkg.name) - - def graph_dependencies(self, out=sys.stdout): """Print out a graph of all the dependencies between package. Graph is in dot format.""" @@ -211,10 +212,17 @@ class PackageDB(object): return '"%s"' % string deps = [] - for pkg in all_packages(): + for pkg in self.all_packages(): out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) + + # Add edges for each depends_on in the package. for dep_name, dep in pkg.dependencies.iteritems(): deps.append((pkg.name, dep_name)) + + # If the package provides something, add an edge for that. + for provider in set(p.name for p in pkg.provided): + deps.append((provider, pkg.name)) + out.write('\n') for pair in deps: @@ -227,3 +235,12 @@ class UnknownPackageError(spack.error.SpackError): def __init__(self, name): super(UnknownPackageError, self).__init__("Package %s not found." 
% name) self.name = name + + +class FailedConstructorError(spack.error.SpackError): + """Raised when a package's class constructor fails.""" + def __init__(self, name, reason): + super(FailedConstructorError, self).__init__( + "Class constructor failed for package '%s'." % name, + str(reason)) + self.name = name diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index f46b7dfc84..5afb7e7624 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -68,23 +68,48 @@ provides spack install mpileaks ^mvapich spack install mpileaks ^mpich """ +__all__ = [ 'depends_on', 'provides', 'patch', 'version' ] + import re import inspect -import importlib from llnl.util.lang import * import spack import spack.spec import spack.error +import spack.url +from spack.version import Version from spack.patch import Patch from spack.spec import Spec, parse_anonymous_spec -"""Adds a dependencies local variable in the locals of - the calling class, based on args. """ +class VersionDescriptor(object): + """A VersionDescriptor contains information to describe a + particular version of a package. That currently includes a URL + for the version along with a checksum.""" + def __init__(self, checksum, url): + self.checksum = checksum + self.url = url + + +def version(ver, checksum, **kwargs): + """Adds a version and associated metadata to the package.""" + pkg = caller_locals() + + versions = pkg.setdefault('versions', {}) + patches = pkg.setdefault('patches', {}) + + ver = Version(ver) + url = kwargs.get('url', None) + + versions[ver] = VersionDescriptor(checksum, url) + + def depends_on(*specs): + """Adds a dependencies local variable in the locals of + the calling class, based on args. 
""" pkg = get_calling_package_name() dependencies = caller_locals().setdefault('dependencies', {}) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 1e2da10dcc..4838fd9946 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -94,6 +94,7 @@ import sys import itertools import hashlib from StringIO import StringIO +from operator import attrgetter import llnl.util.tty as tty from llnl.util.lang import * @@ -309,9 +310,8 @@ class DependencyMap(HashableMap): def __str__(self): - sorted_dep_names = sorted(self.keys()) return ''.join( - ["^" + str(self[name]) for name in sorted_dep_names]) + ["^" + str(self[name]) for name in sorted(self.keys())]) @key_ordering @@ -345,6 +345,13 @@ class Spec(object): self.compiler = other.compiler self.dependencies = other.dependencies + # Specs are by default not assumed to be normal, but in some + # cases we've read them from a file want to assume normal. + # This allows us to manipulate specs that Spack doesn't have + # package.py files for. + self._normal = kwargs.get('normal', False) + self._concrete = kwargs.get('concrete', False) + # This allows users to construct a spec DAG with literals. # Note that given two specs a and b, Spec(a) copies a, but # Spec(a, b) will copy a but just add b as a dep. @@ -432,17 +439,30 @@ class Spec(object): If any of the name, version, architecture, compiler, or depdenencies are ambiguous,then it is not concrete. """ - return bool(not self.virtual - and self.versions.concrete - and self.architecture - and self.compiler and self.compiler.concrete - and self.dependencies.concrete) + if self._concrete: + return True + + self._concrete = bool(not self.virtual + and self.versions.concrete + and self.architecture + and self.compiler and self.compiler.concrete + and self.dependencies.concrete) + return self._concrete - def preorder_traversal(self, visited=None, d=0, **kwargs): - """Generic preorder traversal of the DAG represented by this spec. 
+ def traverse(self, visited=None, d=0, **kwargs): + """Generic traversal of the DAG represented by this spec. This will yield each node in the spec. Options: + order [=pre|post] + Order to traverse spec nodes. Defaults to preorder traversal. + Options are: + + 'pre': Pre-order traversal; each node is yielded before its + children in the dependency DAG. + 'post': Post-order traversal; each node is yielded after its + children in the dependency DAG. + cover [=nodes|edges|paths] Determines how extensively to cover the dag. Possible vlaues: @@ -460,7 +480,7 @@ class Spec(object): spec, but also their depth from the root in a (depth, node) tuple. - keyfun [=id] + key [=id] Allow a custom key function to track the identity of nodes in the traversal. @@ -472,44 +492,57 @@ class Spec(object): 'parents', traverses upwards in the DAG towards the root. """ + # get initial values for kwargs depth = kwargs.get('depth', False) key_fun = kwargs.get('key', id) + if isinstance(key_fun, basestring): + key_fun = attrgetter(key_fun) yield_root = kwargs.get('root', True) cover = kwargs.get('cover', 'nodes') direction = kwargs.get('direction', 'children') + order = kwargs.get('order', 'pre') - cover_values = ('nodes', 'edges', 'paths') - if cover not in cover_values: - raise ValueError("Invalid value for cover: %s. Choices are %s" - % (cover, ",".join(cover_values))) - - direction_values = ('children', 'parents') - if direction not in direction_values: - raise ValueError("Invalid value for direction: %s. Choices are %s" - % (direction, ",".join(direction_values))) + # Make sure kwargs have legal values; raise ValueError if not. + def validate(name, val, allowed_values): + if val not in allowed_values: + raise ValueError("Invalid value for %s: %s. 
Choices are %s" + % (name, val, ",".join(allowed_values))) + validate('cover', cover, ('nodes', 'edges', 'paths')) + validate('direction', direction, ('children', 'parents')) + validate('order', order, ('pre', 'post')) if visited is None: visited = set() + key = key_fun(self) + + # Node traversal does not yield visited nodes. + if key in visited and cover == 'nodes': + return + # Determine whether and what to yield for this node. + yield_me = yield_root or d > 0 result = (d, self) if depth else self - key = key_fun(self) - if key in visited: - if cover == 'nodes': return - if yield_root or d > 0: yield result - if cover == 'edges': return - else: - if yield_root or d > 0: yield result + # Preorder traversal yields before successors + if yield_me and order == 'pre': + yield result - successors = self.dependencies - if direction == 'parents': - successors = self.dependents + # Edge traversal yields but skips children of visited nodes + if not (key in visited and cover == 'edges'): + # This code determines direction and yields the children/parents + successors = self.dependencies + if direction == 'parents': + successors = self.dependents - visited.add(key) - for name in sorted(successors): - child = successors[name] - for elt in child.preorder_traversal(visited, d+1, **kwargs): - yield elt + visited.add(key) + for name in sorted(successors): + child = successors[name] + for elt in child.traverse(visited, d+1, **kwargs): + yield elt + + # Postorder traversal yields after successors + if yield_me and order == 'post': + yield result @property @@ -525,13 +558,14 @@ class Spec(object): def dep_hash(self, length=None): - """Return a hash representing the dependencies of this spec - This will always normalize first so that the hash is consistent. - """ - self.normalize() + """Return a hash representing all dependencies of this spec + (direct and indirect). + If you want this hash to be consistent, you should + concretize the spec first so that it is not ambiguous. 
+ """ sha = hashlib.sha1() - sha.update(str(self.dependencies)) + sha.update(self.dep_string()) full_hash = sha.hexdigest() return full_hash[:length] @@ -594,7 +628,7 @@ class Spec(object): a problem. """ while True: - virtuals =[v for v in self.preorder_traversal() if v.virtual] + virtuals =[v for v in self.traverse() if v.virtual] if not virtuals: return @@ -605,8 +639,8 @@ class Spec(object): spec._replace_with(concrete) # If there are duplicate providers or duplicate provider deps, this - # consolidates them and merges constraints. - self.normalize() + # consolidates them and merge constraints. + self.normalize(force=True) def concretize(self): @@ -621,9 +655,13 @@ class Spec(object): with requirements of its pacakges. See flatten() and normalize() for more details on this. """ + if self._concrete: + return + self.normalize() self._expand_virtual_packages() self._concretize_helper() + self._concrete = True def concretized(self): @@ -635,47 +673,51 @@ class Spec(object): return clone - def flat_dependencies(self): - """Return a DependencyMap containing all of this spec's dependencies - with their constraints merged. If there are any conflicts, throw - an exception. + def flat_dependencies(self, **kwargs): + """Return a DependencyMap containing all of this spec's + dependencies with their constraints merged. - This will work even on specs that are not normalized; i.e. specs - that have two instances of the same dependency in the DAG. - This is used as the first step of normalization. + If copy is True, returns merged copies of its dependencies + without modifying the spec it's called on. + + If copy is False, clears this spec's dependencies and + returns them. """ - # This ensures that the package descriptions themselves are consistent - if not self.virtual: - self.package.validate_dependencies() + copy = kwargs.get('copy', True) - # Once that is guaranteed, we know any constraint violations are due - # to the spec -- so they're the user's fault, not Spack's. 
flat_deps = DependencyMap() try: - for spec in self.preorder_traversal(): + for spec in self.traverse(root=False): if spec.name not in flat_deps: - new_spec = spec.copy(dependencies=False) - flat_deps[spec.name] = new_spec - + if copy: + flat_deps[spec.name] = spec.copy(deps=False) + else: + flat_deps[spec.name] = spec else: flat_deps[spec.name].constrain(spec) + if not copy: + for dep in flat_deps.values(): + dep.dependencies.clear() + dep.dependents.clear() + self.dependencies.clear() + + return flat_deps + except UnsatisfiableSpecError, e: - # This REALLY shouldn't happen unless something is wrong in spack. - # It means we got a spec DAG with two instances of the same package - # that had inconsistent constraints. There's no way for a user to - # produce a spec like this (the parser adds all deps to the root), - # so this means OUR code is not sane! + # Here, the DAG contains two instances of the same package + # with inconsistent constraints. Users cannot produce + # inconsistent specs like this on the command line: the + # parser doesn't allow it. Spack must be broken! raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) - return flat_deps - def flatten(self): """Pull all dependencies up to the root (this spec). Merge constraints for dependencies with the same name, and if they conflict, throw an exception. """ - self.dependencies = self.flat_dependencies() + for dep in self.flat_dependencies(copy=False): + self._add_dependency(dep) def _normalize_helper(self, visited, spec_deps, provider_index): @@ -754,7 +796,7 @@ class Spec(object): dependency._normalize_helper(visited, spec_deps, provider_index) - def normalize(self): + def normalize(self, **kwargs): """When specs are parsed, any dependencies specified are hanging off the root, and ONLY the ones that were explicitly provided are there. 
Normalization turns a partial flat spec into a DAG, where: @@ -772,14 +814,18 @@ class Spec(object): TODO: normalize should probably implement some form of cycle detection, to ensure that the spec is actually a DAG. """ + if self._normal and not kwargs.get('force', False): + return + # Ensure first that all packages & compilers in the DAG exist. self.validate_names() - # Then ensure that the packages referenced are sane, that the - # provided spec is sane, and that all dependency specs are in the - # root node of the spec. flat_dependencies will do this for us. - spec_deps = self.flat_dependencies() - self.dependencies.clear() + # Ensure that the package & dep descriptions are consistent & sane + if not self.virtual: + self.package.validate_dependencies() + + # Get all the dependencies into one DependencyMap + spec_deps = self.flat_dependencies(copy=False) # Figure out which of the user-provided deps provide virtual deps. # Remove virtual deps that are already provided by something in the spec @@ -792,7 +838,7 @@ class Spec(object): # If there are deps specified but not visited, they're not # actually deps of this package. Raise an error. - extra = set(spec_deps.viewkeys()).difference(visited) + extra = set(spec_deps.keys()).difference(visited) # Also subtract out all the packags that provide a needed vpkg vdeps = [v for v in self.package.virtual_dependencies()] @@ -805,6 +851,9 @@ class Spec(object): raise InvalidDependencyException( self.name + " does not depend on " + comma_or(extra)) + # Mark the spec as normal once done. + self._normal = True + def normalized(self): """Return a normalized copy of this spec without modifying this spec.""" @@ -818,7 +867,7 @@ class Spec(object): If they're not, it will raise either UnknownPackageError or UnsupportedCompilerError. """ - for spec in self.preorder_traversal(): + for spec in self.traverse(): # Don't get a package for a virtual name. 
if not spec.virtual: spack.db.get(spec.name) @@ -886,17 +935,17 @@ class Spec(object): def common_dependencies(self, other): """Return names of dependencies that self an other have in common.""" common = set( - s.name for s in self.preorder_traversal(root=False)) + s.name for s in self.traverse(root=False)) common.intersection_update( - s.name for s in other.preorder_traversal(root=False)) + s.name for s in other.traverse(root=False)) return common def dep_difference(self, other): """Returns dependencies in self that are not in other.""" - mine = set(s.name for s in self.preorder_traversal(root=False)) + mine = set(s.name for s in self.traverse(root=False)) mine.difference_update( - s.name for s in other.preorder_traversal(root=False)) + s.name for s in other.traverse(root=False)) return mine @@ -955,8 +1004,8 @@ class Spec(object): return False # For virtual dependencies, we need to dig a little deeper. - self_index = ProviderIndex(self.preorder_traversal(), restrict=True) - other_index = ProviderIndex(other.preorder_traversal(), restrict=True) + self_index = ProviderIndex(self.traverse(), restrict=True) + other_index = ProviderIndex(other.traverse(), restrict=True) # This handles cases where there are already providers for both vpkgs if not self_index.satisfies(other_index): @@ -978,7 +1027,7 @@ class Spec(object): def virtual_dependencies(self): """Return list of any virtual deps in this spec.""" - return [spec for spec in self.preorder_traversal() if spec.virtual] + return [spec for spec in self.traverse() if spec.virtual] def _dup(self, other, **kwargs): @@ -993,21 +1042,31 @@ class Spec(object): Whether deps should be copied too. Set to false to copy a spec but not its dependencies. """ - # TODO: this needs to handle DAGs. + # Local node attributes get copied first. 
self.name = other.name self.versions = other.versions.copy() self.variants = other.variants.copy() self.architecture = other.architecture - self.compiler = None - if other.compiler: - self.compiler = other.compiler.copy() - + self.compiler = other.compiler.copy() if other.compiler else None self.dependents = DependencyMap() - copy_deps = kwargs.get('dependencies', True) - if copy_deps: - self.dependencies = other.dependencies.copy() - else: - self.dependencies = DependencyMap() + self.dependencies = DependencyMap() + + # If we copy dependencies, preserve DAG structure in the new spec + if kwargs.get('deps', True): + # This copies the deps from other using _dup(deps=False) + new_nodes = other.flat_dependencies() + new_nodes[self.name] = self + + # Hook everything up properly here by traversing. + for spec in other.traverse(cover='nodes'): + parent = new_nodes[spec.name] + for child in spec.dependencies: + if child not in parent.dependencies: + parent._add_dependency(new_nodes[child]) + + # Since we preserved structure, we can copy _normal safely. + self._normal = other._normal + self._concrete = other._concrete def copy(self, **kwargs): @@ -1029,7 +1088,7 @@ class Spec(object): def __getitem__(self, name): """TODO: reconcile __getitem__, _add_dependency, __contains__""" - for spec in self.preorder_traversal(): + for spec in self.traverse(): if spec.name == name: return spec @@ -1040,15 +1099,82 @@ class Spec(object): """True if this spec has any dependency that satisfies the supplied spec.""" spec = self._autospec(spec) - for s in self.preorder_traversal(): + for s in self.traverse(): if s.satisfies(spec): return True return False - def _cmp_key(self): + def sorted_deps(self): + """Return a list of all dependencies sorted by name.""" + deps = self.flat_dependencies() + return tuple(deps[name] for name in sorted(deps)) + + + def _eq_dag(self, other, vs, vo): + """Recursive helper for eq_dag and ne_dag. 
Does the actual DAG + traversal.""" + vs.add(id(self)) + vo.add(id(other)) + + if self.ne_node(other): + return False + + if len(self.dependencies) != len(other.dependencies): + return False + + ssorted = [self.dependencies[name] for name in sorted(self.dependencies)] + osorted = [other.dependencies[name] for name in sorted(other.dependencies)] + + for s, o in zip(ssorted, osorted): + visited_s = id(s) in vs + visited_o = id(o) in vo + + # Check for duplicate or non-equal dependencies + if visited_s != visited_o: return False + + # Skip visited nodes + if visited_s or visited_o: continue + + # Recursive check for equality + if not s._eq_dag(o, vs, vo): + return False + + return True + + + def eq_dag(self, other): + """True if the full dependency DAGs of specs are equal""" + return self._eq_dag(other, set(), set()) + + + def ne_dag(self, other): + """True if the full dependency DAGs of specs are not equal""" + return not self.eq_dag(other) + + + def _cmp_node(self): + """Comparison key for just *this node* and not its deps.""" return (self.name, self.versions, self.variants, - self.architecture, self.compiler, self.dependencies) + self.architecture, self.compiler) + + + def eq_node(self, other): + """Equality with another spec, not including dependencies.""" + return self._cmp_node() == other._cmp_node() + + + def ne_node(self, other): + """Inequality with another spec, not including dependencies.""" + return self._cmp_node() != other._cmp_node() + + + def _cmp_key(self): + """Comparison key for this node and all dependencies *without* + considering structure. This is the default, as + normalization will restore structure. 
+ """ + return self._cmp_node() + (self.sorted_deps(),) def colorized(self): @@ -1151,12 +1277,12 @@ class Spec(object): return result + def dep_string(self): + return ''.join("^" + dep.format() for dep in self.sorted_deps()) + + def __str__(self): - by_name = lambda d: d.name - deps = self.preorder_traversal(key=by_name, root=False) - sorted_deps = sorted(deps, key=by_name) - dep_string = ''.join("^" + dep.format() for dep in sorted_deps) - return self.format() + dep_string + return self.format() + self.dep_string() def tree(self, **kwargs): @@ -1172,7 +1298,7 @@ class Spec(object): out = "" cur_id = 0 ids = {} - for d, node in self.preorder_traversal(cover=cover, depth=True): + for d, node in self.traverse(order='pre', cover=cover, depth=True): out += " " * indent if depth: out += "%-4d" % d @@ -1261,6 +1387,9 @@ class SpecParser(spack.parse.Parser): spec.dependents = DependencyMap() spec.dependencies = DependencyMap() + spec._normal = False + spec._concrete = False + # record this so that we know whether version is # unspecified or not. added_version = False diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 839555d630..3dac798396 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -120,8 +120,7 @@ class Stage(object): if spack.use_tmp_stage: # If we're using a tmp dir, it's a link, and it points at the right spot, # then keep it. - if (os.path.commonprefix((real_path, real_tmp)) == real_tmp - and os.path.exists(real_path)): + if (real_path.startswith(real_tmp) and os.path.exists(real_path)): return False else: # otherwise, just unlink it and start over. 
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 5442189c2e..8ddc7f227d 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -45,7 +45,9 @@ test_names = ['versions', 'multimethod', 'install', 'package_sanity', - 'config'] + 'config', + 'directory_layout', + 'python_version'] def list_tests(): @@ -70,7 +72,7 @@ def run(names, verbose=False): runner = unittest.TextTestRunner(verbosity=verbosity) - testsRun = errors = failures = skipped = 0 + testsRun = errors = failures = 0 for test in names: module = 'spack.test.' + test print module @@ -81,12 +83,10 @@ def run(names, verbose=False): testsRun += result.testsRun errors += len(result.errors) failures += len(result.failures) - skipped += len(result.skipped) succeeded = not errors and not failures tty.msg("Tests Complete.", "%5d tests run" % testsRun, - "%5d skipped" % skipped, "%5d failures" % failures, "%5d errors" % errors) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 6ad2ef29d8..a7f4812c8c 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -134,29 +134,29 @@ class ConcretizeTest(MockPackagesTest): def test_virtual_is_fully_expanded_for_callpath(self): # force dependence on fake "zmpi" by asking for MPI 10.0 spec = Spec('callpath ^mpi@10.0') - self.assertIn('mpi', spec.dependencies) - self.assertNotIn('fake', spec) + self.assertTrue('mpi' in spec.dependencies) + self.assertFalse('fake' in spec) spec.concretize() - self.assertIn('zmpi', spec.dependencies) - self.assertNotIn('mpi', spec) - self.assertIn('fake', spec.dependencies['zmpi']) + self.assertTrue('zmpi' in spec.dependencies) + self.assertFalse('mpi' in spec) + self.assertTrue('fake' in spec.dependencies['zmpi']) def test_virtual_is_fully_expanded_for_mpileaks(self): spec = Spec('mpileaks ^mpi@10.0') - self.assertIn('mpi', spec.dependencies) - self.assertNotIn('fake', spec) + self.assertTrue('mpi' in 
spec.dependencies) + self.assertFalse('fake' in spec) spec.concretize() - self.assertIn('zmpi', spec.dependencies) - self.assertIn('callpath', spec.dependencies) - self.assertIn('zmpi', spec.dependencies['callpath'].dependencies) - self.assertIn('fake', spec.dependencies['callpath'].dependencies['zmpi'].dependencies) + self.assertTrue('zmpi' in spec.dependencies) + self.assertTrue('callpath' in spec.dependencies) + self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies) + self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies) - self.assertNotIn('mpi', spec) + self.assertFalse('mpi' in spec) def test_my_dep_depends_on_provider_of_my_virtual_dep(self): diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py new file mode 100644 index 0000000000..3e52954cfe --- /dev/null +++ b/lib/spack/spack/test/directory_layout.py @@ -0,0 +1,155 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""\ +This test verifies that the Spack directory layout works properly. +""" +import unittest +import tempfile +import shutil +import os +from contextlib import closing + +from llnl.util.filesystem import * + +import spack +from spack.spec import Spec +from spack.packages import PackageDB +from spack.directory_layout import SpecHashDirectoryLayout + +class DirectoryLayoutTest(unittest.TestCase): + """Tests that a directory layout works correctly and produces a + consistent install path.""" + + def setUp(self): + self.tmpdir = tempfile.mkdtemp() + self.layout = SpecHashDirectoryLayout(self.tmpdir) + + + def tearDown(self): + shutil.rmtree(self.tmpdir, ignore_errors=True) + self.layout = None + + + def test_read_and_write_spec(self): + """This goes through each package in spack and creates a directory for + it. It then ensures that the spec for the directory's + installed package can be read back in consistently, and + finally that the directory can be removed by the directory + layout. + """ + for pkg in spack.db.all_packages(): + spec = pkg.spec + + # If a spec fails to concretize, just skip it. If it is a + # real error, it will be caught by concretization tests. + try: + spec.concretize() + except: + continue + + self.layout.make_path_for_spec(spec) + + install_dir = self.layout.path_for_spec(spec) + spec_path = self.layout.spec_file_path(spec) + + # Ensure directory has been created in right place. 
+ self.assertTrue(os.path.isdir(install_dir)) + self.assertTrue(install_dir.startswith(self.tmpdir)) + + # Ensure spec file exists when directory is created + self.assertTrue(os.path.isfile(spec_path)) + self.assertTrue(spec_path.startswith(install_dir)) + + # Make sure spec file can be read back in to get the original spec + spec_from_file = self.layout.read_spec(spec_path) + self.assertEqual(spec, spec_from_file) + self.assertTrue(spec.eq_dag, spec_from_file) + self.assertTrue(spec_from_file.concrete) + + # Ensure that specs that come out "normal" are really normal. + with closing(open(spec_path)) as spec_file: + read_separately = Spec(spec_file.read()) + + read_separately.normalize() + self.assertEqual(read_separately, spec_from_file) + + read_separately.concretize() + self.assertEqual(read_separately, spec_from_file) + + # Make sure the dep hash of the read-in spec is the same + self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash()) + + # Ensure directories are properly removed + self.layout.remove_path_for_spec(spec) + self.assertFalse(os.path.isdir(install_dir)) + self.assertFalse(os.path.exists(install_dir)) + + + def test_handle_unknown_package(self): + """This test ensures that spack can at least do *some* + operations with packages that are installed but that it + does not know about. This is actually not such an uncommon + scenario with spack; it can happen when you switch from a + git branch where you're working on a new package. + + This test ensures that the directory layout stores enough + information about installed packages' specs to uninstall + or query them again if the package goes away. + """ + mock_db = PackageDB(spack.mock_packages_path) + + not_in_mock = set(spack.db.all_package_names()).difference( + set(mock_db.all_package_names())) + + # Create all the packages that are not in mock. + installed_specs = {} + for pkg_name in not_in_mock: + spec = spack.db.get(pkg_name).spec + + # If a spec fails to concretize, just skip it. 
If it is a + # real error, it will be caught by concretization tests. + try: + spec.concretize() + except: + continue + + self.layout.make_path_for_spec(spec) + installed_specs[spec] = self.layout.path_for_spec(spec) + + tmp = spack.db + spack.db = mock_db + + # Now check that even without the package files, we know + # enough to read a spec from the spec file. + for spec, path in installed_specs.items(): + spec_from_file = self.layout.read_spec(join_path(path, '.spec')) + + # To satisfy these conditions, directory layouts need to + # read in concrete specs from their install dirs somehow. + self.assertEqual(path, self.layout.path_for_spec(spec_from_file)) + self.assertEqual(spec, spec_from_file) + self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash()) + + spack.db = tmp diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index a92bd92289..8047ab92e3 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -25,15 +25,18 @@ import os import unittest import shutil +import tempfile from contextlib import closing from llnl.util.filesystem import * import spack from spack.stage import Stage +from spack.directory_layout import SpecHashDirectoryLayout from spack.util.executable import which from spack.test.mock_packages_test import * + dir_name = 'trivial-1.0' archive_name = 'trivial-1.0.tar.gz' install_test_package = 'trivial_install_test_package' @@ -66,9 +69,16 @@ class InstallTest(MockPackagesTest): tar = which('tar') tar('-czf', archive_name, dir_name) - # We use a fake pacakge, so skip the checksum. + # We use a fake package, so skip the checksum. spack.do_checksum = False + # Use a fake install directory to avoid conflicts bt/w + # installed pkgs and mock packages. 
+ self.tmpdir = tempfile.mkdtemp() + self.orig_layout = spack.install_layout + spack.install_layout = SpecHashDirectoryLayout(self.tmpdir) + + def tearDown(self): super(InstallTest, self).tearDown() @@ -78,6 +88,10 @@ class InstallTest(MockPackagesTest): # Turn checksumming back on spack.do_checksum = True + # restore spack's layout. + spack.install_layout = self.orig_layout + shutil.rmtree(self.tmpdir, ignore_errors=True) + def test_install_and_uninstall(self): # Get a basic concrete spec for the trivial install package. diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index adde70ff6c..e948376039 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -39,7 +39,6 @@ def set_pkg_dep(pkg, spec): class MockPackagesTest(unittest.TestCase): - @classmethod def setUp(self): # Use the mock packages database for these tests. This allows # us to set up contrived packages that don't interfere with @@ -52,7 +51,7 @@ class MockPackagesTest(unittest.TestCase): 'site' : spack.mock_site_config, 'user' : spack.mock_user_config } - @classmethod + def tearDown(self): """Restore the real packages path after any test.""" spack.db = self.real_db diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 1a7bc5dc5e..e3de695070 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -29,19 +29,35 @@ import unittest import spack import spack.url as url +from spack.packages import PackageDB + class PackageSanityTest(unittest.TestCase): - def test_get_all_packages(self): - """Get all packages once and make sure that works.""" + def check_db(self): + """Get all packages in a DB to make sure they work.""" for name in spack.db.all_package_names(): spack.db.get(name) + def test_get_all_packages(self): + """Get all packages once and make sure that works.""" + self.check_db() + + + def 
test_get_all_mock_packages(self): + """Get the mock packages once each too.""" + tmp = spack.db + spack.db = PackageDB(spack.mock_packages_path) + self.check_db() + spack.db = tmp + + def test_url_versions(self): - """Ensure that url_for_version does the right thing for at least the - default version of each package. - """ + """Check URLs for regular packages, if they are explicitly defined.""" for pkg in spack.db.all_packages(): - v = url.parse_version(pkg.url) - self.assertEqual(pkg.url, pkg.url_for_version(v)) + for v, vdesc in pkg.versions.items(): + if vdesc.url: + # If there is a url for the version check it. + v_url = pkg.url_for_version(v) + self.assertEqual(vdesc.url, v_url) diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py new file mode 100644 index 0000000000..04b4eadf34 --- /dev/null +++ b/lib/spack/spack/test/python_version.py @@ -0,0 +1,97 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +This test ensures that all Spack files are Python version 2.6 or less. + +Spack was originally 2.7, but enough systems in 2014 are still using +2.6 on their frontend nodes that we need 2.6 to get adopted. +""" +import unittest +import os +import re +from contextlib import closing + +import llnl.util.tty as tty + +from external import pyqver2 +import spack + +spack_max_version = (2,6) + +class PythonVersionTest(unittest.TestCase): + + def spack_python_files(self): + # first file is the spack script. + yield spack.spack_file + yield os.path.join(spack.build_env_path, 'cc') + + # Next files are all the source files and package files. + search_paths = [spack.lib_path, spack.var_path] + + # Iterate through the whole spack source tree. 
+ for path in search_paths: + for root, dirnames, filenames in os.walk(path): + for filename in filenames: + if re.match(r'^[^.#].*\.py$', filename): + yield os.path.join(root, filename) + + + def test_python_versions(self): + # dict version -> filename -> reasons + all_issues = {} + + for fn in self.spack_python_files(): + with closing(open(fn)) as pyfile: + versions = pyqver2.get_versions(pyfile.read()) + for ver, reasons in versions.items(): + if ver > spack_max_version: + if not ver in all_issues: + all_issues[ver] = {} + all_issues[ver][fn] = reasons + + if all_issues: + tty.error("Spack must run on Python version %d.%d" + % spack_max_version) + + for v in sorted(all_issues.keys(), reverse=True): + msgs = [] + for fn in sorted(all_issues[v].keys()): + short_fn = fn + if fn.startswith(spack.prefix): + short_fn = fn[len(spack.prefix):] + + reasons = [r for r in set(all_issues[v][fn]) if r] + for r in reasons: + msgs.append(("%s:%s" % ('spack' + short_fn, r[0]), r[1])) + + tty.error("These files require version %d.%d:" % v) + maxlen = max(len(f) for f, prob in msgs) + fmt = "%%-%ds%%s" % (maxlen+3) + print fmt % ('File', 'Reason') + print fmt % ('-' * (maxlen), '-' * 20) + for msg in msgs: + print fmt % msg + + self.assertTrue(len(all_issues) == 0) diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 0c0b214ab7..fb67aa8a8d 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -48,7 +48,7 @@ class SpecDagTest(MockPackagesTest): spec.package.validate_dependencies) - def test_unique_node_traversal(self): + def test_preorder_node_traversal(self): dag = Spec('mpileaks ^zmpi') dag.normalize() @@ -56,14 +56,14 @@ class SpecDagTest(MockPackagesTest): 'zmpi', 'fake'] pairs = zip([0,1,2,3,4,2,3], names) - traversal = dag.preorder_traversal() - self.assertListEqual([x.name for x in traversal], names) + traversal = dag.traverse() + self.assertEqual([x.name for x in traversal], names) - traversal = 
dag.preorder_traversal(depth=True) - self.assertListEqual([(x, y.name) for x,y in traversal], pairs) + traversal = dag.traverse(depth=True) + self.assertEqual([(x, y.name) for x,y in traversal], pairs) - def test_unique_edge_traversal(self): + def test_preorder_edge_traversal(self): dag = Spec('mpileaks ^zmpi') dag.normalize() @@ -71,14 +71,14 @@ class SpecDagTest(MockPackagesTest): 'libelf', 'zmpi', 'fake', 'zmpi'] pairs = zip([0,1,2,3,4,3,2,3,1], names) - traversal = dag.preorder_traversal(cover='edges') - self.assertListEqual([x.name for x in traversal], names) + traversal = dag.traverse(cover='edges') + self.assertEqual([x.name for x in traversal], names) - traversal = dag.preorder_traversal(cover='edges', depth=True) - self.assertListEqual([(x, y.name) for x,y in traversal], pairs) + traversal = dag.traverse(cover='edges', depth=True) + self.assertEqual([(x, y.name) for x,y in traversal], pairs) - def test_unique_path_traversal(self): + def test_preorder_path_traversal(self): dag = Spec('mpileaks ^zmpi') dag.normalize() @@ -86,11 +86,56 @@ class SpecDagTest(MockPackagesTest): 'libelf', 'zmpi', 'fake', 'zmpi', 'fake'] pairs = zip([0,1,2,3,4,3,2,3,1,2], names) - traversal = dag.preorder_traversal(cover='paths') - self.assertListEqual([x.name for x in traversal], names) + traversal = dag.traverse(cover='paths') + self.assertEqual([x.name for x in traversal], names) - traversal = dag.preorder_traversal(cover='paths', depth=True) - self.assertListEqual([(x, y.name) for x,y in traversal], pairs) + traversal = dag.traverse(cover='paths', depth=True) + self.assertEqual([(x, y.name) for x,y in traversal], pairs) + + + def test_postorder_node_traversal(self): + dag = Spec('mpileaks ^zmpi') + dag.normalize() + + names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi', + 'callpath', 'mpileaks'] + pairs = zip([4,3,2,3,2,1,0], names) + + traversal = dag.traverse(order='post') + self.assertEqual([x.name for x in traversal], names) + + traversal = dag.traverse(depth=True, 
order='post') + self.assertEqual([(x, y.name) for x,y in traversal], pairs) + + + def test_postorder_edge_traversal(self): + dag = Spec('mpileaks ^zmpi') + dag.normalize() + + names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', + 'callpath', 'zmpi', 'mpileaks'] + pairs = zip([4,3,3,2,3,2,1,1,0], names) + + traversal = dag.traverse(cover='edges', order='post') + self.assertEqual([x.name for x in traversal], names) + + traversal = dag.traverse(cover='edges', depth=True, order='post') + self.assertEqual([(x, y.name) for x,y in traversal], pairs) + + + def test_postorder_path_traversal(self): + dag = Spec('mpileaks ^zmpi') + dag.normalize() + + names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', + 'callpath', 'fake', 'zmpi', 'mpileaks'] + pairs = zip([4,3,3,2,3,2,1,2,1,0], names) + + traversal = dag.traverse(cover='paths', order='post') + self.assertEqual([x.name for x in traversal], names) + + traversal = dag.traverse(cover='paths', depth=True, order='post') + self.assertEqual([(x, y.name) for x,y in traversal], pairs) def test_conflicting_spec_constraints(self): @@ -142,7 +187,7 @@ class SpecDagTest(MockPackagesTest): # make sure nothing with the same name occurs twice counts = {} - for spec in dag.preorder_traversal(keyfun=id): + for spec in dag.traverse(key=id): if not spec.name in counts: counts[spec.name] = 0 counts[spec.name] += 1 @@ -152,15 +197,15 @@ class SpecDagTest(MockPackagesTest): def check_links(self, spec_to_check): - for spec in spec_to_check.preorder_traversal(): + for spec in spec_to_check.traverse(): for dependent in spec.dependents.values(): - self.assertIn( - spec.name, dependent.dependencies, + self.assertTrue( + spec.name in dependent.dependencies, "%s not in dependencies of %s" % (spec.name, dependent.name)) for dependency in spec.dependencies.values(): - self.assertIn( - spec.name, dependency.dependents, + self.assertTrue( + spec.name in dependency.dependents, "%s not in dependents of %s" % (spec.name, 
dependency.name)) @@ -221,30 +266,53 @@ class SpecDagTest(MockPackagesTest): def test_equal(self): - spec = Spec('mpileaks ^callpath ^libelf ^libdwarf') - self.assertNotEqual(spec, Spec( - 'mpileaks', Spec('callpath', - Spec('libdwarf', - Spec('libelf'))))) - self.assertNotEqual(spec, Spec( - 'mpileaks', Spec('callpath', - Spec('libelf', - Spec('libdwarf'))))) + # Different spec structures to test for equality + flat = Spec('mpileaks ^callpath ^libelf ^libdwarf') + + flat_init = Spec( + 'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf')) + + flip_flat = Spec( + 'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath')) + + dag = Spec('mpileaks', Spec('callpath', + Spec('libdwarf', + Spec('libelf')))) - self.assertEqual(spec, Spec( - 'mpileaks', Spec('callpath'), Spec('libdwarf'), Spec('libelf'))) + flip_dag = Spec('mpileaks', Spec('callpath', + Spec('libelf', + Spec('libdwarf')))) - self.assertEqual(spec, Spec( - 'mpileaks', Spec('libelf'), Spec('libdwarf'), Spec('callpath'))) + # All these are equal to each other with regular == + specs = (flat, flat_init, flip_flat, dag, flip_dag) + for lhs, rhs in zip(specs, specs): + self.assertEqual(lhs, rhs) + self.assertEqual(str(lhs), str(rhs)) + + # Same DAGs constructed different ways are equal + self.assertTrue(flat.eq_dag(flat_init)) + + # order at same level does not matter -- (dep on same parent) + self.assertTrue(flat.eq_dag(flip_flat)) + + # DAGs should be unequal if nesting is different + self.assertFalse(flat.eq_dag(dag)) + self.assertFalse(flat.eq_dag(flip_dag)) + self.assertFalse(flip_flat.eq_dag(dag)) + self.assertFalse(flip_flat.eq_dag(flip_dag)) + self.assertFalse(dag.eq_dag(flip_dag)) def test_normalize_mpileaks(self): + # Spec parsed in from a string spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf') + # What that spec should look like after parsing expected_flat = Spec( 'mpileaks', Spec('mpich'), Spec('callpath'), Spec('dyninst'), Spec('libelf@1.8.11'), 
Spec('libdwarf')) + # What it should look like after normalization mpich = Spec('mpich') libelf = Spec('libelf@1.8.11') expected_normalized = Spec( @@ -257,7 +325,10 @@ class SpecDagTest(MockPackagesTest): mpich), mpich) - expected_non_unique_nodes = Spec( + # Similar to normalized spec, but now with copies of the same + # libelf node. Normalization should result in a single unique + # node for each package, so this is the wrong DAG. + non_unique_nodes = Spec( 'mpileaks', Spec('callpath', Spec('dyninst', @@ -267,21 +338,33 @@ class SpecDagTest(MockPackagesTest): mpich), Spec('mpich')) - self.assertEqual(expected_normalized, expected_non_unique_nodes) - - self.assertEqual(str(expected_normalized), str(expected_non_unique_nodes)) - self.assertEqual(str(spec), str(expected_non_unique_nodes)) - self.assertEqual(str(expected_normalized), str(spec)) + # All specs here should be equal under regular equality + specs = (spec, expected_flat, expected_normalized, non_unique_nodes) + for lhs, rhs in zip(specs, specs): + self.assertEqual(lhs, rhs) + self.assertEqual(str(lhs), str(rhs)) + # Test that equal and equal_dag are doing the right thing self.assertEqual(spec, expected_flat) - self.assertNotEqual(spec, expected_normalized) - self.assertNotEqual(spec, expected_non_unique_nodes) + self.assertTrue(spec.eq_dag(expected_flat)) + + self.assertEqual(spec, expected_normalized) + self.assertFalse(spec.eq_dag(expected_normalized)) + + self.assertEqual(spec, non_unique_nodes) + self.assertFalse(spec.eq_dag(non_unique_nodes)) spec.normalize() - self.assertNotEqual(spec, expected_flat) + # After normalizing, spec_dag_equal should match the normalized spec. 
+ self.assertEqual(spec, expected_flat) + self.assertFalse(spec.eq_dag(expected_flat)) + self.assertEqual(spec, expected_normalized) - self.assertEqual(spec, expected_non_unique_nodes) + self.assertTrue(spec.eq_dag(expected_normalized)) + + self.assertEqual(spec, non_unique_nodes) + self.assertFalse(spec.eq_dag(non_unique_nodes)) def test_normalize_with_virtual_package(self): @@ -302,10 +385,63 @@ class SpecDagTest(MockPackagesTest): def test_contains(self): spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf') - self.assertIn(Spec('mpi'), spec) - self.assertIn(Spec('libelf'), spec) - self.assertIn(Spec('libelf@1.8.11'), spec) - self.assertNotIn(Spec('libelf@1.8.12'), spec) - self.assertIn(Spec('libdwarf'), spec) - self.assertNotIn(Spec('libgoblin'), spec) - self.assertIn(Spec('mpileaks'), spec) + self.assertTrue(Spec('mpi') in spec) + self.assertTrue(Spec('libelf') in spec) + self.assertTrue(Spec('libelf@1.8.11') in spec) + self.assertFalse(Spec('libelf@1.8.12') in spec) + self.assertTrue(Spec('libdwarf') in spec) + self.assertFalse(Spec('libgoblin') in spec) + self.assertTrue(Spec('mpileaks') in spec) + + + def test_copy_simple(self): + orig = Spec('mpileaks') + copy = orig.copy() + + self.check_links(copy) + + self.assertEqual(orig, copy) + self.assertTrue(orig.eq_dag(copy)) + self.assertEqual(orig._normal, copy._normal) + self.assertEqual(orig._concrete, copy._concrete) + + # ensure no shared nodes bt/w orig and copy. + orig_ids = set(id(s) for s in orig.traverse()) + copy_ids = set(id(s) for s in copy.traverse()) + self.assertFalse(orig_ids.intersection(copy_ids)) + + + def test_copy_normalized(self): + orig = Spec('mpileaks') + orig.normalize() + copy = orig.copy() + + self.check_links(copy) + + self.assertEqual(orig, copy) + self.assertTrue(orig.eq_dag(copy)) + self.assertEqual(orig._normal, copy._normal) + self.assertEqual(orig._concrete, copy._concrete) + + # ensure no shared nodes bt/w orig and copy. 
+ orig_ids = set(id(s) for s in orig.traverse()) + copy_ids = set(id(s) for s in copy.traverse()) + self.assertFalse(orig_ids.intersection(copy_ids)) + + + def test_copy_concretized(self): + orig = Spec('mpileaks') + orig.concretize() + copy = orig.copy() + + self.check_links(copy) + + self.assertEqual(orig, copy) + self.assertTrue(orig.eq_dag(copy)) + self.assertEqual(orig._normal, copy._normal) + self.assertEqual(orig._concrete, copy._concrete) + + # ensure no shared nodes bt/w orig and copy. + orig_ids = set(id(s) for s in orig.traverse()) + copy_ids = set(id(s) for s in copy.traverse()) + self.assertFalse(orig_ids.intersection(copy_ids)) diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index 08899f9810..a412549dc7 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -51,28 +51,20 @@ readme_text = "hello world!\n" stage_name = 'spack-test-stage' -class with_tmp(object): - """Decorator that executes a function with or without spack set to use - a temp dir. Spack allows builds to happen directly in the - stage directory or in a tmp dir and symlinked into the stage - directory, so this lets us use the same test in both cases. +@contextmanager +def use_tmp(use_tmp): + """Allow some test code to be executed with spack.use_tmp_stage + set to a certain value. Context manager makes sure it's reset + on failure. """ - def __init__(self, use_tmp): - self.use_tmp = use_tmp - - def __call__(self, fun): - use_tmp = self.use_tmp - def new_test_function(self): - old_tmp = spack.use_tmp_stage - spack.use_tmp_stage = use_tmp - fun(self) - spack.use_tmp_stage = old_tmp - return new_test_function + old_tmp = spack.use_tmp_stage + spack.use_tmp_stage = use_tmp + yield + spack.use_tmp_stage = old_tmp class StageTest(unittest.TestCase): - @classmethod - def setUpClass(cls): + def setUp(self): """This sets up a mock archive to fetch, and a mock temp space for use by the Stage class. 
It doesn't actually create the Stage -- that is done by individual tests. @@ -92,52 +84,58 @@ class StageTest(unittest.TestCase): tar('czf', archive_name, archive_dir) # Make spack use the test environment for tmp stuff. - cls.old_tmp_dirs = spack.tmp_dirs + self.old_tmp_dirs = spack.tmp_dirs spack.tmp_dirs = [test_tmp_path] + # record this since this test changes to directories that will + # be removed. + self.working_dir = os.getcwd() + - @classmethod - def tearDownClass(cls): + def tearDown(self): """Blows away the test environment directory.""" shutil.rmtree(test_files_dir) + # chdir back to original working dir + os.chdir(self.working_dir) + # restore spack's original tmp environment - spack.tmp_dirs = cls.old_tmp_dirs + spack.tmp_dirs = self.old_tmp_dirs def get_stage_path(self, stage, stage_name): - """Figure out based on a stage and an intended name where it should - be living. This depends on whether it's named or not. + """Figure out where a stage should be living. This depends on + whether it's named. """ - if stage_name: + if stage_name is not None: # If it is a named stage, we know where the stage should be - stage_path = join_path(spack.stage_path, stage_name) + return join_path(spack.stage_path, stage_name) else: # If it's unnamed, ensure that we ran mkdtemp in the right spot. - stage_path = stage.path - self.assertIsNotNone(stage_path) - self.assertEqual( - os.path.commonprefix((stage_path, spack.stage_path)), - spack.stage_path) - return stage_path + self.assertTrue(stage.path is not None) + self.assertTrue(stage.path.startswith(spack.stage_path)) + return stage.path def check_setup(self, stage, stage_name): """Figure out whether a stage was set up correctly.""" stage_path = self.get_stage_path(stage, stage_name) + + # Ensure stage was created in the spack stage directory self.assertTrue(os.path.isdir(stage_path)) if spack.use_tmp_stage: - # Make sure everything was created and linked correctly for - # a tmp stage. 
+ # Check that the stage dir is really a symlink. self.assertTrue(os.path.islink(stage_path)) + # Make sure it points to a valid directory target = os.path.realpath(stage_path) self.assertTrue(os.path.isdir(target)) self.assertFalse(os.path.islink(target)) - self.assertEqual( - os.path.commonprefix((target, test_tmp_path)), - test_tmp_path) + + # Make sure the directory is in the place we asked it to + # be (see setUp and tearDown) + self.assertTrue(target.startswith(test_tmp_path)) else: # Make sure the stage path is NOT a link for a non-tmp stage @@ -146,15 +144,15 @@ class StageTest(unittest.TestCase): def check_fetch(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) - self.assertIn(archive_name, os.listdir(stage_path)) + self.assertTrue(archive_name in os.listdir(stage_path)) self.assertEqual(join_path(stage_path, archive_name), stage.archive_file) def check_expand_archive(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) - self.assertIn(archive_name, os.listdir(stage_path)) - self.assertIn(archive_dir, os.listdir(stage_path)) + self.assertTrue(archive_name in os.listdir(stage_path)) + self.assertTrue(archive_dir in os.listdir(stage_path)) self.assertEqual( join_path(stage_path, archive_dir), @@ -192,32 +190,40 @@ class StageTest(unittest.TestCase): self.assertFalse(os.path.exists(target)) - def checkSetupAndDestroy(self, stage_name=None): - stage = Stage(archive_url, name=stage_name) - self.check_setup(stage, stage_name) - - stage.destroy() - self.check_destroy(stage, stage_name) - - - @with_tmp(True) def test_setup_and_destroy_name_with_tmp(self): - self.checkSetupAndDestroy(stage_name) + with use_tmp(True): + stage = Stage(archive_url, name=stage_name) + self.check_setup(stage, stage_name) + + stage.destroy() + self.check_destroy(stage, stage_name) - @with_tmp(False) def test_setup_and_destroy_name_without_tmp(self): - self.checkSetupAndDestroy(stage_name) + with use_tmp(False): + stage = 
Stage(archive_url, name=stage_name) + self.check_setup(stage, stage_name) + + stage.destroy() + self.check_destroy(stage, stage_name) - @with_tmp(True) def test_setup_and_destroy_no_name_with_tmp(self): - self.checkSetupAndDestroy(None) + with use_tmp(True): + stage = Stage(archive_url) + self.check_setup(stage, None) + + stage.destroy() + self.check_destroy(stage, None) - @with_tmp(False) def test_setup_and_destroy_no_name_without_tmp(self): - self.checkSetupAndDestroy(None) + with use_tmp(False): + stage = Stage(archive_url) + self.check_setup(stage, None) + + stage.destroy() + self.check_destroy(stage, None) def test_chdir(self): @@ -286,7 +292,7 @@ class StageTest(unittest.TestCase): with closing(open('foobar', 'w')) as file: file.write("this file is to be destroyed.") - self.assertIn('foobar', os.listdir(stage.expanded_archive_path)) + self.assertTrue('foobar' in os.listdir(stage.expanded_archive_path)) # Make sure the file is not there after restage. stage.restage() @@ -295,7 +301,7 @@ class StageTest(unittest.TestCase): stage.chdir_to_archive() self.check_chdir_to_archive(stage, stage_name) - self.assertNotIn('foobar', os.listdir(stage.expanded_archive_path)) + self.assertFalse('foobar' in os.listdir(stage.expanded_archive_path)) stage.destroy() self.check_destroy(stage, stage_name) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 1b8120168f..902ce9817d 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -82,12 +82,16 @@ def parse_version_string_with_indices(path): """Try to extract a version string from a filename or URL. This is taken largely from Homebrew's Version class.""" - if os.path.isdir(path): - stem = os.path.basename(path) - elif re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path): - stem = comp.stem(os.path.dirname(path)) - else: - stem = comp.stem(path) + # Strip off sourceforge download stuffix. 
+ if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path): + path = os.path.dirname(path) + + # Strip archive extension + path = comp.strip_extension(path) + + # Take basename to avoid including parent dirs in version name + # Remember the offset of the stem in the full path. + stem = os.path.basename(path) version_types = [ # GitHub tarballs, e.g. v1.2.3 @@ -137,10 +141,10 @@ def parse_version_string_with_indices(path): (r'_((\d+\.)+\d+[a-z]?)[.]orig$', stem), # e.g. http://www.openssl.org/source/openssl-0.9.8s.tar.gz - (r'-([^-]+)', stem), + (r'-([^-]+(-alpha|-beta)?)', stem), # e.g. astyle_1.23_macosx.tar.gz - (r'_([^_]+)', stem), + (r'_([^_]+(_alpha|_beta)?)', stem), # e.g. http://mirrors.jenkins-ci.org/war/1.486/jenkins.war (r'\/(\d\.\d+)\/', path), @@ -152,7 +156,9 @@ def parse_version_string_with_indices(path): regex, match_string = vtype[:2] match = re.search(regex, match_string) if match and match.group(1) is not None: - return match.group(1), match.start(1), match.end(1) + version = match.group(1) + start = path.index(version) + return version, start, start+len(version) raise UndetectableVersionError(path) diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index 7ce8e8c65b..a67576bd50 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -48,7 +48,7 @@ def decompressor_for(path): return tar -def stem(path): +def strip_extension(path): """Get the part of a path that does not include its compressed type extension.""" for type in ALLOWED_ARCHIVE_TYPES: diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 4d8681bed9..950e807596 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -35,7 +35,7 @@ _acceptable_hashes = [ hashlib.sha512 ] """Index for looking up hasher for a digest.""" -_size_to_hash = { h().digest_size : h for h in _acceptable_hashes } +_size_to_hash = dict((h().digest_size, h) for h in 
_acceptable_hashes) def checksum(hashlib_algo, filename, **kwargs): diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index bc27b25889..923c7c19a5 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -121,7 +121,7 @@ def which(name, **kwargs): for dir in path: exe = os.path.join(dir, name) - if os.access(exe, os.X_OK): + if os.path.isfile(exe) and os.access(exe, os.X_OK): return Executable(exe) if required: diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py index e7a9a6862e..234163bf52 100644 --- a/lib/spack/spack/util/string.py +++ b/lib/spack/spack/util/string.py @@ -33,7 +33,9 @@ def comma_list(sequence, article=''): return sequence[0] else: out = ', '.join(str(s) for s in sequence[:-1]) - out += ', ' + if len(sequence) != 2: + out += ',' # oxford comma + out += ' ' if article: out += article + ' ' out += str(sequence[-1]) diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index ce94303a9c..fbf86db8e1 100644 --- a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -47,7 +47,8 @@ import os import sys import re from bisect import bisect_left -from functools import total_ordering, wraps +from functools import wraps +from external.functools import total_ordering import llnl.util.compare.none_high as none_high import llnl.util.compare.none_low as none_low @@ -181,7 +182,7 @@ class Version(object): # Add possible alpha or beta indicator at the end of each segemnt # We treat these specially b/c they're so common. - wc += '[ab]?)?' * (len(segments) - 1) + wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1) return wc diff --git a/share/spack/csh/pathadd.csh b/share/spack/csh/pathadd.csh new file mode 100644 index 0000000000..1e0800c5f3 --- /dev/null +++ b/share/spack/csh/pathadd.csh @@ -0,0 +1,23 @@ +######################################################################## +# Prepends directories to path, if they exist. 
+# pathadd /path/to/dir # add to PATH +# or pathadd OTHERPATH /path/to/dir # add to OTHERPATH +######################################################################## +# If no variable name is supplied, just append to PATH +# otherwise append to that variable. +set _pa_varname = PATH; +set _pa_new_path = $_pa_args[1]; +[ $#_pa_args -gt 1 ] && set _pa_varname = $_pa_args[1] && set _pa_new_path = $_pa_args[2]; + +# Check whether the variable is set yet. +set _pa_old_value = "" +eval set _pa_set = '$?'$_pa_varname +[ $_pa_set -eq 1 ] && eval set _pa_old_value='$'$_pa_varname; + +# Do the actual prepending here, if it is a dir and not already in the path +if ( -d $_pa_new_path && \:$_pa_old_value\: !~ *\:$_pa_new_path\:* ) then + [ -n "$_pa_old_value" ] && setenv $_pa_varname $_pa_new_path\:$_pa_old_value + [ -z "$_pa_old_value" ] && setenv $_pa_varname $_pa_new_path +endif + +unset _pa_args _pa_new_path _pa_old_value _pa_set _pa_varname diff --git a/share/spack/csh/spack.csh b/share/spack/csh/spack.csh new file mode 100644 index 0000000000..30c4ec1361 --- /dev/null +++ b/share/spack/csh/spack.csh @@ -0,0 +1,101 @@ +######################################################################## +# This is a wrapper around the spack command that forwards calls to +# 'spack use' and 'spack unuse' to shell functions. This in turn +# allows them to be used to invoke dotkit functions. +# +# 'spack use' is smarter than just 'use' because it converts its +# arguments into a unique spack spec that is then passed to dotkit +# commands. This allows the user to use packages without knowing all +# their installation details. +# +# e.g., rather than requring a full spec for libelf, the user can type: +# +# spack use libelf +# +# This will first find the available libelf dotkits and use a +# matching one. 
If there are two versions of libelf, the user would +# need to be more specific, e.g.: +# +# spack use libelf@0.8.13 +# +# This is very similar to how regular spack commands work and it +# avoids the need to come up with a user-friendly naming scheme for +# spack dotfiles. +######################################################################## +# accumulate initial flags for main spack command +set _sp_flags = "" +while ( $#_sp_args > 0 ) + if ( "$_sp_args[1]" !~ "-*" ) break + set _sp_flags = "$_sp_flags $_sp_args[1]" + shift _sp_args +end + +# h and V flags don't require further output parsing. +if ( "$_sp_flags" =~ *h* || "$_sp_flags" =~ *V* ) then + \spack $_sp_flags $_sp_args + goto _sp_end +endif + +# Set up args -- we want a subcommand and a spec. +set _sp_subcommand="" +set _sp_spec="" +[ $#_sp_args -gt 0 ] && set _sp_subcommand = ($_sp_args[1]) +[ $#_sp_args -gt 1 ] && set _sp_spec = ($_sp_args[2-]) + +# Figure out what type of module we're running here. +set _sp_modtype = "" +switch ($_sp_subcommand) +case cd: + shift _sp_args + cd `spack location $_sp_args` + breaksw +case use: +case unuse: +case load: +case unload: + set _sp_module_args="""" + if ( "$_sp_spec" =~ "-*" ) then + set _sp_module_args = $_sp_spec[1] + shift _sp_spec + set _sp_spec = ($_sp_spec) + endif + + # Here the user has run use or unuse with a spec. Find a matching + # spec using 'spack module find', then use the appropriate module + # tool's commands to add/remove the result from the environment. + switch ($_sp_subcommand) + case "use": + set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) + if ( $? == 0 ) then + use $_sp_module_args $_sp_full_spec + endif + breaksw + case "unuse": + set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" ) + if ( $? == 0 ) then + unuse $_sp_module_args $_sp_full_spec + endif + breaksw + case "load": + set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) + if ( $? 
== 0 ) then + module load $_sp_module_args $_sp_full_spec + endif + breaksw + case "unload": + set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" ) + if ( $? == 0 ) then + module unload $_sp_module_args $_sp_full_spec + endif + breaksw + endsw + breaksw + +default: + \spack $_sp_args + breaksw +endsw + +_sp_end: +unset _sp_args _sp_full_spec _sp_modtype _sp_module_args +unset _sp_sh_cmd _sp_spec _sp_subcommand _sp_flags diff --git a/share/spack/setup-env.bash b/share/spack/setup-env.bash deleted file mode 100755 index e22a259167..0000000000 --- a/share/spack/setup-env.bash +++ /dev/null @@ -1,132 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## - -# -# -# This file is part of Spack and sets up the spack environment for -# bash shells. 
This includes dotkit support as well as putting spack -# in your path. Source it like this: -# -# . /path/to/spack/share/spack/setup-env.bash -# -# - - -######################################################################## -# This is a wrapper around the spack command that forwards calls to -# 'spack use' and 'spack unuse' to shell functions. This in turn -# allows them to be used to invoke dotkit functions. -# -# 'spack use' is smarter than just 'use' because it converts its -# arguments into a unique spack spec that is then passed to dotkit -# commands. This allows the user to use packages without knowing all -# their installation details. -# -# e.g., rather than requring a full spec for libelf, the user can type: -# -# spack use libelf -# -# This will first find the available libelf dotkits and use a -# matching one. If there are two versions of libelf, the user would -# need to be more specific, e.g.: -# -# spack use libelf@0.8.13 -# -# This is very similar to how regular spack commands work and it -# avoids the need to come up with a user-friendly naming scheme for -# spack dotfiles. -######################################################################## -function spack { - _spack_subcommand=$1; shift - _spack_spec="$@" - - # Filter out use and unuse. For any other commands, just run the - # command. - case $_spack_subcommand in - "use") ;; - "unuse") ;; - *) - command spack $_spack_subcommand "$@" - return - ;; - esac - - # If no args or -h, just run that command as well. - if [ -z "$1" -o "$1" = "-h" ]; then - command spack $_spack_subcommand -h - return - fi - - # Shift any other args for use off before parsing spec. - _spack_use_args="" - if [[ "$1" =~ ^- ]]; then - _spack_use_args="$1"; shift - _spack_spec="$@" - fi - - # Here the user has run use or unuse with a spec. Find a matching - # spec with a dotkit using spack dotkit, then use or unuse the - # result. If spack dotkit comes back with an error, do nothing. 
- if _spack_full_spec=$(command spack dotkit $_spack_spec); then - $_spack_subcommand $_spack_use_args $_spack_full_spec - fi -} - -######################################################################## -# Prepends directories to path, if they exist. -# pathadd /path/to/dir # add to PATH -# or pathadd OTHERPATH /path/to/dir # add to OTHERPATH -######################################################################## -function _spack_pathadd { - # If no variable name is supplied, just append to PATH - # otherwise append to that variable. - varname=PATH - path="$1" - if [ -n "$2" ]; then - varname="$1" - path="$2" - fi - - # Do the actual prepending here. - eval "oldvalue=\"\$$varname\"" - if [ -d "$path" ] && [[ ":$oldvalue:" != *":$path:"* ]]; then - if [ -n "$oldvalue" ]; then - eval "export $varname=\"$path:$oldvalue\"" - else - export $varname="$path" - fi - fi -} - - -# -# Set up dotkit and path in the user environment -# -_spack_share_dir="$(dirname ${BASH_SOURCE[0]})" -_spack_prefix="$(dirname $(dirname $_spack_share_dir))" - -_spack_pathadd DK_NODE "$_spack_share_dir/dotkit" -_spack_pathadd PATH "$_spack_prefix/bin" - diff --git a/share/spack/setup-env.csh b/share/spack/setup-env.csh new file mode 100755 index 0000000000..5f91670a60 --- /dev/null +++ b/share/spack/setup-env.csh @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +# +# This file is part of Spack and sets up the spack environment for +# csh and tcsh. This includes dotkit support, module support, and +# it also puts spack in your path. Source it like this: +# +# setenv SPACK_ROOT /path/to/spack +# source $SPACK_ROOT/share/spack/setup-env.csh +# +if ($?SPACK_ROOT) then + set _spack_source_file = $SPACK_ROOT/share/spack/setup-env.csh + set _spack_share_dir = $SPACK_ROOT/share/spack + + # Command aliases point at separate source files + alias spack 'set _sp_args = (\!*); source $_spack_share_dir/csh/spack.csh' + alias _spack_pathadd 'set _pa_args = (\!*) && source $_spack_share_dir/csh/pathadd.csh' + + # Set up modules and dotkit search paths in the user environment + # TODO: fix SYS_TYPE to something non-LLNL-specific + _spack_pathadd DK_NODE "$_spack_share_dir/dotkit/$SYS_TYPE" + _spack_pathadd MODULEPATH "$_spack_share_dir/modules/$SYS_TYPE" + _spack_pathadd PATH "$SPACK_ROOT/bin" +endif diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh new file mode 100755 index 0000000000..6f56d4739b --- /dev/null +++ b/share/spack/setup-env.sh @@ -0,0 +1,169 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +# +# This file is part of Spack and sets up the spack environment for +# bash and zsh. This includes dotkit support, module support, and +# it also puts spack in your path. Source it like this: +# +# . /path/to/spack/share/spack/setup-env.sh +# + +######################################################################## +# This is a wrapper around the spack command that forwards calls to +# 'spack use' and 'spack unuse' to shell functions. This in turn +# allows them to be used to invoke dotkit functions. +# +# 'spack use' is smarter than just 'use' because it converts its +# arguments into a unique spack spec that is then passed to dotkit +# commands. This allows the user to use packages without knowing all +# their installation details. +# +# e.g., rather than requring a full spec for libelf, the user can type: +# +# spack use libelf +# +# This will first find the available libelf dotkits and use a +# matching one. 
If there are two versions of libelf, the user would +# need to be more specific, e.g.: +# +# spack use libelf@0.8.13 +# +# This is very similar to how regular spack commands work and it +# avoids the need to come up with a user-friendly naming scheme for +# spack dotfiles. +######################################################################## +function spack { + # accumulate initial flags for main spack command + _sp_flags="" + while [[ "$1" =~ ^- ]]; do + _sp_flags="$_sp_flags $1" + shift + done + + # h and V flags don't require further output parsing. + if [[ "$_sp_flags" =~ *h* || "$_sp_flags" =~ *V* ]]; then + command spack $_sp_flags "$@" + return + fi + + _sp_subcommand=$1; shift + _sp_spec="$@" + + # Filter out use and unuse. For any other commands, just run the + # command. + case $_sp_subcommand in + "cd") + cd $(spack location "$@") + return + ;; + "use"|"unuse"|"load"|"unload") + # Shift any other args for use off before parsing spec. + _sp_module_args="" + if [[ "$1" =~ ^- ]]; then + _sp_module_args="$1"; shift + _sp_spec="$@" + fi + + # Here the user has run use or unuse with a spec. Find a matching + # spec using 'spack module find', then use the appropriate module + # tool's commands to add/remove the result from the environment. + # If spack module command comes back with an error, do nothing. 
+ case $_sp_subcommand in + "use") + if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then + use $_sp_module_args $_sp_full_spec + fi ;; + "unuse") + if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then + unuse $_sp_module_args $_sp_full_spec + fi ;; + "load") + if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then + module load $_sp_module_args $_sp_full_spec + fi ;; + "unload") + if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then + module unload $_sp_module_args $_sp_full_spec + fi ;; + esac + ;; + *) + command spack $_sp_flags $_sp_subcommand $_sp_spec + ;; + esac +} + +######################################################################## +# Prepends directories to path, if they exist. +# pathadd /path/to/dir # add to PATH +# or pathadd OTHERPATH /path/to/dir # add to OTHERPATH +######################################################################## +function _spack_pathadd { + # If no variable name is supplied, just append to PATH + # otherwise append to that variable. + _pa_varname=PATH + _pa_new_path="$1" + if [ -n "$2" ]; then + _pa_varname="$1" + _pa_new_path="$2" + fi + + # Do the actual prepending here. + eval "_pa_oldvalue=\$${_pa_varname}" + + if [ -d "$_pa_new_path" ] && [[ ":$_pa_oldvalue:" != *":$_pa_new_path:"* ]]; then + if [ -n "$_pa_oldvalue" ]; then + eval "export $_pa_varname=\"$_pa_new_path:$_pa_oldvalue\"" + else + export $_pa_varname="$_pa_new_path" + fi + fi +} + +# +# Figure out where this file is. Below code needs to be portable to +# bash and zsh. +# +_sp_source_file="${BASH_SOURCE[0]}" # Bash's location of last sourced file. +if [ -z "$_sp_source_file" ]; then + _sp_source_file="$0:A" # zsh way to do it + if [[ "$_sp_source_file" == *":A" ]]; then + # Not zsh either... bail out with plain old $0, + # which WILL NOT work if this is sourced indirectly. 
+ _sp_source_file="$0" + fi +fi + +# +# Set up modules and dotkit search paths in the user environment +# +_sp_share_dir="$(dirname $_sp_source_file)" +_sp_prefix="$(dirname $(dirname $_sp_share_dir))" + +# TODO: fix SYS_TYPE to something non-LLNL-specific +_spack_pathadd DK_NODE "$_sp_share_dir/dotkit/$SYS_TYPE" +_spack_pathadd MODULEPATH "$_sp_share_dir/modules/$SYS_TYPE" +_spack_pathadd PATH "$_sp_prefix/bin" diff --git a/var/spack/mock_packages/callpath/package.py b/var/spack/mock_packages/callpath/package.py index b4fd0f4482..5b6b70ba2a 100644 --- a/var/spack/mock_packages/callpath/package.py +++ b/var/spack/mock_packages/callpath/package.py @@ -28,9 +28,9 @@ class Callpath(Package): homepage = "https://github.com/tgamblin/callpath" url = "http://github.com/tgamblin/callpath-1.0.tar.gz" - versions = { 0.8 : 'foobarbaz', - 0.9 : 'foobarbaz', - 1.0 : 'foobarbaz' } + version(0.8, 'foobarbaz') + version(0.9, 'foobarbaz') + version(1.0, 'foobarbaz') depends_on("dyninst") depends_on("mpi") diff --git a/var/spack/mock_packages/direct_mpich/package.py b/var/spack/mock_packages/direct_mpich/package.py index d702e4481b..2ced82521b 100644 --- a/var/spack/mock_packages/direct_mpich/package.py +++ b/var/spack/mock_packages/direct_mpich/package.py @@ -28,7 +28,7 @@ class DirectMpich(Package): homepage = "http://www.example.com" url = "http://www.example.com/direct_mpich-1.0.tar.gz" - versions = { 1.0 : 'foobarbaz' } + version('1.0', 'foobarbaz') depends_on('mpich') diff --git a/var/spack/mock_packages/dyninst/package.py b/var/spack/mock_packages/dyninst/package.py index d32c4b5504..7657e2c33f 100644 --- a/var/spack/mock_packages/dyninst/package.py +++ b/var/spack/mock_packages/dyninst/package.py @@ -29,9 +29,8 @@ class Dyninst(Package): url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" list_url = "http://www.dyninst.org/downloads/dyninst-8.x" - versions = { - '8.1.2' : 'bf03b33375afa66fe0efa46ce3f4b17a', - '8.1.1' : 
'1f8743e3a5662b25ce64a7edf647e77d' } + version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a') + version('8.1.1', '1f8743e3a5662b25ce64a7edf647e77d') depends_on("libelf") depends_on("libdwarf") diff --git a/var/spack/mock_packages/fake/package.py b/var/spack/mock_packages/fake/package.py index 88bc6d8669..fb3c2bdd2e 100644 --- a/var/spack/mock_packages/fake/package.py +++ b/var/spack/mock_packages/fake/package.py @@ -27,7 +27,8 @@ from spack import * class Fake(Package): homepage = "http://www.fake-spack-example.org" url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz" - versions = { '1.0' : 'foobarbaz' } + + version('1.0', 'foobarbaz') def install(self, spec, prefix): pass diff --git a/var/spack/mock_packages/indirect_mpich/package.py b/var/spack/mock_packages/indirect_mpich/package.py index a53cb9330c..daf8b4b166 100644 --- a/var/spack/mock_packages/indirect_mpich/package.py +++ b/var/spack/mock_packages/indirect_mpich/package.py @@ -32,7 +32,7 @@ class IndirectMpich(Package): homepage = "http://www.example.com" url = "http://www.example.com/indirect_mpich-1.0.tar.gz" - versions = { 1.0 : 'foobarbaz' } + version(1.0, 'foobarbaz') depends_on('mpi') depends_on('direct_mpich') diff --git a/var/spack/mock_packages/libdwarf/package.py b/var/spack/mock_packages/libdwarf/package.py index 0f4d55fd88..0b8df04cfb 100644 --- a/var/spack/mock_packages/libdwarf/package.py +++ b/var/spack/mock_packages/libdwarf/package.py @@ -33,10 +33,10 @@ class Libdwarf(Package): url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" list_url = homepage - versions = { 20130729 : "64b42692e947d5180e162e46c689dfbf", - 20130207 : 'foobarbaz', - 20111030 : 'foobarbaz', - 20070703 : 'foobarbaz' } + version(20130729, "64b42692e947d5180e162e46c689dfbf") + version(20130207, 'foobarbaz') + version(20111030, 'foobarbaz') + version(20070703, 'foobarbaz') depends_on("libelf") diff --git a/var/spack/mock_packages/libelf/package.py b/var/spack/mock_packages/libelf/package.py index 
5ac07de4e3..94c8f942cd 100644 --- a/var/spack/mock_packages/libelf/package.py +++ b/var/spack/mock_packages/libelf/package.py @@ -28,9 +28,9 @@ class Libelf(Package): homepage = "http://www.mr511.de/software/english.html" url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" - versions = {'0.8.13' : '4136d7b4c04df68b686570afa26988ac', - '0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7', - '0.8.10' : '9db4d36c283d9790d8fa7df1f4d7b4d9' } + version('0.8.13', '4136d7b4c04df68b686570afa26988ac') + version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') + version('0.8.10', '9db4d36c283d9790d8fa7df1f4d7b4d9') def install(self, spec, prefix): configure("--prefix=%s" % prefix, diff --git a/var/spack/mock_packages/mpich/package.py b/var/spack/mock_packages/mpich/package.py index 2a8e1cebe3..e86c1a68ac 100644 --- a/var/spack/mock_packages/mpich/package.py +++ b/var/spack/mock_packages/mpich/package.py @@ -30,11 +30,11 @@ class Mpich(Package): list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 - versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0', - '3.0.3' : 'foobarbaz', - '3.0.2' : 'foobarbaz', - '3.0.1' : 'foobarbaz', - '3.0' : 'foobarbaz' } + version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') + version('3.0.3', 'foobarbaz') + version('3.0.2', 'foobarbaz') + version('3.0.1', 'foobarbaz') + version('3.0', 'foobarbaz') provides('mpi@:3', when='@3:') provides('mpi@:1', when='@1:') diff --git a/var/spack/mock_packages/mpich2/package.py b/var/spack/mock_packages/mpich2/package.py index 84dce4cccb..827b94c8a4 100644 --- a/var/spack/mock_packages/mpich2/package.py +++ b/var/spack/mock_packages/mpich2/package.py @@ -30,12 +30,12 @@ class Mpich2(Package): list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 - versions = { '1.5' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0', - '1.4' : 'foobarbaz', - '1.3' : 'foobarbaz', - '1.2' : 'foobarbaz', - '1.1' : 'foobarbaz', - '1.0' : 'foobarbaz' } + version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') + version('1.4', 
'foobarbaz') + version('1.3', 'foobarbaz') + version('1.2', 'foobarbaz') + version('1.1', 'foobarbaz') + version('1.0', 'foobarbaz') provides('mpi@:2.0') provides('mpi@:2.1', when='@1.1:') diff --git a/var/spack/mock_packages/mpileaks/package.py b/var/spack/mock_packages/mpileaks/package.py index c34d5991e6..c6be37bc13 100644 --- a/var/spack/mock_packages/mpileaks/package.py +++ b/var/spack/mock_packages/mpileaks/package.py @@ -28,10 +28,10 @@ class Mpileaks(Package): homepage = "http://www.llnl.gov" url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" - versions = { 1.0 : 'foobarbaz', - 2.1 : 'foobarbaz', - 2.2 : 'foobarbaz', - 2.3 : 'foobarbaz' } + version(1.0, 'foobarbaz') + version(2.1, 'foobarbaz') + version(2.2, 'foobarbaz') + version(2.3, 'foobarbaz') depends_on("mpi") depends_on("callpath") diff --git a/var/spack/mock_packages/trivial_install_test_package/package.py b/var/spack/mock_packages/trivial_install_test_package/package.py index b665825b32..c4db9f5f07 100644 --- a/var/spack/mock_packages/trivial_install_test_package/package.py +++ b/var/spack/mock_packages/trivial_install_test_package/package.py @@ -30,7 +30,7 @@ class TrivialInstallTestPackage(Package): homepage = "http://www.example.com/trivial_install" url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz" - versions = { '1.0' : 'foobarbaz' } + version('1.0', 'foobarbaz') def install(self, spec, prefix): configure('--prefix=%s' % prefix) diff --git a/var/spack/mock_packages/zmpi/package.py b/var/spack/mock_packages/zmpi/package.py index a86bd706bb..8c6ceda6d3 100644 --- a/var/spack/mock_packages/zmpi/package.py +++ b/var/spack/mock_packages/zmpi/package.py @@ -30,7 +30,7 @@ class Zmpi(Package): homepage = "http://www.spack-fake-zmpi.org" url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz" - versions = { '1.0' : 'foobarbaz' } + version('1.0', 'foobarbaz') provides('mpi@:10.0') depends_on('fake') diff --git a/var/spack/packages/SAMRAI/package.py 
b/var/spack/packages/SAMRAI/package.py index bb88ec3292..3dfb00b391 100644 --- a/var/spack/packages/SAMRAI/package.py +++ b/var/spack/packages/SAMRAI/package.py @@ -11,17 +11,15 @@ class Samrai(Package): url = "https://computation-rnd.llnl.gov/SAMRAI/download/SAMRAI-v3.7.3.tar.gz" list_url = homepage - versions = { - '3.7.3' : '12d574eacadf8c9a70f1bb4cd1a69df6', - '3.7.2' : 'f6a716f171c9fdbf3cb12f71fa6e2737', - '3.6.3-beta' : 'ef0510bf2893042daedaca434e5ec6ce', - '3.5.2-beta' : 'd072d9d681eeb9ada15ce91bea784274', - '3.5.0-beta' : '1ad18a319fc573e12e2b1fbb6f6b0a19', - '3.4.1-beta' : '00814cbee2cb76bf8302aff56bbb385b', - '3.3.3-beta' : '1db3241d3e1cab913dc310d736c34388', - '3.3.2-beta' : 'e598a085dab979498fcb6c110c4dd26c', - '2.4.4' : '04fb048ed0efe7c531ac10c81cc5f6ac', - } + version('3.7.3', '12d574eacadf8c9a70f1bb4cd1a69df6') + version('3.7.2', 'f6a716f171c9fdbf3cb12f71fa6e2737') + version('3.6.3-beta', 'ef0510bf2893042daedaca434e5ec6ce') + version('3.5.2-beta', 'd072d9d681eeb9ada15ce91bea784274') + version('3.5.0-beta', '1ad18a319fc573e12e2b1fbb6f6b0a19') + version('3.4.1-beta', '00814cbee2cb76bf8302aff56bbb385b') + version('3.3.3-beta', '1db3241d3e1cab913dc310d736c34388') + version('3.3.2-beta', 'e598a085dab979498fcb6c110c4dd26c') + version('2.4.4', '04fb048ed0efe7c531ac10c81cc5f6ac') depends_on("mpi") depends_on("zlib") diff --git a/var/spack/packages/adept-utils/package.py b/var/spack/packages/adept-utils/package.py new file mode 100644 index 0000000000..2515322ec2 --- /dev/null +++ b/var/spack/packages/adept-utils/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class AdeptUtils(Package): + """Utility libraries for LLNL performance tools.""" + + homepage = "https://github.com/scalability-llnl/adept-utils" + url = "https://github.com/scalability-llnl/adept-utils/archive/v1.0.tar.gz" + + version('1.0', '5c6cd9badce56c945ac8551e34804397') + + depends_on("boost") + depends_on("mpi") + + def install(self, spec, prefix): + cmake(*std_cmake_args) + make() + make("install") diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py index 71c3058d79..d3231c3baa 100644 --- a/var/spack/packages/boost/package.py +++ b/var/spack/packages/boost/package.py @@ -14,32 +14,31 @@ class Boost(Package): list_url = "http://sourceforge.net/projects/boost/files/boost/" list_depth = 2 - versions = { - '1.55.0' : 'd6eef4b4cacb2183f2bf265a5a03a354', - '1.54.0' : '15cb8c0803064faef0c4ddf5bc5ca279', - '1.53.0' : 'a00d22605d5dbcfb4c9936a9b35bc4c2', - '1.52.0' : '3a855e0f919107e0ca4de4d84ad3f750', - '1.51.0' : '4b6bd483b692fd138aef84ed2c8eb679', - '1.50.0' : 
'52dd00be775e689f55a987baebccc462', - '1.49.0' : '0d202cb811f934282dea64856a175698', - '1.48.0' : 'd1e9a7a7f532bb031a3c175d86688d95', - '1.47.0' : 'a2dc343f7bc7f83f8941e47ed4a18200', - '1.46.1' : '7375679575f4c8db605d426fc721d506', - '1.46.0' : '37b12f1702319b73876b0097982087e0', - '1.45.0' : 'd405c606354789d0426bc07bea617e58', - '1.44.0' : 'f02578f5218f217a9f20e9c30e119c6a', - '1.43.0' : 'dd49767bfb726b0c774f7db0cef91ed1', - '1.42.0' : '7bf3b4eb841b62ffb0ade2b82218ebe6', - '1.41.0' : '8bb65e133907db727a2a825c5400d0a6', - '1.40.0' : 'ec3875caeac8c52c7c129802a8483bd7', - '1.39.0' : 'a17281fd88c48e0d866e1a12deecbcc0', - '1.38.0' : '5eca2116d39d61382b8f8235915cb267', - '1.37.0' : '8d9f990bfb7e83769fa5f1d6f065bc92', - '1.36.0' : '328bfec66c312150e4c2a78dcecb504b', - '1.35.0' : 'dce952a7214e72d6597516bcac84048b', - '1.34.1' : '2d938467e8a448a2c9763e0a9f8ca7e5', - '1.34.0' : 'ed5b9291ffad776f8757a916e1726ad0' - } + version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354') + version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279') + version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2') + version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750') + version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679') + version('1.50.0', '52dd00be775e689f55a987baebccc462') + version('1.49.0', '0d202cb811f934282dea64856a175698') + version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95') + version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200') + version('1.46.1', '7375679575f4c8db605d426fc721d506') + version('1.46.0', '37b12f1702319b73876b0097982087e0') + version('1.45.0', 'd405c606354789d0426bc07bea617e58') + version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a') + version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1') + version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6') + version('1.41.0', '8bb65e133907db727a2a825c5400d0a6') + version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7') + version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0') + version('1.38.0', '5eca2116d39d61382b8f8235915cb267') + 
version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92') + version('1.36.0', '328bfec66c312150e4c2a78dcecb504b') + version('1.35.0', 'dce952a7214e72d6597516bcac84048b') + version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5') + version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0') + def url_for_version(self, version): """Handle Boost's weird URLs, which write the version two different ways.""" diff --git a/var/spack/packages/callpath/package.py b/var/spack/packages/callpath/package.py index 5d92d77302..84170d9c9e 100644 --- a/var/spack/packages/callpath/package.py +++ b/var/spack/packages/callpath/package.py @@ -25,13 +25,20 @@ from spack import * class Callpath(Package): - homepage = "https://github.com/tgamblin/callpath" - url = "http://github.com/tgamblin/callpath-0.2.tar.gz" + """Library for representing callpaths consistently in + distributed-memory performance tools.""" + + homepage = "https://github.com/scalability-llnl/callpath" + url = "https://github.com/scalability-llnl/callpath/archive/v1.0.1.tar.gz" + + version('1.0.1', '0047983d2a52c5c335f8ba7f5bab2325') depends_on("dyninst") + depends_on("adept-utils") depends_on("mpi") def install(self, spec, prefix): - configure("--prefix=" + prefix) + # TODO: offer options for the walker used. + cmake('.', "-DCALLPATH_WALKER=dyninst", *std_cmake_args) make() make("install") diff --git a/var/spack/packages/clang/package.py b/var/spack/packages/clang/package.py new file mode 100644 index 0000000000..b0097bd126 --- /dev/null +++ b/var/spack/packages/clang/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Clang(Package): + """The goal of the Clang project is to create a new C, C++, + Objective C and Objective C++ front-end for the LLVM compiler. 
+ """ + homepage = "http://clang.llvm.org" + url = "http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.gz" + + depends_on("llvm") + + version('3.4.2', '87945973b7c73038871c5f849a818588') + + def install(self, spec, prefix): + env['CXXFLAGS'] = self.compiler.cxx11_flag + + with working_dir('spack-build', create=True): + cmake('..', + '-DCLANG_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, + '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, + *std_cmake_args) + make() + make("install") diff --git a/var/spack/packages/cmake/package.py b/var/spack/packages/cmake/package.py index 70406610b6..ca6553df84 100644 --- a/var/spack/packages/cmake/package.py +++ b/var/spack/packages/cmake/package.py @@ -27,7 +27,8 @@ from spack import * class Cmake(Package): homepage = 'https://www.cmake.org' url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz' - versions = { '2.8.10.2' : '097278785da7182ec0aea8769d06860c' } + + version('2.8.10.2', '097278785da7182ec0aea8769d06860c') def install(self, spec, prefix): configure('--prefix=' + prefix, diff --git a/var/spack/packages/cube/package.py b/var/spack/packages/cube/package.py new file mode 100644 index 0000000000..d97cd25636 --- /dev/null +++ b/var/spack/packages/cube/package.py @@ -0,0 +1,55 @@ +# FIXME: Add copyright statement +# +from spack import * +from contextlib import closing + +class Cube(Package): + """Cube the profile viewer for Score-P and Scalasca profiles. 
It + displays a multi-dimensional performance space consisting + of the dimensions (i) performance metric, (ii) call path, + and (iii) system resource.""" + + homepage = "http://www.scalasca.org/software/cube-4.x/download.html" + url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" + + version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20') + + version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f', + url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz') + + # Using CC as C++ compiler provides quirky workaround for a Score-P build system attempt + # to guess a matching C compiler when configuring scorep-score + backend_user_provided = """\ +CC=cc +CXX=CC +F77=f77 +FC=f90 +#CFLAGS=-fPIC +#CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=CC +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. 
+ # Create these three files in the build directory + + with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + + configure_args = ["--prefix=%s" % prefix, + "--with-custom-compilers", + "--without-paraver", + "--without-gui"] + + configure(*configure_args) + + make(parallel=False) + make("install", parallel=False) diff --git a/var/spack/packages/dtcmp/package.py b/var/spack/packages/dtcmp/package.py new file mode 100644 index 0000000000..9d940583c1 --- /dev/null +++ b/var/spack/packages/dtcmp/package.py @@ -0,0 +1,20 @@ +import os +from spack import * + +class Dtcmp(Package): + """The Datatype Comparison Library provides comparison operations and + parallel sort algorithms for MPI applications.""" + + homepage = "https://github.com/hpc/dtcmp" + url = "https://github.com/hpc/dtcmp/releases/download/v1.0.3/dtcmp-1.0.3.tar.gz" + + version('1.0.3', 'cdd8ccf71e8ff67de2558594a7fcd317') + + depends_on('mpi') + depends_on('lwgrp') + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--with-lwgrp=" + spec['lwgrp'].prefix) + make() + make("install") diff --git a/var/spack/packages/dyninst/package.py b/var/spack/packages/dyninst/package.py index 3f8696b6d8..069237f7ff 100644 --- a/var/spack/packages/dyninst/package.py +++ b/var/spack/packages/dyninst/package.py @@ -29,8 +29,8 @@ class Dyninst(Package): url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" list_url = "http://www.dyninst.org/downloads/dyninst-8.x" - versions = {'8.1.2' : 'bf03b33375afa66fe0efa46ce3f4b17a', - '8.1.1' : '1f8743e3a5662b25ce64a7edf647e77d' } + version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a') + version('8.1.1', '1f8743e3a5662b25ce64a7edf647e77d') 
 depends_on("libelf") depends_on("libdwarf") diff --git a/var/spack/packages/extrae/package.py b/var/spack/packages/extrae/package.py new file mode 100644 index 0000000000..3b842bc1ec --- /dev/null +++ b/var/spack/packages/extrae/package.py @@ -0,0 +1,38 @@ +from spack import * + +class Extrae(Package): + """Extrae is the package devoted to generate tracefiles which can + be analyzed later by Paraver. Extrae is a tool that uses + different interposition mechanisms to inject probes into the + target application so as to gather information regarding the + application performance. The Extrae instrumentation package can + instrument the MPI programming model, and the following parallel + programming models either alone or in conjunction with MPI : + OpenMP, CUDA, OpenCL, pthread, OmpSs""" + homepage = "http://www.bsc.es/computer-sciences/extrae" + url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-2.5.1.tar.bz2" + version('2.5.1', '422376b9c68243bd36a8a73fa62de106') + + #depends_on("mpi") + depends_on("openmpi@:1.6") + depends_on("dyninst") + depends_on("libunwind") + depends_on("boost") + depends_on("libdwarf") + depends_on("papi") + + def install(self, spec, prefix): + if 'openmpi' in spec: + mpi = spec['openmpi'] + #if spec.satisfies('@2.5.1') and spec.satisfies('^openmpi@1.6.5'): + # tty.error("Some headers conflict when using OpenMPI 1.6.5. 
Please use 1.6 instead.") + elif 'mpich' in spec: + mpi = spec['mpich'] + elif 'mvapich2' in spec: + mpi = spec['mvapich2'] + + configure("--prefix=%s" % prefix, "--with-mpi=%s" % mpi.prefix, "--with-unwind=%s" % spec['libunwind'].prefix, "--with-dyninst=%s" % spec['dyninst'].prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-dwarf=%s" % spec['libdwarf'].prefix, "--with-papi=%s" % spec['papi'].prefix, "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) + + make() + make("install", parallel=False) + diff --git a/var/spack/packages/graphlib/package.py b/var/spack/packages/graphlib/package.py index c959135147..ddac0b2b66 100644 --- a/var/spack/packages/graphlib/package.py +++ b/var/spack/packages/graphlib/package.py @@ -5,7 +5,7 @@ class Graphlib(Package): homepage = "http://https://github.com/lee218llnl/graphlib" url = "https://github.com/lee218llnl/graphlib/archive/v2.0.0.tar.gz" - versions = { '2.0.0' : '43c6df84f1d38ba5a5dce0ae19371a70', } + version('2.0.0', '43c6df84f1d38ba5a5dce0ae19371a70') def install(self, spec, prefix): cmake(".", *std_cmake_args) diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py index 7705676dba..615c2a7fe4 100644 --- a/var/spack/packages/hdf5/package.py +++ b/var/spack/packages/hdf5/package.py @@ -11,10 +11,10 @@ class Hdf5(Package): list_url = "http://www.hdfgroup.org/ftp/HDF5/releases" list_depth = 3 - versions = { '1.8.13' : 'c03426e9e77d7766944654280b467289', } + version('1.8.13', 'c03426e9e77d7766944654280b467289') depends_on("mpi") - depends_on("zlib") + depends_on("zlib") # TODO: currently hard-coded to use OpenMPI def install(self, spec, prefix): diff --git a/var/spack/packages/hwloc/package.py b/var/spack/packages/hwloc/package.py new file mode 100644 index 0000000000..31a31f376a --- /dev/null +++ b/var/spack/packages/hwloc/package.py @@ -0,0 +1,25 @@ +from spack import * + +class Hwloc(Package): + """The Portable Hardware 
Locality (hwloc) software package + provides a portable abstraction (across OS, versions, + architectures, ...) of the hierarchical topology of modern + architectures, including NUMA memory nodes, sockets, shared + caches, cores and simultaneous multithreading. It also gathers + various system attributes such as cache and memory information + as well as the locality of I/O devices such as network + interfaces, InfiniBand HCAs or GPUs. It primarily aims at + helping applications with gathering information about modern + computing hardware so as to exploit it accordingly and + efficiently.""" + homepage = "http://www.open-mpi.org/projects/hwloc/" + url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" + + version('1.9', '1f9f9155682fe8946a97c08896109508') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") + diff --git a/var/spack/packages/launchmon/package.py b/var/spack/packages/launchmon/package.py index 7f12f41308..b6773a85bc 100644 --- a/var/spack/packages/launchmon/package.py +++ b/var/spack/packages/launchmon/package.py @@ -28,7 +28,7 @@ class Launchmon(Package): homepage = "http://sourceforge.net/projects/launchmon" url = "http://downloads.sourceforge.net/project/launchmon/launchmon/1.0.1%20release/launchmon-1.0.1.tar.gz" - versions = { '1.0.1' : '2f12465803409fd07f91174a4389eb2b' } + version('1.0.1', '2f12465803409fd07f91174a4389eb2b') def install(self, spec, prefix): configure( diff --git a/var/spack/packages/libarchive/package.py b/var/spack/packages/libarchive/package.py new file mode 100644 index 0000000000..cbd4b89cd0 --- /dev/null +++ b/var/spack/packages/libarchive/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libarchive(Package): + """libarchive: C library and command-line tools for reading and + writing tar, cpio, zip, ISO, and other archive formats.""" + homepage = "http://www.libarchive.org" + url = "http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz" + 
+ version('3.1.2', 'efad5a503f66329bb9d2f4308b5de98a') + version('3.1.1', '1f3d883daf7161a0065e42a15bbf168f') + version('3.1.0', '095a287bb1fd687ab50c85955692bf3a') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/libcircle/package.py b/var/spack/packages/libcircle/package.py new file mode 100644 index 0000000000..3f7c996fb0 --- /dev/null +++ b/var/spack/packages/libcircle/package.py @@ -0,0 +1,18 @@ +import os +from spack import * + +class Libcircle(Package): + """libcircle provides an efficient distributed queue on a cluster, + using self-stabilizing work stealing.""" + + homepage = "https://github.com/hpc/libcircle" + + version('0.2.1-rc.1', '2b1369a5736457239f908abf88143ec2', + url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz') + + depends_on('mpi') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/packages/libdwarf/package.py b/var/spack/packages/libdwarf/package.py index abd6d1f4a1..c4d71ebc01 100644 --- a/var/spack/packages/libdwarf/package.py +++ b/var/spack/packages/libdwarf/package.py @@ -44,9 +44,9 @@ class Libdwarf(Package): url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" list_url = homepage - versions = { '20130729' : '4cc5e48693f7b93b7aa0261e63c0e21d', - '20130207' : '64b42692e947d5180e162e46c689dfbf', - '20130126' : 'ded74a5e90edb5a12aac3c29d260c5db' } + version('20130729', '4cc5e48693f7b93b7aa0261e63c0e21d') + version('20130207', '64b42692e947d5180e162e46c689dfbf') + version('20130126', 'ded74a5e90edb5a12aac3c29d260c5db') depends_on("libelf") diff --git a/var/spack/packages/libelf/package.py b/var/spack/packages/libelf/package.py index f663ba750d..bf2fefabd5 100644 --- a/var/spack/packages/libelf/package.py +++ b/var/spack/packages/libelf/package.py @@ -33,8 +33,8 @@ class Libelf(Package): homepage = 
"http://www.mr511.de/software/english.html" url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" - versions = { '0.8.13' : '4136d7b4c04df68b686570afa26988ac', - '0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7', } + version('0.8.13', '4136d7b4c04df68b686570afa26988ac') + version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') def install(self, spec, prefix): configure("--prefix=" + prefix, diff --git a/var/spack/packages/libevent/package.py b/var/spack/packages/libevent/package.py index 9b45b532d7..11b1083d67 100644 --- a/var/spack/packages/libevent/package.py +++ b/var/spack/packages/libevent/package.py @@ -11,18 +11,17 @@ class Libevent(Package): url = "https://github.com/downloads/libevent/libevent/libevent-2.0.21-stable.tar.gz" list_url = "http://libevent.org/old-releases.html" - versions = { - '2.0.21' : 'b2405cc9ebf264aa47ff615d9de527a2', - '2.0.20' : '94270cdee32c0cd0aa9f4ee6ede27e8e', - '2.0.19' : '91111579769f46055b0a438f5cc59572', - '2.0.18' : 'aa1ce9bc0dee7b8084f6855765f2c86a', - '2.0.17' : 'dad64aaaaff16b5fbec25160c06fee9a', - '2.0.16' : '899efcffccdb3d5111419df76e7dc8df', - '2.0.15' : '2643abe7ba242df15c08b2cc14ec8759', - '2.0.14' : 'cac0f379da35d3b98f83ac16fcfe1df4', - '2.0.13' : 'af786b4b3f790c9d3279792edf7867fc', - '2.0.12' : '42986228baf95e325778ed328a93e070', - } + version('2.0.21', 'b2405cc9ebf264aa47ff615d9de527a2') + version('2.0.20', '94270cdee32c0cd0aa9f4ee6ede27e8e') + version('2.0.19', '91111579769f46055b0a438f5cc59572') + version('2.0.18', 'aa1ce9bc0dee7b8084f6855765f2c86a') + version('2.0.17', 'dad64aaaaff16b5fbec25160c06fee9a') + version('2.0.16', '899efcffccdb3d5111419df76e7dc8df') + version('2.0.15', '2643abe7ba242df15c08b2cc14ec8759') + version('2.0.14', 'cac0f379da35d3b98f83ac16fcfe1df4') + version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc') + version('2.0.12', '42986228baf95e325778ed328a93e070') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/libmonitor/package.py 
b/var/spack/packages/libmonitor/package.py new file mode 100644 index 0000000000..210712436a --- /dev/null +++ b/var/spack/packages/libmonitor/package.py @@ -0,0 +1,37 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Libmonitor(Package): + """Libmonitor is a library for process and thread control.""" + homepage = "http://hpctoolkit.org" + url = "file:///g/g0/legendre/tools/oss/openspeedshop-release-2.1/SOURCES/libmonitor-20130218.tar.gz" + + version('20130218', 'aa85c2c580e2dafb823cc47b09374279') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/packages/libunwind/package.py b/var/spack/packages/libunwind/package.py index c93b5b2c98..aeadc85eb3 100644 --- a/var/spack/packages/libunwind/package.py +++ b/var/spack/packages/libunwind/package.py @@ -28,7 +28,7 @@ class Libunwind(Package): homepage = "http://www.nongnu.org/libunwind/" url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz" - versions = { '1.1' : 'fb4ea2f6fbbe45bf032cd36e586883ce' } + version('1.1', 'fb4ea2f6fbbe45bf032cd36e586883ce') def install(self, spec, prefix): configure("--prefix=" + prefix) diff --git a/var/spack/packages/llvm-lld/package.py b/var/spack/packages/llvm-lld/package.py new file mode 100644 index 0000000000..f229211396 --- /dev/null +++ b/var/spack/packages/llvm-lld/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class LlvmLld(Package): + """lld - The LLVM Linker + lld is a new set of modular code for creating linker tools.""" + homepage = "http://lld.llvm.org" + url = "http://llvm.org/releases/3.4/lld-3.4.src.tar.gz" + + depends_on('llvm') + + version('3.4', '3b6a17e58c8416c869c14dd37682f78e') + + def install(self, spec, prefix): + env['CXXFLAGS'] = self.compiler.cxx11_flag + + with working_dir('spack-build', create=True): + cmake('..', + '-DLLD_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, + '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, + *std_cmake_args) + make() + make("install") diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py new file mode 100644 index 0000000000..c7a10df55a --- /dev/null +++ b/var/spack/packages/llvm/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by David Beckingsale, david@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Llvm(Package): + """The LLVM Project is a collection of modular and reusable compiler and + toolchain technologies. Despite its name, LLVM has little to do with + traditional virtual machines, though it does provide helpful libraries + that can be used to build them. The name "LLVM" itself is not an acronym; + it is the full name of the project. 
+    """ + homepage = "http://llvm.org/" + url = "http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz" + + version('3.4.2', 'a20669f75967440de949ac3b1bad439c') + + def install(self, spec, prefix): + env['CXXFLAGS'] = self.compiler.cxx11_flag + + with working_dir('spack-build', create=True): + cmake('..', + '-DLLVM_REQUIRES_RTTI=1', + '-DPYTHON_EXECUTABLE=/usr/bin/python', + '-DPYTHON_INCLUDE_DIR=/usr/include/python2.6', + '-DPYTHON_LIBRARY=/usr/lib64/libpython2.6.so', + *std_cmake_args) + make() + make("install") diff --git a/var/spack/packages/lwgrp/package.py b/var/spack/packages/lwgrp/package.py new file mode 100644 index 0000000000..5963382b92 --- /dev/null +++ b/var/spack/packages/lwgrp/package.py @@ -0,0 +1,18 @@ +import os +from spack import * + +class Lwgrp(Package): + """The light-weight group library provides process group + representations using O(log N) space and time.""" + + homepage = "https://github.com/hpc/lwgrp" + url = "https://github.com/hpc/lwgrp/releases/download/v1.0.2/lwgrp-1.0.2.tar.gz" + + version('1.0.2', 'ab7ba3bdd8534a651da5076f47f27d8a') + + depends_on('mpi') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py index 703614a587..19a1efe9c3 100644 --- a/var/spack/packages/mpich/package.py +++ b/var/spack/packages/mpich/package.py @@ -32,7 +32,7 @@ class Mpich(Package): list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 - versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0' } + version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') provides('mpi@:3', when='@3:') provides('mpi@:1', when='@1:') diff --git a/var/spack/packages/mpileaks/package.py b/var/spack/packages/mpileaks/package.py index 3307b9fdee..4ef866588c 100644 --- a/var/spack/packages/mpileaks/package.py +++ b/var/spack/packages/mpileaks/package.py @@ -25,13 +25,20 @@ from spack import * class Mpileaks(Package): - homepage = 
"http://www.llnl.gov" - url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" + """Tool to detect and report leaked MPI objects like MPI_Requests and MPI_Datatypes.""" + + homepage = "https://github.com/hpc/mpileaks" + url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" + + version('1.0', '8838c574b39202a57d7c2d68692718aa') depends_on("mpi") + depends_on("adept-utils") depends_on("callpath") def install(self, spec, prefix): - configure("--prefix=" + prefix) + configure("--prefix=" + prefix, + "--with-adept-utils=" + spec['adept-utils'].prefix, + "--with-callpath=" + spec['callpath'].prefix) make() make("install") diff --git a/var/spack/packages/mrnet/package.py b/var/spack/packages/mrnet/package.py index 89eeaf1e71..6e9766f275 100644 --- a/var/spack/packages/mrnet/package.py +++ b/var/spack/packages/mrnet/package.py @@ -5,8 +5,9 @@ class Mrnet(Package): homepage = "http://paradyn.org/mrnet" url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz" - versions = { '4.0.0' : 'd00301c078cba57ef68613be32ceea2f', } - versions = { '4.1.0' : '5a248298b395b329e2371bf25366115c', } + version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') + version('4.1.0', '5a248298b395b329e2371bf25366115c') + parallel = False depends_on("boost") diff --git a/var/spack/packages/mvapich2/package.py b/var/spack/packages/mvapich2/package.py index f372679f49..ca0b1287c1 100644 --- a/var/spack/packages/mvapich2/package.py +++ b/var/spack/packages/mvapich2/package.py @@ -1,27 +1,104 @@ +import os from spack import * class Mvapich2(Package): """mvapich2 is an MPI implmenetation for infiniband networks.""" - homepage = "http://mvapich.cse.ohio-state.edu/" - url = "http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz" - versions = { '1.9' : '5dc58ed08fd3142c260b70fe297e127c', } + version('1.9', '5dc58ed08fd3142c260b70fe297e127c', + url="http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz") + 
patch('ad_lustre_rwcontig_open_source.patch', when='@1.9') + + version('2.0', '9fbb68a4111a8b6338e476dc657388b4', + url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') - provides('mpi@:1', when='@1.9:') + provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2 + provides('mpi@:3.0', when='@2.0') # MVAPICH2-2.0 supports MPI 3.0 - patch('ad_lustre_rwcontig_open_source.patch', when='@1.9:') def install(self, spec, prefix): + # we'll set different configure flags depending on our environment + configure_args = [] + + # TODO: The MPICH*_FLAGS have a different name for 1.9 + + if '+debug' in spec: + # set configure flags for debug build + configure_args.append("--disable-fast") + configure_args.append("--enable-g=dbg") + configure_args.append("--enable-error-checking=runtime") + configure_args.append("--enable-error-messages=all") + configure_args.append("--enable-nmpi-as-mpi") + + if "%gnu" in spec: + # set variables for GNU compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fno-second-underscore" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fno-second-underscore" + elif "%intel" in spec: + # set variables for Inel compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0" + elif "%pgi" in spec: + # set variables for PGI compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fPIC" + + else: + # set configure flags for normal optimizations + configure_args.append("--enable-fast=all") + configure_args.append("--enable-g=dbg") + configure_args.append("--enable-nmpi-as-mpi") + + if "%gnu" in spec: + # set variables for what compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O2" 
+ os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fno-second-underscore" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fno-second-underscore" + elif "%intel" in spec: + # set variables for Inel compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O2" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2" + elif "%pgi" in spec: + # set variables for PGI compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fPIC" + + # determine network type by variant + if "+psm" in spec: + # throw this flag on QLogic systems to use PSM + configure_args.append("--with-device=ch3:psm") + else: + # throw this flag on IB systems + configure_args.append("--with-device=ch3:mrail", "--with-rdma=gen2") + + # TODO: shared-memory build + + # TODO: CUDA + + # TODO: other file systems like panasis + configure( "--prefix=" + prefix, "--enable-f77", "--enable-fc", "--enable-cxx", - "--enable-fast=all", "--enable-g=dbg", "--enable-nmpi-as-mpi", "--enable-shared", "--enable-sharedlibs=gcc", "--enable-debuginfo", "--with-pm=no", "--with-pmi=slurm", - "--with-device=ch3:psm", "--enable-romio", "--with-file-system=lustre+nfs+ufs", - "--disable-mpe", "--without-mpe") + "--disable-mpe", "--without-mpe", + "--disable-silent-rules", + *configure_args) + make() + make("install") diff --git a/var/spack/packages/ncurses/package.py b/var/spack/packages/ncurses/package.py index 221860374b..4885caa628 100644 --- a/var/spack/packages/ncurses/package.py +++ b/var/spack/packages/ncurses/package.py @@ -8,14 +8,12 @@ class Ncurses(Package): """ homepage = "http://invisible-island.net/ncurses/ncurses.html" - url = "http://invisible-island.net/datafiles/release/ncurses.tar.gz" - versions = { 'stable' : '8cb9c412e5f2d96bc6f459aa8c6282a1' } + version('5.9', 
'8cb9c412e5f2d96bc6f459aa8c6282a1', + url='http://invisible-island.net/datafiles/release/ncurses.tar.gz') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") - def url_for_version(self, version): - return "http://invisible-island.net/datafiles/release/ncurses.tar.gz" diff --git a/var/spack/packages/ompss/package.py b/var/spack/packages/ompss/package.py new file mode 100644 index 0000000000..544671de38 --- /dev/null +++ b/var/spack/packages/ompss/package.py @@ -0,0 +1,49 @@ +from spack import * +import os +import glob + +# working config lines for ompss 14.06 : +#./nanox-0.7/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-mcc=/usr/gapps/exmatex/ompss/ --with-hwloc=/usr +#./mcxx-1.99.2/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-nanox=/usr/gapps/exmatex/ompss --enable-ompss --with-mpi=/opt/mvapich2-intel-shmem-1.7 --enable-tl-openmp-profile --enable-tl-openmp-intel + +class Ompss(Package): + """OmpSs is an effort to integrate features from the StarSs + programming model developed by BSC into a single programming + model. In particular, our objective is to extend OpenMP with + new directives to support asynchronous parallelism and + heterogeneity (devices like GPUs). However, it can also be + understood as new directives extending other accelerator based + APIs like CUDA or OpenCL. 
Our OmpSs environment is built on top + of our Mercurium compiler and Nanos++ runtime system.""" + homepage = "http://pm.bsc.es/" + url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.06.tar.gz" + version('14.06', '99be5dce74c0d7eea42636d26af47b4181ae2e11') + + # all dependencies are optional, really + depends_on("mpi") + #depends_on("openmp") + depends_on("hwloc") + depends_on("extrae") + + def install(self, spec, prefix): + if 'openmpi' in spec: + mpi = spec['openmpi'] + elif 'mpich' in spec: + mpi = spec['mpich'] + elif 'mvapich' in spec: + mpi = spec['mvapich'] + + openmp_options = ["--enable-tl-openmp-profile"] + if spec.satisfies('%intel'): + openmp_options.append( "--enable-tl-openmp-intel" ) + + os.chdir(glob.glob('./nanox-*').pop()) + configure("--prefix=%s" % prefix, "--with-mcc=%s" % prefix, "--with-extrae=%s" % spec['extrae'].prefix, "--with-hwloc=%s" % spec['hwloc'].prefix) + make() + make("install") + + os.chdir(glob.glob('../mcxx-*').pop()) + configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options) + make() + make("install") + diff --git a/var/spack/packages/opari2/package.py b/var/spack/packages/opari2/package.py new file mode 100644 index 0000000000..daaee61e3a --- /dev/null +++ b/var/spack/packages/opari2/package.py @@ -0,0 +1,65 @@ +# FIXME: Add copyright statement here + +from spack import * +from contextlib import closing + +class Opari2(Package): + """OPARI2 is a source-to-source instrumentation tool for OpenMP and + hybrid codes. It surrounds OpenMP directives and runtime library + calls with calls to the POMP2 measurement interface. + OPARI2 will provide you with a new initialization method that allows + for multi-directory and parallel builds as well as the usage of + pre-instrumented libraries. Furthermore, an efficient way of + tracking parent-child relationships was added. 
Additionally, we + extended OPARI2 to support instrumentation of OpenMP 3.0 + tied tasks. """ + + homepage = "http://www.vi-hps.org/projects/score-p" + url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" + + version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=mpicc +MPICXX=mpicxx +MPIF77=mpif77 +MPIFC=mpif90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. + # Create these three files in the build directory + with closing(open("platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + with closing(open("platform-mpi-user-provided", "w")) as mpi_file: + mpi_file.write(self.mpi_user_provided) + + # FIXME: Modify the configure line to suit your build system here. 
+ configure("--prefix=%s" % prefix, + "--with-custom-compilers", + "--enable-shared") + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index 5ac231a33f..0ce09bdd8d 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -12,7 +12,7 @@ class Openmpi(Package): homepage = "http://www.open-mpi.org" url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2" - versions = { '1.6.5' : '03aed2a4aa4d0b27196962a2a65fc475', } + version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475') provides('mpi@:2') diff --git a/var/spack/packages/openssl/package.py b/var/spack/packages/openssl/package.py new file mode 100644 index 0000000000..c5a8aeb9dc --- /dev/null +++ b/var/spack/packages/openssl/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Openssl(Package): + """The OpenSSL Project is a collaborative effort to develop a + robust, commercial-grade, full-featured, and Open Source + toolkit implementing the Secure Sockets Layer (SSL v2/v3) and + Transport Layer Security (TLS v1) protocols as well as a + full-strength general purpose cryptography library.""" + homepage = "http://www.openssl.org" + url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz" + + version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') + + depends_on("zlib") + parallel = False + + def install(self, spec, prefix): + config = Executable("./config") + config("--prefix=%s" % prefix, + "--openssldir=%s/etc/openssl" % prefix, + "zlib", + "no-krb5", + "shared") + + make() + make("install") diff --git a/var/spack/packages/otf2/package.py b/var/spack/packages/otf2/package.py new file mode 100644 index 0000000000..fa0a5898b6 --- /dev/null +++ b/var/spack/packages/otf2/package.py @@ -0,0 +1,74 @@ +# FIXME: Add copyright + +from spack import * +from contextlib import closing +import os + +class Otf2(Package): + """The Open Trace 
Format 2 is a highly scalable, memory efficient event + trace data format plus support library.""" + + homepage = "http://www.vi-hps.org/score-p" + url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" + + version('1.4', 'a23c42e936eb9209c4e08b61c3cf5092', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz") + version('1.3.1', 'd0ffc4e858455ace4f596f910e68c9f2', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz") + version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=cc +MPICXX=c++ +MPIF77=f77 +MPIFC=f90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + @when('@:1.2.1') + def version_specific_args(self): + return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"] + + @when('@1.3:') + def version_specific_args(self): + # TODO: figure out what scorep's build does as of otf2 1.3 + return ["--with-custom-compilers"] + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. 
+ # Create these three files in the build directory + with closing(open("platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + with closing(open("platform-mpi-user-provided", "w")) as mpi_file: + mpi_file.write(self.mpi_user_provided) + + configure_args=["--prefix=%s" % prefix, + "--enable-shared"] + + configure_args.extend(self.version_specific_args()) + + configure(*configure_args) + + make() + make("install") diff --git a/var/spack/packages/papi/package.py b/var/spack/packages/papi/package.py new file mode 100644 index 0000000000..596f7114d6 --- /dev/null +++ b/var/spack/packages/papi/package.py @@ -0,0 +1,35 @@ +from spack import * +import os + +class Papi(Package): + """PAPI provides the tool designer and application engineer with a + consistent interface and methodology for use of the performance + counter hardware found in most major microprocessors. PAPI + enables software engineers to see, in near real time, the + relation between software performance and processor events. 
In + addition Component PAPI provides access to a collection of + components that expose performance measurement opportunites + across the hardware and software stack.""" + homepage = "http://icl.cs.utk.edu/papi/index.html" + url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.3.0.tar.gz" + + version('5.3.0', '367961dd0ab426e5ae367c2713924ffb') + + def install(self, spec, prefix): + os.chdir("src/") + + configure_args=["--prefix=%s" % prefix] + + # need to force consistency in the use of compilers + if spec.satisfies('%gcc'): + configure_args.append('CC=gcc') + configure_args.append('MPICH_CC=gcc') + if spec.satisfies('%intel'): + configure_args.append('CC=icc') + configure_args.append('MPICH_CC=icc') + + configure(*configure_args) + + make() + make("install") + diff --git a/var/spack/packages/paraver/package.py b/var/spack/packages/paraver/package.py new file mode 100644 index 0000000000..45bac95b28 --- /dev/null +++ b/var/spack/packages/paraver/package.py @@ -0,0 +1,41 @@ +from spack import * +import os + +class Paraver(Package): + """"A very powerful performance visualization and analysis tool + based on traces that can be used to analyse any information that + is expressed on its input trace format. 
Traces for parallel MPI, + OpenMP and other programs can be genereated with Extrae.""" + homepage = "http://www.bsc.es/computer-sciences/performance-tools/paraver" + url = "http://www.bsc.es/ssl/apps/performanceTools/files/paraver-sources-4.5.2.tar.gz" + + version('4.5.2', 'ea463dd494519395c99ebae294edee17') + + depends_on("boost") + #depends_on("extrae") + depends_on("wx") + depends_on("wxpropgrid") + + def install(self, spec, prefix): + os.chdir("ptools_common_files") + configure("--prefix=%s" % prefix) + make() + make("install") + + os.chdir("../paraver-kernel") + #"--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, "--with-ptools-common-files=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization") + make() + make("install") + + os.chdir("../paraver-toolset") + configure("--prefix=%s" % prefix) + make() + make("install") + + os.chdir("../wxparaver") + #"--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", "--with-wxdir=%s" % spec['wx'].prefix.bin) + make() + make("install") + diff --git a/var/spack/packages/parmetis/package.py b/var/spack/packages/parmetis/package.py index 10a48503b2..d8cd337304 100644 --- a/var/spack/packages/parmetis/package.py +++ b/var/spack/packages/parmetis/package.py @@ -8,7 +8,7 @@ class Parmetis(Package): homepage = "http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview" url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz" - versions = { '4.0.3' : 'f69c479586bf6bb7aff6a9bc0c739628', } + version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628') depends_on('mpi') diff --git a/var/spack/packages/pmgr_collective/package.py b/var/spack/packages/pmgr_collective/package.py index 0874e2d17b..5d9b02acc3 100644 --- a/var/spack/packages/pmgr_collective/package.py +++ 
b/var/spack/packages/pmgr_collective/package.py @@ -30,7 +30,7 @@ class PmgrCollective(Package): homepage = "http://www.sourceforge.net/projects/pmgrcollective" url = "http://downloads.sourceforge.net/project/pmgrcollective/pmgrcollective/PMGR_COLLECTIVE-1.0/pmgr_collective-1.0.tgz" - versions = { '1.0' : '0384d008774274cc3fc7b4d810dfd07e' } + version('1.0', '0384d008774274cc3fc7b4d810dfd07e') def install(self, spec, prefix): make('PREFIX="' + prefix + '"') diff --git a/var/spack/packages/postgresql/package.py b/var/spack/packages/postgresql/package.py new file mode 100644 index 0000000000..46922b7b71 --- /dev/null +++ b/var/spack/packages/postgresql/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Postgresql(Package): + """PostgreSQL is a powerful, open source object-relational + database system. It has more than 15 years of active + development and a proven architecture that has earned it a + strong reputation for reliability, data integrity, and + correctness.""" + homepage = "http://www.postgresql.org/" + url = "http://ftp.postgresql.org/pub/source/v9.3.4/postgresql-9.3.4.tar.bz2" + + version('9.3.4', 'd0a41f54c377b2d2fab4a003b0dac762') + + depends_on("openssl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-openssl") + make() + make("install") diff --git a/var/spack/packages/scalasca/package.py b/var/spack/packages/scalasca/package.py new file mode 100644 index 0000000000..b76d0a7df5 --- /dev/null +++ b/var/spack/packages/scalasca/package.py @@ -0,0 +1,43 @@ +# FIXME: Add copyright + +from spack import * + +class Scalasca(Package): + """Scalasca is a software tool that supports the performance optimization + of parallel programs by measuring and analyzing their runtime behavior. 
+ The analysis identifies potential performance bottlenecks - in + particular those concerning communication and synchronization - and + offers guidance in exploring their causes.""" + + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.scalasca.org" + url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1-rc2.tar.gz" + + version('2.1-rc2', '1a95a39e5430539753e956a7524a756b') + + depends_on("mpi") + depends_on("otf2@1.4") + depends_on("cube") + + def install(self, spec, prefix): + configure_args = ["--prefix=%s" % prefix, + "--with-otf2=%s" % spec['otf2'].prefix.bin, + "--with-cube=%s" % spec['cube'].prefix.bin, + "--enable-shared"] + + if spec.satisfies('%gcc'): + configure_args.append('--with-nocross-compiler-suite=gcc') + if spec.satisfies('%intel'): + configure_args.append('--with-nocross-compiler-suite=intel') + + configure(*configure_args) + + make() + make("install") + + # FIXME: Modify the configure line to suit your build system here. + configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/packages/scorep/package.py b/var/spack/packages/scorep/package.py new file mode 100644 index 0000000000..32a772e3db --- /dev/null +++ b/var/spack/packages/scorep/package.py @@ -0,0 +1,75 @@ +# FIXME: Add copyright statement + +from spack import * +from contextlib import closing + +class Scorep(Package): + """The Score-P measurement infrastructure is a highly scalable and + easy-to-use tool suite for profiling, event tracing, and online + analysis of HPC applications.""" + + # FIXME: add a proper url for your package's homepage here. 
+ homepage = "http://www.vi-hps.org/projects/score-p" + url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" + + version('1.3', '9db6f957b7f51fa01377a9537867a55c', + url = 'http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz') + + version('1.2.3', '4978084e7cbd05b94517aa8beaea0817') + + depends_on("mpi") + depends_on("papi") + # depends_on("otf2@1.2:1.2.1") # only Score-P 1.2.x + depends_on("otf2") + depends_on("opari2") + depends_on("cube@4.2:4.2.3") + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=mpicc +MPICXX=mpicxx +MPIF77=mpif77 +MPIFC=mpif90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. 
+ # Create these three files in the build directory + with closing(open("platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + with closing(open("platform-mpi-user-provided", "w")) as mpi_file: + mpi_file.write(self.mpi_user_provided) + + configure_args = ["--prefix=%s" % prefix, + "--with-custom-compilers", + "--with-otf2=%s" % spec['otf2'].prefix.bin, + "--with-opari2=%s" % spec['opari2'].prefix.bin, + "--with-cube=%s" % spec['cube'].prefix.bin, + "--with-papi-header=%s" % spec['papi'].prefix.include, + "--with-papi-lib=%s" % spec['papi'].prefix.lib, + "--enable-shared"] + + configure(*configure_args) + + make() + make("install") diff --git a/var/spack/packages/scr/package.py b/var/spack/packages/scr/package.py index cb908e830e..d480dba62f 100644 --- a/var/spack/packages/scr/package.py +++ b/var/spack/packages/scr/package.py @@ -30,7 +30,7 @@ class Scr(Package): depends_on("mpi") - versions = { '1.1-7' : 'a5930e9ab27d1b7049447c2fd7734ebd' } + version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd') def install(self, spec, prefix): configure("--prefix=" + prefix, diff --git a/var/spack/packages/spindle/package.py b/var/spack/packages/spindle/package.py index 0d106221d8..bb0b74ab6f 100644 --- a/var/spack/packages/spindle/package.py +++ b/var/spack/packages/spindle/package.py @@ -29,7 +29,7 @@ class Spindle(Package): url = "https://github.com/hpc/Spindle/archive/v0.8.1.tar.gz" list_url = "https://github.com/hpc/Spindle/releases" - versions = {'0.8.1' : 'f11793a6b9d8df2cd231fccb2857d912' } + version('0.8.1', 'f11793a6b9d8df2cd231fccb2857d912') depends_on("launchmon") diff --git a/var/spack/packages/sqlite/package.py b/var/spack/packages/sqlite/package.py new file mode 100644 index 0000000000..734b0b6cb6 --- /dev/null +++ b/var/spack/packages/sqlite/package.py @@ -0,0 +1,40 @@ 
+############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Sqlite(Package): + """SQLite3 is an SQL database engine in a C library. Programs that + link the SQLite3 library can have SQL database access without + running a separate RDBMS process. 
+ """ + homepage = "www.sqlite.org" + + version('3.8.5', '0544ef6d7afd8ca797935ccc2685a9ed', + url='http://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/packages/stat/package.py b/var/spack/packages/stat/package.py index 9f156d53e7..956b0dcc8c 100644 --- a/var/spack/packages/stat/package.py +++ b/var/spack/packages/stat/package.py @@ -5,8 +5,8 @@ class Stat(Package): homepage = "http://paradyn.org/STAT/STAT.html" url = "https://github.com/lee218llnl/stat/archive/v2.0.0.tar.gz" - versions = { '2.0.0' : 'c7494210b0ba26b577171b92838e1a9b', } - versions = { '2.1.0' : 'ece26beaf057aa9134d62adcdda1ba91', } + version('2.0.0', 'c7494210b0ba26b577171b92838e1a9b') + version('2.1.0', 'ece26beaf057aa9134d62adcdda1ba91') depends_on('libdwarf') depends_on('dyninst') @@ -20,6 +20,7 @@ class Stat(Package): configure( "--enable-gui", "--prefix=%s" % prefix, + "--disable-examples", # Examples require MPI: avoid this dependency. "--with-launchmon=%s" % spec['launchmon'].prefix, "--with-mrnet=%s" % spec['mrnet'].prefix, "--with-graphlib=%s" % spec['graphlib'].prefix, diff --git a/var/spack/packages/tau/package.py b/var/spack/packages/tau/package.py index 62c467b572..048fac80aa 100644 --- a/var/spack/packages/tau/package.py +++ b/var/spack/packages/tau/package.py @@ -10,7 +10,7 @@ class Tau(Package): homepage = "http://www.cs.uoregon.edu/research/tau" url = "http://www.cs.uoregon.edu/research/paracomp/tau/tauprofile/dist/tau-2.23.1.tar.gz" - versions = { '2.23.1' : '6593b47ae1e7a838e632652f0426fe72', } + version('2.23.1', '6593b47ae1e7a838e632652f0426fe72') def install(self, spec, prefix): # TAU isn't happy with directories that have '@' in the path. Sigh. 
diff --git a/var/spack/packages/tmux/package.py b/var/spack/packages/tmux/package.py index d4e3d9f829..23d36db427 100644 --- a/var/spack/packages/tmux/package.py +++ b/var/spack/packages/tmux/package.py @@ -10,7 +10,7 @@ class Tmux(Package): homepage = "http://tmux.sourceforge.net" url = "http://downloads.sourceforge.net/project/tmux/tmux/tmux-1.9/tmux-1.9a.tar.gz" - versions = { '1.9a' : 'b07601711f96f1d260b390513b509a2d', } + version('1.9a', 'b07601711f96f1d260b390513b509a2d') depends_on('libevent') depends_on('ncurses') diff --git a/var/spack/packages/vim/package.py b/var/spack/packages/vim/package.py index 9398134b50..5c6d7cb56b 100644 --- a/var/spack/packages/vim/package.py +++ b/var/spack/packages/vim/package.py @@ -13,19 +13,17 @@ class Vim(Package): url = "ftp://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2" list_url = "http://ftp.vim.org/pub/vim/unix/" - versions = { - '7.4' : '607e135c559be642f210094ad023dc65', - '7.3' : '5b9510a17074e2b37d8bb38ae09edbf2', - '7.2' : 'f0901284b338e448bfd79ccca0041254', - '7.1' : '44c6b4914f38d6f9aa959640b89da329', - '7.0' : '4ca69757678272f718b1041c810d82d8', - '6.4' : '774c14d93ce58674b3b2c880edd12d77', - '6.3' : '821fda8f14d674346b87e3ef9cb96389', - '6.2' : 'c49d360bbd069d00e2a57804f2a123d9', - '6.1.405' : 'd220ff58f2c72ed606e6d0297c2f2a7c', - '6.1' : '7fd0f915adc7c0dab89772884268b030', - '6.0' : '9d9ca84d489af6b3f54639dd97af3774', - } + version('7.4', '607e135c559be642f210094ad023dc65') + version('7.3', '5b9510a17074e2b37d8bb38ae09edbf2') + version('7.2', 'f0901284b338e448bfd79ccca0041254') + version('7.1', '44c6b4914f38d6f9aa959640b89da329') + version('7.0', '4ca69757678272f718b1041c810d82d8') + version('6.4', '774c14d93ce58674b3b2c880edd12d77') + version('6.3', '821fda8f14d674346b87e3ef9cb96389') + version('6.2', 'c49d360bbd069d00e2a57804f2a123d9') + version('6.1.405', 'd220ff58f2c72ed606e6d0297c2f2a7c') + version('6.1', '7fd0f915adc7c0dab89772884268b030') + version('6.0', '9d9ca84d489af6b3f54639dd97af3774') def 
install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/wx/package.py b/var/spack/packages/wx/package.py new file mode 100644 index 0000000000..1813a8c8a5 --- /dev/null +++ b/var/spack/packages/wx/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Wx(Package): + """wxWidgets is a C++ library that lets developers create + applications for Windows, Mac OS X, Linux and other platforms + with a single code base. It has popular language bindings for + Python, Perl, Ruby and many other languages, and unlike other + cross-platform toolkits, wxWidgets gives applications a truly + native look and feel because it uses the platform's native API + rather than emulating the GUI. It's also extensive, free, + open-source and mature.""" + homepage = "http://www.wxwidgets.org/" + + version('2.8.12', '2fa39da14bc06ea86fe902579fedc5b1', + url="https://sourceforge.net/projects/wxwindows/files/2.8.12/wxWidgets-2.8.12.tar.gz") + version('3.0.1', 'dad1f1cd9d4c370cbc22700dc492da31', + url="https://sourceforge.net/projects/wxwindows/files/3.0.1/wxWidgets-3.0.1.tar.bz2") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers") + + make(parallel=False) + make("install") + diff --git a/var/spack/packages/wxpropgrid/package.py b/var/spack/packages/wxpropgrid/package.py new file mode 100644 index 0000000000..790cead517 --- /dev/null +++ b/var/spack/packages/wxpropgrid/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Wxpropgrid(Package): + """wxPropertyGrid is a property sheet control for wxWidgets. 
In + other words, it is a specialized two-column grid for editing + properties such as strings, numbers, flagsets, string arrays, + and colours.""" + homepage = "http://wxpropgrid.sourceforge.net/" + url = "http://prdownloads.sourceforge.net/wxpropgrid/wxpropgrid-1.4.15-src.tar.gz" + + version('1.4.15', 'f44b5cd6fd60718bacfabbf7994f1e93') + + depends_on("wx") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wx'].prefix.bin, "--enable-unicode") + + make() + make("install") + diff --git a/var/spack/packages/zlib/package.py b/var/spack/packages/zlib/package.py index 608b27ddd3..2770f781ac 100644 --- a/var/spack/packages/zlib/package.py +++ b/var/spack/packages/zlib/package.py @@ -9,7 +9,7 @@ class Zlib(Package): homepage = "http://zlib.net" url = "http://zlib.net/zlib-1.2.8.tar.gz" - versions = { '1.2.8' : '44d667c142d7cda120332623eab69f40', } + version('1.2.8', '44d667c142d7cda120332623eab69f40') def install(self, spec, prefix): configure("--prefix=%s" % prefix) |