author     Todd Gamblin <tgamblin@llnl.gov>    2016-08-09 13:23:53 -0700
committer  Todd Gamblin <tgamblin@llnl.gov>    2016-08-10 16:33:37 -0700
commit     bf1072c9022cd161b9cc4860e5403a463bc0e05b
tree       67c405d88312d151a65111fbece886084c706eef /lib
parent     8061deb883c84016f282f7e388c3c019af86b4ca
Make Spack core PEP8 compliant.
Diffstat (limited to 'lib')
147 files changed, 1415 insertions, 1153 deletions
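The hunks below are almost entirely mechanical style fixes rather than behavior changes. As a rough illustration only (this snippet is not taken from the commit; its names are invented), the recurring patterns include wrapping over-long calls instead of excusing them with "# NOQA: ignore=E501", adding a blank line between a class docstring and the first method, rewriting "not x in y" as "x not in y", and tightening whitespace in dict literals:

    # Illustrative sketch of the PEP8 patterns applied throughout this commit.
    # The function and class names here are invented for the example.
    import subprocess


    def list_dependencies(tool, path):
        # Over-long calls are wrapped across lines rather than tagged
        # with "# NOQA: ignore=E501".
        return subprocess.Popen(
            [tool, "-L", path],
            stdout=subprocess.PIPE).communicate()[0]


    class Settings(object):
        """A blank line now separates a class docstring from its methods."""

        def __init__(self, values):
            # "x not in y" replaces "not x in y".
            if 'name' not in values:
                raise ValueError("missing 'name'")
            # Dict literals drop the space before the colon.
            self.flags = {'verbose': False, 'jobs': 1}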
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 4cf99163e0..22ca85abf9 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -106,6 +106,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
 
 class FileFilter(object):
     """Convenience class for calling filter_file a lot."""
+
     def __init__(self, *filenames):
         self.filenames = filenames
 
@@ -355,7 +356,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
            # When follow_nonexisting isn't set, don't descend into dirs
            # in source that do not exist in dest
            if follow_nonexisting or os.path.exists(dest_child):
-                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)  # NOQA: ignore=E501
+                tuples = traverse_tree(
+                    source_root, dest_root, rel_child, **kwargs)
                for t in tuples:
                    yield t
 
@@ -422,14 +424,20 @@ def fix_darwin_install_name(path):
     libs = glob.glob(join_path(path, "*.dylib"))
     for lib in libs:
         # fix install name first:
-        subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0]  # NOQA: ignore=E501
-        long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n')  # NOQA: ignore=E501
+        subprocess.Popen(
+            ["install_name_tool", "-id", lib, lib],
+            stdout=subprocess.PIPE).communicate()[0]
+        long_deps = subprocess.Popen(
+            ["otool", "-L", lib],
+            stdout=subprocess.PIPE).communicate()[0].split('\n')
         deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
         # fix all dependencies:
         for dep in deps:
             for loc in libs:
                 if dep == os.path.basename(loc):
-                    subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0]  # NOQA: ignore=E501
+                    subprocess.Popen(
+                        ["install_name_tool", "-change", dep, loc, lib],
+                        stdout=subprocess.PIPE).communicate()[0]
                     break
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 63eb08d803..df32012e2d 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -24,7 +24,6 @@
 ##############################################################################
 import os
 import re
-import sys
 import functools
 import collections
 import inspect
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
     Values are used as keys.
     For example, suppose you have four objects with attributes that look like this:
 
-       a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
-       b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
-       c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
-       d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
+        a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
+        b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
+        c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
+        d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
 
-       list_of_specs = [a,b,c,d]
-       index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
-       index2 = index_by(list_of_specs, lambda s: s.compiler)
+        list_of_specs = [a,b,c,d]
+        index1 = index_by(list_of_specs, lambda s: s.arch,
+                          lambda s: s.compiler)
+        index2 = index_by(list_of_specs, lambda s: s.compiler)
 
     ``index1'' now has two levels of dicts, with lists at the leaves, like
     this:
 
@@ -137,7 +137,7 @@ def get_calling_module_name():
     finally:
         del stack
 
-    if not '__module__' in caller_locals:
+    if '__module__' not in caller_locals:
         raise RuntimeError("Must invoke get_calling_module_name() "
                            "from inside a class definition!")
 
@@ -173,11 +173,11 @@ def has_method(cls, name):
 class memoized(object):
     """Decorator that caches the results of a function, storing them
        in an attribute of that function."""
+
     def __init__(self, func):
         self.func = func
         self.cache = {}
 
-
     def __call__(self, *args):
         if not isinstance(args, collections.Hashable):
             # Not hashable, so just call the function.
@@ -187,12 +187,10 @@ class memoized(object):
             self.cache[args] = self.func(*args)
         return self.cache[args]
 
-
     def __get__(self, obj, objtype):
         """Support instance methods."""
         return functools.partial(self.__call__, obj)
 
-
     def clear(self):
         """Expunge cache so that self.func will be called again."""
         self.cache.clear()
 
@@ -237,13 +235,21 @@ def key_ordering(cls):
     if not has_method(cls, '_cmp_key'):
         raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
 
-    setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
-    setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
-    setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
-
-    setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
-    setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
-    setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
+    setter('__eq__',
+           lambda s, o:
+           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
+    setter('__lt__',
+           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
+    setter('__le__',
+           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
+
+    setter('__ne__',
+           lambda s, o:
+           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
+    setter('__gt__',
+           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
+    setter('__ge__',
+           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
 
     setter('__hash__', lambda self: hash(self._cmp_key()))
 
@@ -254,10 +260,10 @@ def key_ordering(cls):
 class HashableMap(dict):
     """This is a hashable, comparable dictionary.  Hash is performed on
        a tuple of the values in the dictionary."""
+
     def _cmp_key(self):
         return tuple(sorted(self.values()))
 
-
     def copy(self):
         """Type-agnostic clone method.
           Preserves subclass type."""
        # Construct a new dict of my type
 
@@ -336,24 +342,39 @@ def match_predicate(*args):
 
     return match
 
-
 def DictWrapper(dictionary):
     """Returns a class that wraps a dictionary and enables it to be used
        like an object."""
     class wrapper(object):
-        def __getattr__(self, name): return dictionary[name]
-        def __setattr__(self, name, value): dictionary[name] = value
-        def setdefault(self, *args): return dictionary.setdefault(*args)
-        def get(self, *args): return dictionary.get(*args)
-        def keys(self): return dictionary.keys()
-        def values(self): return dictionary.values()
-        def items(self): return dictionary.items()
-        def __iter__(self): return iter(dictionary)
+        def __getattr__(self, name):
+            return dictionary[name]
+
+        def __setattr__(self, name, value):
+            dictionary[name] = value
+
+        def setdefault(self, *args):
+            return dictionary.setdefault(*args)
+
+        def get(self, *args):
+            return dictionary.get(*args)
+
+        def keys(self):
+            return dictionary.keys()
+
+        def values(self):
+            return dictionary.values()
+
+        def items(self):
+            return dictionary.items()
+
+        def __iter__(self):
+            return iter(dictionary)
 
     return wrapper()
 
 
 class RequiredAttributeError(ValueError):
+
     def __init__(self, message):
         super(RequiredAttributeError, self).__init__(message)
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
index b6d8796084..d6547e933a 100644
--- a/lib/spack/llnl/util/link_tree.py
+++ b/lib/spack/llnl/util/link_tree.py
@@ -23,12 +23,13 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 """LinkTree class for setting up trees of symbolic links."""
-__all__ = ['LinkTree']
 
 import os
 import shutil
 
 from llnl.util.filesystem import *
 
+__all__ = ['LinkTree']
+
 empty_file_name = '.spack-empty'
 
@@ -43,13 +44,13 @@ class LinkTree(object):
        modified.
     """
+
     def __init__(self, source_root):
         if not os.path.exists(source_root):
             raise IOError("No such file or directory: '%s'", source_root)
 
         self._root = source_root
 
-
     def find_conflict(self, dest_root, **kwargs):
         """Returns the first file in dest that conflicts with src"""
         kwargs['follow_nonexisting'] = False
@@ -61,9 +62,9 @@ class LinkTree(object):
                 return dest
         return None
 
-
     def merge(self, dest_root, **kwargs):
-        """Link all files in src into dest, creating directories if necessary."""
+        """Link all files in src into dest, creating directories
+           if necessary."""
         kwargs['order'] = 'pre'
         for src, dest in traverse_tree(self._root, dest_root, **kwargs):
             if os.path.isdir(src):
@@ -83,7 +84,6 @@ class LinkTree(object):
             assert(not os.path.exists(dest))
             os.symlink(src, dest)
 
-
     def unmerge(self, dest_root, **kwargs):
         """Unlink all files in dest that exist in src.
 
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index 4a4aec2385..aa8272d5fe 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -47,6 +47,7 @@ class Lock(object):
        and recent NFS versions.
""" + def __init__(self, file_path): self._file_path = file_path self._fd = None @@ -225,6 +226,7 @@ class LockTransaction(object): class ReadTransaction(LockTransaction): + def _enter(self): return self._lock.acquire_read(self._timeout) @@ -233,6 +235,7 @@ class ReadTransaction(LockTransaction): class WriteTransaction(LockTransaction): + def _enter(self): return self._lock.acquire_write(self._timeout) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index ee81e11a20..db74aaba6b 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -36,6 +36,7 @@ _debug = False _verbose = False indent = " " + def is_verbose(): return _verbose @@ -148,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs): elif default_value is False: prompt += ' [y/N] ' else: - raise ValueError("default for get_yes_no() must be True, False, or None.") + raise ValueError( + "default for get_yes_no() must be True, False, or None.") result = None while result is None: @@ -174,8 +176,9 @@ def hline(label=None, **kwargs): char = kwargs.pop('char', '-') max_width = kwargs.pop('max_width', 64) if kwargs: - raise TypeError("'%s' is an invalid keyword argument for this function." - % next(kwargs.iterkeys())) + raise TypeError( + "'%s' is an invalid keyword argument for this function." + % next(kwargs.iterkeys())) rows, cols = terminal_size() if not cols: @@ -200,7 +203,8 @@ def terminal_size(): """Gets the dimensions of the console: (rows, cols).""" def ioctl_GWINSZ(fd): try: - rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + rc = struct.unpack('hh', fcntl.ioctl( + fd, termios.TIOCGWINSZ, '1234')) except: return return rc diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 81a83691d7..67acdfa517 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -27,15 +27,14 @@ Routines for printing columnar output. See colify() for more information. """ import os import sys -import fcntl -import termios -import struct from StringIO import StringIO from llnl.util.tty import terminal_size from llnl.util.tty.color import clen, cextra + class ColumnConfig: + def __init__(self, cols): self.cols = cols self.line_length = 0 @@ -43,7 +42,8 @@ class ColumnConfig: self.widths = [0] * cols # does not include ansi colors def __repr__(self): - attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")] + attrs = [(a, getattr(self, a)) + for a in dir(self) if not a.startswith("__")] return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs) @@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0): max_cols = min(len(elts), max_cols) # Range of column counts to try. If forced, use the supplied value. - col_range = [cols] if cols else xrange(1, max_cols+1) + col_range = [cols] if cols else xrange(1, max_cols + 1) # Determine the most columns possible for the console width. configs = [ColumnConfig(c) for c in col_range] @@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0): # 'clen' ignores length of ansi color sequences. max_len = max(clen(e) for e in elts) + padding - max_clen = max(len(e) for e in elts) + padding if cols == 0: cols = max(1, console_width / max_len) cols = min(len(elts), cols) @@ -130,17 +129,19 @@ def colify(elts, **options): output=<stream> A file object to write to. Default is sys.stdout. indent=<int> Optionally indent all columns by some number of spaces. padding=<int> Spaces between columns. 
Default is 2. - width=<int> Width of the output. Default is 80 if tty is not detected. + width=<int> Width of the output. Default is 80 if tty not detected. cols=<int> Force number of columns. Default is to size to terminal, or single-column if no tty tty=<bool> Whether to attempt to write to a tty. Default is to - autodetect a tty. Set to False to force single-column output. + autodetect a tty. Set to False to force + single-column output. - method=<string> Method to use to fit columns. Options are variable or uniform. - Variable-width columns are tighter, uniform columns are all the - same width and fit less data on the screen. + method=<string> Method to use to fit columns. Options are variable or + uniform. Variable-width columns are tighter, uniform + columns are all the same width and fit less data on + the screen. """ # Get keyword arguments or set defaults cols = options.pop("cols", 0) @@ -152,8 +153,9 @@ def colify(elts, **options): console_cols = options.pop("width", None) if options: - raise TypeError("'%s' is an invalid keyword argument for this function." - % next(options.iterkeys())) + raise TypeError( + "'%s' is an invalid keyword argument for this function." + % next(options.iterkeys())) # elts needs to be an array of strings so we can count the elements elts = [str(elt) for elt in elts] @@ -167,7 +169,8 @@ def colify(elts, **options): r, c = env_size.split('x') console_rows, console_cols = int(r), int(c) tty = True - except: pass + except: + pass # Use only one column if not a tty. if not tty: @@ -228,6 +231,7 @@ def colify_table(table, **options): raise ValueError("Table is empty in colify_table!") columns = len(table[0]) + def transpose(): for i in xrange(columns): for row in table: diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 0abcb09b97..b0c00f1502 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -75,25 +75,27 @@ To output an @, use '@@'. To output a } inside braces, use '}}'. 
import re import sys + class ColorParseError(Exception): """Raised when a color format fails to parse.""" + def __init__(self, message): super(ColorParseError, self).__init__(message) # Text styles for ansi codes -styles = {'*' : '1', # bold - '_' : '4', # underline - None : '0' } # plain +styles = {'*': '1', # bold + '_': '4', # underline + None: '0'} # plain # Dim and bright ansi colors -colors = {'k' : 30, 'K' : 90, # black - 'r' : 31, 'R' : 91, # red - 'g' : 32, 'G' : 92, # green - 'y' : 33, 'Y' : 93, # yellow - 'b' : 34, 'B' : 94, # blue - 'm' : 35, 'M' : 95, # magenta - 'c' : 36, 'C' : 96, # cyan - 'w' : 37, 'W' : 97 } # white +colors = {'k': 30, 'K': 90, # black + 'r': 31, 'R': 91, # red + 'g': 32, 'G': 92, # green + 'y': 33, 'Y': 93, # yellow + 'b': 34, 'B': 94, # blue + 'm': 35, 'M': 95, # magenta + 'c': 36, 'C': 96, # cyan + 'w': 37, 'W': 97} # white # Regex to be used for color formatting color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)' @@ -104,6 +106,7 @@ _force_color = False class match_to_ansi(object): + def __init__(self, color=True): self.color = color @@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None): """Same as cwrite, but writes a trailing newline to the stream.""" cwrite(string + "\n", stream, color) + def cescape(string): """Replace all @ with @@ in the string provided.""" return str(string).replace('@', '@@') class ColorStream(object): + def __init__(self, stream, color=None): self._stream = stream self._color = color @@ -196,7 +201,7 @@ class ColorStream(object): color = self._color if self._color is None: if raw: - color=True + color = True else: color = self._stream.isatty() or _force_color raw_write(colorize(string, color=color)) diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index ca82da7b17..b67edcf9cc 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -36,6 +36,7 @@ import llnl.util.tty.color as color # Use this to strip escape sequences _escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h') + def _strip(line): """Strip color and control characters from a line.""" return _escape.sub('', line) @@ -58,10 +59,10 @@ class keyboard_input(object): When the with block completes, this will restore settings before canonical and echo were disabled. """ + def __init__(self, stream): self.stream = stream - def __enter__(self): self.old_cfg = None @@ -86,10 +87,9 @@ class keyboard_input(object): # Apply new settings for terminal termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg) - except Exception, e: + except Exception: pass # Some OS's do not support termios, so ignore. - def __exit__(self, exc_type, exception, traceback): # If termios was avaialble, restore old settings after the # with block @@ -114,6 +114,7 @@ class log_output(object): Closes the provided stream when done with the block. If echo is True, also prints the output to stdout. """ + def __init__(self, stream, echo=False, force_color=False, debug=False): self.stream = stream @@ -122,7 +123,7 @@ class log_output(object): self.force_color = force_color self.debug = debug - # Default is to try file-descriptor reassignment unless the system + # Default is to try file-descriptor reassignment unless the system # out/err streams do not have an associated file descriptor self.directAssignment = False @@ -130,7 +131,6 @@ class log_output(object): """Jumps to __exit__ on the child process.""" raise _SkipWithBlock() - def __enter__(self): """Redirect output from the with block to a file. 
@@ -154,7 +154,8 @@ class log_output(object): with self.stream as log_file: with keyboard_input(sys.stdin): while True: - rlist, w, x = select.select([read_file, sys.stdin], [], []) + rlist, w, x = select.select( + [read_file, sys.stdin], [], []) if not rlist: break @@ -211,7 +212,6 @@ class log_output(object): if self.debug: tty._debug = True - def __exit__(self, exc_type, exception, traceback): """Exits on child, handles skipping the with block on parent.""" # Child should just exit here. @@ -235,7 +235,7 @@ class log_output(object): sys.stderr = self._stderr else: os.dup2(self._stdout, sys.stdout.fileno()) - os.dup2(self._stderr, sys.stderr.fileno()) + os.dup2(self._stderr, sys.stderr.fileno()) return False diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index 38cff62af4..064abb9782 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -30,15 +30,15 @@ from spack.spec import CompilerSpec from spack.util.executable import Executable, ProcessError from llnl.util.lang import memoized + class ABI(object): """This class provides methods to test ABI compatibility between specs. The current implementation is rather rough and could be improved.""" def architecture_compatible(self, parent, child): - """Returns true iff the parent and child specs have ABI compatible targets.""" - return not parent.architecture or not child.architecture \ - or parent.architecture == child.architecture - + """Return true if parent and child have ABI compatible targets.""" + return not parent.architecture or not child.architecture or \ + parent.architecture == child.architecture @memoized def _gcc_get_libstdcxx_version(self, version): @@ -61,8 +61,9 @@ class ABI(object): else: return None try: - output = rungcc("--print-file-name=%s" % libname, return_output=True) - except ProcessError, e: + output = rungcc("--print-file-name=%s" % libname, + return_output=True) + except ProcessError: return None if not output: return None @@ -71,7 +72,6 @@ class ABI(object): return None return os.path.basename(libpath) - @memoized def _gcc_compiler_compare(self, pversion, cversion): """Returns true iff the gcc version pversion and cversion @@ -82,7 +82,6 @@ class ABI(object): return False return plib == clib - def _intel_compiler_compare(self, pversion, cversion): """Returns true iff the intel version pversion and cversion are ABI compatible""" @@ -92,9 +91,8 @@ class ABI(object): return False return pversion.version[:2] == cversion.version[:2] - def compiler_compatible(self, parent, child, **kwargs): - """Returns true iff the compilers for parent and child specs are ABI compatible""" + """Return true if compilers for parent and child are ABI compatible.""" if not parent.compiler or not child.compiler: return True @@ -109,8 +107,8 @@ class ABI(object): # TODO: into compiler classes? for pversion in parent.compiler.versions: for cversion in child.compiler.versions: - # For a few compilers use specialized comparisons. Otherwise - # match on version match. + # For a few compilers use specialized comparisons. + # Otherwise match on version match. 
if pversion.satisfies(cversion): return True elif (parent.compiler.name == "gcc" and @@ -121,9 +119,8 @@ class ABI(object): return True return False - def compatible(self, parent, child, **kwargs): """Returns true iff a parent and child spec are ABI compatible""" loosematch = kwargs.get('loose', False) return self.architecture_compatible(parent, child) and \ - self.compiler_compatible(parent, child, loose=loosematch) + self.compiler_compatible(parent, child, loose=loosematch) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 886e170b1a..0d210f9741 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -91,6 +91,7 @@ import spack.error as serr class NoPlatformError(serr.SpackError): + def __init__(self): super(NoPlatformError, self).__init__( "Could not determine a platform for this machine.") diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 230115df50..f69f434afd 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -240,4 +240,4 @@ def display_specs(specs, **kwargs): else: raise ValueError( "Invalid mode for display_specs: %s. Must be one of (paths," - "deps, short)." % mode) # NOQA: ignore=E501 + "deps, short)." % mode) diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py index 9867fa8835..797cdcb136 100644 --- a/lib/spack/spack/cmd/activate.py +++ b/lib/spack/spack/cmd/activate.py @@ -29,12 +29,14 @@ import spack.cmd description = "Activate a package extension." + def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', help="Activate without first activating dependencies.") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package extension to activate.") def activate(parser, args): diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py index cf2f96fd21..1badd40f7f 100644 --- a/lib/spack/spack/cmd/arch.py +++ b/lib/spack/spack/cmd/arch.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import spack import spack.architecture as architecture description = "Print the architecture for this machine" + def arch(parser, args): print architecture.sys_type() diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py index aa45f67ae1..cf7232258c 100644 --- a/lib/spack/spack/cmd/cd.py +++ b/lib/spack/spack/cmd/cd.py @@ -25,7 +25,8 @@ import spack.cmd.location import spack.modules -description="cd to spack directories in the shell." +description = "cd to spack directories in the shell." + def setup_parser(subparser): """This is for decoration -- spack cd is used through spack's diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 514c5874ef..dc62fbcaf6 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -31,6 +31,7 @@ import spack.cmd description = "Remove build stage and source tarball for packages." 
+ def setup_parser(subparser): subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to clean") diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index af04170824..afcba33714 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -35,7 +35,7 @@ _arguments = {} def add_common_arguments(parser, list_of_arguments): for argument in list_of_arguments: if argument not in _arguments: - message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501 + message = 'Trying to add non existing argument "{0}" to a command' raise KeyError(message.format(argument)) x = _arguments[argument] parser.add_argument(*x.flags, **x.kwargs) @@ -82,7 +82,7 @@ parms = Bunch( kwargs={ 'action': 'store_true', 'dest': 'yes_to_all', - 'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501 + 'help': 'Assume "yes" is the answer to every confirmation request.' }) _arguments['yes_to_all'] = parms diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index c325628ebf..ea91c71479 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -37,6 +37,7 @@ from spack.util.environment import get_path description = "Manage compilers" + def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='compiler_command') @@ -44,48 +45,58 @@ def setup_parser(subparser): scopes = spack.config.config_scopes # Find - find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.') + find_parser = sp.add_parser( + 'find', aliases=['add'], + help='Search the system for compilers to add to Spack configuration.') find_parser.add_argument('add_paths', nargs=argparse.REMAINDER) - find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, - help="Configuration scope to modify.") + find_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") # Remove - remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.') + remove_parser = sp.add_parser( + 'remove', aliases=['rm'], help='Remove compiler by spec.') remove_parser.add_argument( - '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.') + '-a', '--all', action='store_true', + help='Remove ALL compilers that match spec.') remove_parser.add_argument('compiler_spec') - remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, - help="Configuration scope to modify.") + remove_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") # List list_parser = sp.add_parser('list', help='list available compilers') - list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, - help="Configuration scope to read from.") + list_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") # Info info_parser = sp.add_parser('info', help='Show compiler paths.') info_parser.add_argument('compiler_spec') - info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, - help="Configuration scope to read from.") + info_parser.add_argument( + '--scope', choices=scopes, 
default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") def compiler_find(args): - """Search either $PATH or a list of paths OR MODULES for compilers and add them - to Spack's configuration.""" + """Search either $PATH or a list of paths OR MODULES for compilers and + add them to Spack's configuration. + + """ paths = args.add_paths if not paths: paths = get_path('PATH') - # Don't initialize compilers config via compilers.get_compiler_config. - # Just let compiler_find do the + # Don't initialize compilers config via compilers.get_compiler_config. + # Just let compiler_find do the # entire process and return an empty config from all_compilers # Default for any other process is init_config=True compilers = [c for c in spack.compilers.find_compilers(*paths) if c.spec not in spack.compilers.all_compilers( - scope=args.scope, init_config=False)] + scope=args.scope, init_config=False)] if compilers: spack.compilers.add_compilers_to_config(compilers, scope=args.scope, - init_config=False) + init_config=False) n = len(compilers) s = 's' if n > 1 else '' filename = spack.config.get_config_filename(args.scope, 'compilers') @@ -103,11 +114,12 @@ def compiler_remove(args): elif not args.all and len(compilers) > 1: tty.error("Multiple compilers match spec %s. Choose one:" % cspec) colify(reversed(sorted([c.spec for c in compilers])), indent=4) - tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.") + tty.msg("Or, use `spack compiler remove -a` to remove all of them.") sys.exit(1) for compiler in compilers: - spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope) + spack.compilers.remove_compiler_from_config( + compiler.spec, scope=args.scope) tty.msg("Removed compiler %s" % compiler.spec) @@ -133,7 +145,8 @@ def compiler_list(args): tty.msg("Available compilers") index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name') for i, (name, compilers) in enumerate(index.items()): - if i >= 1: print + if i >= 1: + print cname = "%s{%s}" % (spack.spec.compiler_color, name) tty.hline(colorize(cname), char='-') @@ -141,10 +154,10 @@ def compiler_list(args): def compiler(parser, args): - action = {'add' : compiler_find, - 'find' : compiler_find, - 'remove' : compiler_remove, - 'rm' : compiler_remove, - 'info' : compiler_info, - 'list' : compiler_list } + action = {'add': compiler_find, + 'find': compiler_find, + 'remove': compiler_remove, + 'rm': compiler_remove, + 'info': compiler_info, + 'list': compiler_list} action[args.compiler_command](args) diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py index 9fbc2bb952..b87f977e5a 100644 --- a/lib/spack/spack/cmd/compilers.py +++ b/lib/spack/spack/cmd/compilers.py @@ -22,18 +22,16 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import llnl.util.tty as tty -from llnl.util.tty.colify import colify -from llnl.util.lang import index_by - import spack from spack.cmd.compiler import compiler_list description = "List available compilers. Same as 'spack compiler list'." 
+ def setup_parser(subparser): subparser.add_argument('--scope', choices=spack.config.config_scopes, help="Configuration scope to read/modify.") + def compilers(parser, args): compiler_list(args) diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index d6f56c270d..c189e37036 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -22,15 +22,11 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys -import argparse - -import llnl.util.tty as tty - import spack.config description = "Get and set configuration options." + def setup_parser(subparser): # User can only choose one scope_group = subparser.add_mutually_exclusive_group() @@ -64,6 +60,6 @@ def config_edit(args): def config(parser, args): - action = { 'get' : config_get, - 'edit' : config_edit } + action = {'get': config_get, + 'edit': config_edit} action[args.config_command](args) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 51bf17a44b..52a82eb38f 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -217,6 +217,7 @@ def setup_parser(subparser): class BuildSystemGuesser(object): + def __call__(self, stage, url): """Try to guess the type of build system used by a project based on the contents of its archive or the URL it was downloaded from.""" diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index 990309ee48..2b15a0331e 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -31,6 +31,7 @@ from spack.graph import topological_sort description = "Deactivate a package extension." + def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', @@ -40,7 +41,8 @@ def setup_parser(subparser): help="Deactivate all extensions of an extendable package, or " "deactivate an extension AND its dependencies.") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package extension to deactivate.") def deactivate(parser, args): @@ -65,7 +67,8 @@ def deactivate(parser, args): if not args.force and not spec.package.activated: tty.die("%s is not activated." % pkg.spec.short_spec) - tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec) + tty.msg("Deactivating %s and all dependencies." % + pkg.spec.short_spec) topo_order = topological_sort(spec) index = spec.index() @@ -79,7 +82,9 @@ def deactivate(parser, args): epkg.do_deactivate(force=args.force) else: - tty.die("spack deactivate --all requires an extendable package or an extension.") + tty.die( + "spack deactivate --all requires an extendable package " + "or an extension.") else: if not pkg.is_extension: diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py index 78eb6847b8..7729105e62 100644 --- a/lib/spack/spack/cmd/dependents.py +++ b/lib/spack/spack/cmd/dependents.py @@ -31,9 +31,11 @@ import spack.cmd description = "Show installed packages that depend on another." 
+ def setup_parser(subparser): subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.") + 'spec', nargs=argparse.REMAINDER, + help="specs to list dependencies of.") def dependents(parser, args): @@ -42,5 +44,6 @@ def dependents(parser, args): tty.die("spack dependents takes only one spec.") fmt = '$_$@$%@$+$=$#' - deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents] + deps = [d.format(fmt, color=True) + for d in specs[0].package.installed_dependents] tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 643e6374b2..487654d261 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -35,6 +35,7 @@ from spack.stage import DIYStage description = "Do-It-Yourself: build from an existing source directory." + def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -76,14 +77,17 @@ def diy(self, args): return if not spec.versions.concrete: - tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?") + tty.die( + "spack diy spec must have a single, concrete version. " + "Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) if package.installed: tty.error("Already installed in %s" % package.prefix) - tty.msg("Uninstall or try adding a version suffix for this DIY build.") + tty.msg("Uninstall or try adding a version suffix for this " + "DIY build.") sys.exit(1) # Forces the build to run out of the current directory. diff --git a/lib/spack/spack/cmd/doc.py b/lib/spack/spack/cmd/doc.py index b3d0737d13..291b17216f 100644 --- a/lib/spack/spack/cmd/doc.py +++ b/lib/spack/spack/cmd/doc.py @@ -25,6 +25,7 @@ description = "Run pydoc from within spack." + def setup_parser(subparser): subparser.add_argument('entity', help="Run pydoc help on entity") diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index 49ab83867a..286136dd67 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False): if os.path.exists(path): if not os.path.isfile(path): tty.die("Something's wrong. '%s' is not a file!" % path) - if not os.access(path, os.R_OK|os.W_OK): + if not os.access(path, os.R_OK | os.W_OK): tty.die("Insufficient permissions on '%s'!" % path) elif not force: tty.die("No package '%s'. Use spack create, or supply -f/--force " @@ -93,19 +93,23 @@ def setup_parser(subparser): # Various filetypes you can edit directly from the cmd line. 
excl_args.add_argument( '-c', '--command', dest='path', action='store_const', - const=spack.cmd.command_path, help="Edit the command with the supplied name.") + const=spack.cmd.command_path, + help="Edit the command with the supplied name.") excl_args.add_argument( '-t', '--test', dest='path', action='store_const', const=spack.test_path, help="Edit the test with the supplied name.") excl_args.add_argument( '-m', '--module', dest='path', action='store_const', - const=spack.module_path, help="Edit the main spack module with the supplied name.") + const=spack.module_path, + help="Edit the main spack module with the supplied name.") # Options for editing packages excl_args.add_argument( - '-r', '--repo', default=None, help="Path to repo to edit package in.") + '-r', '--repo', default=None, + help="Path to repo to edit package in.") excl_args.add_argument( - '-N', '--namespace', default=None, help="Namespace of package to edit.") + '-N', '--namespace', default=None, + help="Namespace of package to edit.") subparser.add_argument( 'name', nargs='?', default=None, help="name of package to edit") diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 85d111e91e..f3bad039d4 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -28,11 +28,13 @@ import llnl.util.tty as tty import spack.cmd import spack.build_environment as build_env -description = "Run a command with the environment for a particular spec's install." +description = "Run a command with the install environment for a spec." + def setup_parser(subparser): subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.") + 'spec', nargs=argparse.REMAINDER, + help="specs of package environment to emulate.") def env(parser, args): @@ -47,7 +49,7 @@ def env(parser, args): if sep in args.spec: s = args.spec.index(sep) spec = args.spec[:s] - cmd = args.spec[s+1:] + cmd = args.spec[s + 1:] else: spec = args.spec[0] cmd = args.spec[1:] diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 11659e0c96..b5c484305f 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import argparse import llnl.util.tty as tty @@ -34,6 +33,7 @@ import spack.cmd.find description = "List extensions for package." + def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() format_group.add_argument( @@ -47,7 +47,8 @@ def setup_parser(subparser): help='Show full dependency DAG of extensions') subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to list extensions for') def extensions(parser, args): @@ -85,7 +86,8 @@ def extensions(parser, args): # # List specs of installed extensions. 
# - installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] + installed = [ + s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: tty.msg("None installed.") @@ -102,4 +104,5 @@ def extensions(parser, args): tty.msg("None activated.") return tty.msg("%d currently activated:" % len(activated)) - spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long) + spack.cmd.find.display_specs( + activated.values(), mode=args.mode, long=args.long) diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py index 1afc51d9fa..c1ac2ed48d 100644 --- a/lib/spack/spack/cmd/fetch.py +++ b/lib/spack/spack/cmd/fetch.py @@ -29,16 +29,21 @@ import spack.cmd description = "Fetch archives for packages" + def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check packages against checksum") subparser.add_argument( - '-m', '--missing', action='store_true', help="Also fetch all missing dependencies") + '-m', '--missing', action='store_true', + help="Also fetch all missing dependencies") subparser.add_argument( - '-D', '--dependencies', action='store_true', help="Also fetch all dependencies") + '-D', '--dependencies', action='store_true', + help="Also fetch all dependencies") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to fetch") + def fetch(parser, args): if not args.packages: @@ -50,7 +55,6 @@ def fetch(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: if args.missing or args.dependencies: - to_fetch = set() for s in spec.traverse(deptype_query=spack.alldeps): package = spack.repo.get(s) if args.missing and package.installed: diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index da65121836..8faabfbb7b 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -30,6 +30,7 @@ from spack.graph import * description = "Generate graphs of package dependency relationships." + def setup_parser(subparser): setup_parser.parser = subparser @@ -42,10 +43,12 @@ def setup_parser(subparser): help="Generate graph in dot format and print to stdout.") subparser.add_argument( - '--concretize', action='store_true', help="Concretize specs before graphing.") + '--concretize', action='store_true', + help="Concretize specs before graphing.") subparser.add_argument( - 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.") + 'specs', nargs=argparse.REMAINDER, + help="specs of packages to graph.") def graph(parser, args): @@ -56,11 +59,11 @@ def graph(parser, args): setup_parser.parser.print_help() return 1 - if args.dot: # Dot graph only if asked for. + if args.dot: # Dot graph only if asked for. 
graph_dot(*specs) - elif specs: # ascii is default: user doesn't need to provide it explicitly + elif specs: # ascii is default: user doesn't need to provide it explicitly graph_ascii(specs[0], debug=spack.debug) for spec in specs[1:]: - print # extra line bt/w independent graphs + print # extra line bt/w independent graphs graph_ascii(spec, debug=spack.debug) diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py index 1d23161839..5bc8fc3e74 100644 --- a/lib/spack/spack/cmd/help.py +++ b/lib/spack/spack/cmd/help.py @@ -22,14 +22,14 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys - description = "Get help on spack and its commands" + def setup_parser(subparser): subparser.add_argument('help_command', nargs='?', default=None, help='command to get help on') + def help(parser, args): if args.help_command: parser.parse_args([args.help_command, '-h']) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 4c076322a9..7663a97a28 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -31,6 +31,7 @@ import spack.cmd description = "Build and install packages" + def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -52,18 +53,18 @@ def setup_parser(subparser): help="Display verbose build output while installing.") subparser.add_argument( '--fake', action='store_true', dest='fake', - help="Fake install. Just remove the prefix and touch a fake file in it.") + help="Fake install. Just remove prefix and create a fake file.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', help="Install a package *without* cleaning the environment.") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to install") subparser.add_argument( '--run-tests', action='store_true', dest='run_tests', help="Run tests during installation of a package.") - def install(parser, args): if not args.packages: tty.die("install requires at least one package argument") diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index 205abbb6b3..85190a5d0b 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -25,13 +25,16 @@ import argparse import spack.modules -description ="Add package to environment using modules." +description = "Add package to environment using modules." + def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="Spec of package to load with modules. (If -, read specs from STDIN)") + 'spec', nargs=argparse.REMAINDER, + help="Spec of package to load with modules. 
" + "(If -, read specs from STDIN)") def load(parser, args): diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index b0dbb1a550..b9c8b5c330 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -22,8 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import sys import argparse import llnl.util.tty as tty @@ -32,16 +30,19 @@ from llnl.util.filesystem import join_path import spack import spack.cmd -description="Print out locations of various directories used by Spack" +description = "Print out locations of various directories used by Spack" + def setup_parser(subparser): global directories directories = subparser.add_mutually_exclusive_group() directories.add_argument( - '-m', '--module-dir', action='store_true', help="Spack python module directory.") + '-m', '--module-dir', action='store_true', + help="Spack python module directory.") directories.add_argument( - '-r', '--spack-root', action='store_true', help="Spack installation root.") + '-r', '--spack-root', action='store_true', + help="Spack installation root.") directories.add_argument( '-i', '--install-dir', action='store_true', @@ -53,15 +54,19 @@ def setup_parser(subparser): '-P', '--packages', action='store_true', help="Top-level packages directory for Spack.") directories.add_argument( - '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.") + '-s', '--stage-dir', action='store_true', + help="Stage directory for a spec.") directories.add_argument( - '-S', '--stages', action='store_true', help="Top level Stage directory.") + '-S', '--stages', action='store_true', + help="Top level Stage directory.") directories.add_argument( '-b', '--build-dir', action='store_true', - help="Checked out or expanded source directory for a spec (requires it to be staged first).") + help="Checked out or expanded source directory for a spec " + "(requires it to be staged first).") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package to fetch directory for.") def location(parser, args): @@ -104,9 +109,9 @@ def location(parser, args): if args.stage_dir: print pkg.stage.path - else: # args.build_dir is the default. + else: # args.build_dir is the default. if not pkg.stage.source_path: - tty.die("Build directory does not exist yet. Run this to create it:", + tty.die("Build directory does not exist yet. " + "Run this to create it:", "spack stage " + " ".join(args.spec)) print pkg.stage.source_path - diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 0cf682fc4f..585faaf524 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -23,7 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import sys from datetime import datetime import argparse @@ -40,6 +39,7 @@ from spack.util.spack_yaml import syaml_dict description = "Manage mirrors." 
+ def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', @@ -61,8 +61,9 @@ def setup_parser(subparser): '-D', '--dependencies', action='store_true', help="Also fetch all dependencies") create_parser.add_argument( - '-o', '--one-version-per-spec', action='store_const', const=1, default=0, - help="Only fetch one 'preferred' version per spec, not all known versions.") + '-o', '--one-version-per-spec', action='store_const', + const=1, default=0, + help="Only fetch one 'preferred' version per spec, not all known.") scopes = spack.config.config_scopes @@ -70,7 +71,7 @@ def setup_parser(subparser): add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser.add_argument('name', help="Mnemonic name for mirror.") add_parser.add_argument( - 'url', help="URL of mirror directory created by 'spack mirror create'.") + 'url', help="URL of mirror directory from 'spack mirror create'.") add_parser.add_argument( '--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") @@ -107,7 +108,7 @@ def mirror_add(args): tty.die("Mirror with url %s already exists." % url) # should only be one item per mirror dict. - items = [(n,u) for n,u in mirrors.items()] + items = [(n, u) for n, u in mirrors.items()] items.insert(0, (args.name, url)) mirrors = syaml_dict(items) spack.config.update_config('mirrors', mirrors, scope=args.scope) @@ -121,7 +122,7 @@ def mirror_remove(args): if not mirrors: mirrors = syaml_dict() - if not name in mirrors: + if name not in mirrors: tty.die("No mirror with name %s" % name) old_value = mirrors.pop(name) @@ -152,7 +153,7 @@ def _read_specs_from_file(filename): s.package specs.append(s) except SpackError, e: - tty.die("Parse error in %s, line %d:" % (args.file, i+1), + tty.die("Parse error in %s, line %d:" % (args.file, i + 1), ">>> " + string, str(e)) return specs @@ -214,10 +215,10 @@ def mirror_create(args): def mirror(parser, args): - action = { 'create' : mirror_create, - 'add' : mirror_add, - 'remove' : mirror_remove, - 'rm' : mirror_remove, - 'list' : mirror_list } + action = {'create': mirror_create, + 'add': mirror_add, + 'remove': mirror_remove, + 'rm': mirror_remove, + 'list': mirror_list} action[args.mirror_command](args) diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index a10e36e077..2d0b83fe00 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -118,7 +118,8 @@ def loads(mtype, specs, args): seen_add = seen.add for spec in specs_from_user_constraint: specs.extend( - [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501 + [item for item in spec.traverse(order='post', cover='nodes') + if not (item in seen or seen_add(item))] ) module_cls = module_types[mtype] @@ -178,7 +179,9 @@ def rm(mtype, specs, args): # Ask for confirmation if not args.yes_to_all: - tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501 + tty.msg( + 'You are about to remove {0} module files the following specs:\n' + .format(mtype)) spack.cmd.display_specs(specs_with_modules, long=True) print('') spack.cmd.ask_for_confirmation('Do you want to proceed ? 
') @@ -197,7 +200,9 @@ def refresh(mtype, specs, args): return if not args.yes_to_all: - tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501 + tty.msg( + 'You are about to regenerate {name} module files for:\n' + .format(name=mtype)) spack.cmd.display_specs(specs, long=True) print('') spack.cmd.ask_for_confirmation('Do you want to proceed ? ') @@ -245,11 +250,13 @@ def module(parser, args): try: callbacks[args.subparser_name](module_type, args.specs, args) except MultipleMatches: - message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501 + message = ('the constraint \'{query}\' matches multiple packages, ' + 'and this is not allowed in this context') tty.error(message.format(query=constraint)) for s in args.specs: sys.stderr.write(s.format(color=True) + '\n') raise SystemExit(1) except NoMatch: - message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501 + message = ('the constraint \'{query}\' match no package, ' + 'and this is not allowed in this context') tty.die(message.format(query=constraint)) diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index a27502d30e..9ed42de823 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -32,7 +32,7 @@ description = "Print a list of all packages in reStructuredText." def github_url(pkg): """Link to a package file on github.""" - url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501 + url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" return (url % pkg.name) diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index a5507e42cf..9c72da40b5 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -29,14 +29,16 @@ import spack.cmd import spack -description="Patch expanded archive sources in preparation for install" +description = "Patch expanded archive sources in preparation for install" + def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check downloaded packages against checksum") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to stage") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to stage") def patch(parser, args): diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index a24c2759fe..7791b93cf5 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -33,6 +33,7 @@ from spack.util.executable import * description = "Query packages associated with particular git revisions." 
+ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='pkg_command') @@ -46,22 +47,28 @@ def setup_parser(subparser): help="Revision to list packages for.") diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__) - diff_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - diff_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + diff_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + diff_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") add_parser = sp.add_parser('added', help=pkg_added.__doc__) - add_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - add_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + add_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + add_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__) - rm_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - rm_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + rm_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + rm_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") def get_git(): @@ -88,7 +95,8 @@ def pkg_add(args): for pkg_name in args.packages: filename = spack.repo.filename_for_package_name(pkg_name) if not os.path.isfile(filename): - tty.die("No such package: %s. Path does not exist:" % pkg_name, filename) + tty.die("No such package: %s. 
Path does not exist:" % + pkg_name, filename) git = get_git() git('-C', spack.packages_path, 'add', filename) @@ -112,7 +120,8 @@ def pkg_diff(args): if u1: print "%s:" % args.rev1 colify(sorted(u1), indent=4) - if u1: print + if u1: + print if u2: print "%s:" % args.rev2 @@ -122,19 +131,21 @@ def pkg_diff(args): def pkg_removed(args): """Show packages removed since a commit.""" u1, u2 = diff_packages(args.rev1, args.rev2) - if u1: colify(sorted(u1)) + if u1: + colify(sorted(u1)) def pkg_added(args): """Show packages added since a commit.""" u1, u2 = diff_packages(args.rev1, args.rev2) - if u2: colify(sorted(u2)) + if u2: + colify(sorted(u2)) def pkg(parser, args): - action = { 'add' : pkg_add, - 'diff' : pkg_diff, - 'list' : pkg_list, - 'removed' : pkg_removed, - 'added' : pkg_added } + action = {'add': pkg_add, + 'diff': pkg_diff, + 'list': pkg_list, + 'removed': pkg_removed, + 'added': pkg_added} action[args.pkg_command](args) diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py index e9007486d2..0f4a97cc4a 100644 --- a/lib/spack/spack/cmd/providers.py +++ b/lib/spack/spack/cmd/providers.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import argparse from llnl.util.tty.colify import colify @@ -30,11 +29,13 @@ from llnl.util.tty.colify import colify import spack import spack.cmd -description ="List packages that provide a particular virtual package" +description = "List packages that provide a particular virtual package" + def setup_parser(subparser): - subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER, - help='Find packages that provide this virtual package') + subparser.add_argument( + 'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER, + help='Find packages that provide this virtual package') def providers(parser, args): diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py index 59423271b9..12727cb599 100644 --- a/lib/spack/spack/cmd/python.py +++ b/lib/spack/spack/cmd/python.py @@ -30,18 +30,22 @@ import platform import spack + def setup_parser(subparser): subparser.add_argument( '-c', dest='python_command', help='Command to execute.') subparser.add_argument( - 'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.") + 'python_args', nargs=argparse.REMAINDER, + help="File to run plus arguments.") + description = "Launch an interpreter as spack would launch a command" + def python(parser, args): # Fake a main python shell by setting __name__ to __main__. - console = code.InteractiveConsole({'__name__' : '__main__', - 'spack' : spack}) + console = code.InteractiveConsole({'__name__': '__main__', + 'spack': spack}) if "PYTHONSTARTUP" in os.environ: startup_file = os.environ["PYTHONSTARTUP"] diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py index 93eba7a0f1..e37eebbd92 100644 --- a/lib/spack/spack/cmd/reindex.py +++ b/lib/spack/spack/cmd/reindex.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse import spack description = "Rebuild Spack's package database." 
+ def reindex(parser, args): spack.installed_db.reindex(spack.install_layout) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index cbd8f4784e..5ab2ac0833 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -23,20 +23,16 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import re -import shutil -import argparse import llnl.util.tty as tty -from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config -from spack.util.environment import get_path from spack.repository import * description = "Manage package source repositories." + def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') scopes = spack.config.config_scopes @@ -57,13 +53,15 @@ def setup_parser(subparser): # Add add_parser = sp.add_parser('add', help=repo_add.__doc__) - add_parser.add_argument('path', help="Path to a Spack package repository directory.") + add_parser.add_argument( + 'path', help="Path to a Spack package repository directory.") add_parser.add_argument( '--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") # Remove - remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm']) + remove_parser = sp.add_parser( + 'remove', help=repo_remove.__doc__, aliases=['rm']) remove_parser.add_argument( 'path_or_namespace', help="Path or namespace of a Spack package repository.") @@ -100,7 +98,8 @@ def repo_add(args): # If that succeeds, finally add it to the configuration. repos = spack.config.get_config('repos', args.scope) - if not repos: repos = [] + if not repos: + repos = [] if repo.root in repos or path in repos: tty.die("Repository is already registered with Spack: %s" % path) @@ -135,7 +134,7 @@ def repo_remove(args): tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace)) return - except RepoError as e: + except RepoError: continue tty.die("No repository with path or namespace: %s" @@ -149,7 +148,7 @@ def repo_list(args): for r in roots: try: repos.append(Repo(r)) - except RepoError as e: + except RepoError: continue msg = "%d package repositor" % len(repos) @@ -166,9 +165,9 @@ def repo_list(args): def repo(parser, args): - action = { 'create' : repo_create, - 'list' : repo_list, - 'add' : repo_add, - 'remove' : repo_remove, - 'rm' : repo_remove} + action = {'create': repo_create, + 'list': repo_list, + 'add': repo_add, + 'remove': repo_remove, + 'rm': repo_remove} action[args.repo_command](args) diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py index 325d30662f..969afe09bd 100644 --- a/lib/spack/spack/cmd/restage.py +++ b/lib/spack/spack/cmd/restage.py @@ -31,6 +31,7 @@ import spack.cmd description = "Revert checked out package source code." + def setup_parser(subparser): subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to restage") diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py index 04f3d663df..b55e102c0e 100644 --- a/lib/spack/spack/cmd/setup.py +++ b/lib/spack/spack/cmd/setup.py @@ -35,6 +35,7 @@ from spack.stage import DIYStage description = "Create a configuration script and module, but don't build." 
+ def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -70,7 +71,9 @@ def setup(self, args): return if not spec.versions.concrete: - tty.die("spack setup spec must have a single, concrete version. Did you forget a package version number?") + tty.die( + "spack setup spec must have a single, concrete version. " + "Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) @@ -84,8 +87,8 @@ def setup(self, args): spack.do_checksum = False package.do_install( - keep_prefix=True, # Don't remove install directory, even if you think you should + keep_prefix=True, # Don't remove install directory ignore_deps=args.ignore_deps, verbose=args.verbose, keep_stage=True, # don't remove source dir for SETUP. - install_phases = set(['setup', 'provenance'])) + install_phases=set(['setup', 'provenance'])) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 321e3e429b..6e6d1c1277 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -25,23 +25,22 @@ import argparse import spack.cmd -import llnl.util.tty as tty - import spack -import spack.url as url description = "print out abstract and concrete versions of a spec." + def setup_parser(subparser): subparser.add_argument('-i', '--ids', action='store_true', help="show numerical ids for dependencies.") - subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages") + subparser.add_argument( + 'specs', nargs=argparse.REMAINDER, help="specs of packages") def spec(parser, args): - kwargs = { 'ids' : args.ids, - 'indent' : 2, - 'color' : True } + kwargs = {'ids': args.ids, + 'indent': 2, + 'color': True} for spec in spack.cmd.parse_specs(args.specs): print "Input spec" diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index 61e9c6d9ff..bfc2e5f456 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -22,14 +22,14 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import argparse import llnl.util.tty as tty import spack import spack.cmd -description="Expand downloaded archive in preparation for install" +description = "Expand downloaded archive in preparation for install" + def setup_parser(subparser): subparser.add_argument( diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 14c06d136d..8e7173e9a2 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -36,25 +36,25 @@ from llnl.util.filesystem import * from spack.build_environment import InstallError from spack.fetch_strategy import FetchError -description = "Run package installation as a unit test, output formatted results." +description = "Run package install as a unit test, output formatted results." def setup_parser(subparser): - subparser.add_argument('-j', - '--jobs', - action='store', - type=int, - help="Explicitly set number of make jobs. Default is #cpus.") + subparser.add_argument( + '-j', '--jobs', action='store', type=int, + help="Explicitly set number of make jobs. 
Default is #cpus.") - subparser.add_argument('-n', - '--no-checksum', - action='store_true', - dest='no_checksum', - help="Do not check packages against checksum") + subparser.add_argument( + '-n', '--no-checksum', action='store_true', dest='no_checksum', + help="Do not check packages against checksum") - subparser.add_argument('-o', '--output', action='store', help="test output goes in this file") + subparser.add_argument( + '-o', '--output', action='store', + help="test output goes in this file") - subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install") + subparser.add_argument( + 'package', nargs=argparse.REMAINDER, + help="spec of package to install") class TestResult(object): @@ -65,6 +65,7 @@ class TestResult(object): class TestSuite(object): + def __init__(self, filename): self.filename = filename self.root = ET.Element('testsuite') @@ -75,14 +76,17 @@ class TestSuite(object): def append(self, item): if not isinstance(item, TestCase): - raise TypeError('only TestCase instances may be appended to a TestSuite instance') + raise TypeError( + 'only TestCase instances may be appended to TestSuite') self.tests.append(item) # Append the item to the list of tests def __exit__(self, exc_type, exc_val, exc_tb): # Prepare the header for the entire test suite - number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests) + number_of_errors = sum( + x.result_type == TestResult.ERRORED for x in self.tests) self.root.set('errors', str(number_of_errors)) - number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests) + number_of_failures = sum( + x.result_type == TestResult.FAILED for x in self.tests) self.root.set('failures', str(number_of_failures)) self.root.set('tests', str(len(self.tests))) @@ -112,7 +116,8 @@ class TestCase(object): self.element.set('time', str(time)) self.result_type = None - def set_result(self, result_type, message=None, error_type=None, text=None): + def set_result(self, result_type, + message=None, error_type=None, text=None): self.result_type = result_type result = TestCase.results[self.result_type] if result is not None and result is not TestResult.PASSED: @@ -155,13 +160,19 @@ def install_single_spec(spec, number_of_jobs): # If it is already installed, skip the test if spack.repo.get(spec).installed: testcase = TestCase(package.name, package.spec.short_spec, time=0.0) - testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed') + testcase.set_result( + TestResult.SKIPPED, + message='Skipped [already installed]', + error_type='already_installed') return testcase # If it relies on dependencies that did not install, skip if failed_dependencies(spec): testcase = TestCase(package.name, package.spec.short_spec, time=0.0) - testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed') + testcase.set_result( + TestResult.SKIPPED, + message='Skipped [failed dependencies]', + error_type='dep_failed') return testcase # Otherwise try to install the spec @@ -177,26 +188,30 @@ def install_single_spec(spec, number_of_jobs): testcase = TestCase(package.name, package.spec.short_spec, duration) testcase.set_result(TestResult.PASSED) except InstallError: - # An InstallError is considered a failure (the recipe didn't work correctly) + # An InstallError is considered a failure (the recipe didn't work + # correctly) duration = time.time() - start_time # Try to get the log lines = fetch_log(package.build_log_path) text = 
'\n'.join(lines) testcase = TestCase(package.name, package.spec.short_spec, duration) - testcase.set_result(TestResult.FAILED, message='Installation failure', text=text) + testcase.set_result(TestResult.FAILED, + message='Installation failure', text=text) except FetchError: # A FetchError is considered an error (we didn't even start building) duration = time.time() - start_time testcase = TestCase(package.name, package.spec.short_spec, duration) - testcase.set_result(TestResult.ERRORED, message='Unable to fetch package') + testcase.set_result(TestResult.ERRORED, + message='Unable to fetch package') return testcase def get_filename(args, top_spec): if not args.output: - fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash()) + fname = 'test-{x.name}-{x.version}-{hash}.xml'.format( + x=top_spec, hash=top_spec.dag_hash()) output_directory = join_path(os.getcwd(), 'test-output') if not os.path.exists(output_directory): os.mkdir(output_directory) diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index b9f2a449ae..bf7342f606 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -52,6 +52,7 @@ def setup_parser(subparser): class MockCache(object): + def store(self, copyCmd, relativeDst): pass @@ -60,6 +61,7 @@ class MockCache(object): class MockCacheFetcher(object): + def set_stage(self, stage): pass diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index dbe6cd6584..8957d1c908 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -50,25 +50,27 @@ def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="Remove regardless of whether other packages depend on this one.") + subparser.add_argument( '-a', '--all', action='store_true', dest='all', - help="USE CAREFULLY. Remove ALL installed packages that match each " + - "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501 - "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501 + help="USE CAREFULLY. Remove ALL installed packages that match each " + "supplied spec. i.e., if you say uninstall libelf, ALL versions " + "of libelf are uninstalled. This is both useful and dangerous, " + "like rm -r.") + subparser.add_argument( '-d', '--dependents', action='store_true', dest='dependents', - help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501 - ) + help='Also uninstall any packages that depend on the ones given ' + 'via command line.') + subparser.add_argument( '-y', '--yes-to-all', action='store_true', dest='yes_to_all', - help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501 + help='Assume "yes" is the answer to every confirmation requested') - ) subparser.add_argument( 'packages', nargs=argparse.REMAINDER, - help="specs of packages to uninstall" - ) + help="specs of packages to uninstall") def concretize_specs(specs, allow_multiple_matches=False, force=False): diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py index 7bd15750ed..b52bedb7b4 100644 --- a/lib/spack/spack/cmd/unload.py +++ b/lib/spack/spack/cmd/unload.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Remove package from environment using module." +description = "Remove package from environment using module." 
+ def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to unload with modules.') def unload(parser, args): diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py index 789a690e9c..6403cf6162 100644 --- a/lib/spack/spack/cmd/unuse.py +++ b/lib/spack/spack/cmd/unuse.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Remove package from environment using dotkit." +description = "Remove package from environment using dotkit." + def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to unuse with dotkit.') def unuse(parser, args): diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py index ce12a17d13..b8c7c95040 100644 --- a/lib/spack/spack/cmd/url-parse.py +++ b/lib/spack/spack/cmd/url-parse.py @@ -22,28 +22,28 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys - import llnl.util.tty as tty import spack import spack.url from spack.util.web import find_versions_of_archive -description = "Show parsing of a URL, optionally spider web for other versions." +description = "Show parsing of a URL, optionally spider web for versions." + def setup_parser(subparser): subparser.add_argument('url', help="url of a package archive") subparser.add_argument( - '-s', '--spider', action='store_true', help="Spider the source page for versions.") + '-s', '--spider', action='store_true', + help="Spider the source page for versions.") def print_name_and_version(url): name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url) - underlines = [" "] * max(ns+nl, vs+vl) - for i in range(ns, ns+nl): + underlines = [" "] * max(ns + nl, vs + vl) + for i in range(ns, ns + nl): underlines[i] = '-' - for i in range(vs, vs+vl): + for i in range(vs, vs + vl): underlines[i] = '~' print " %s" % url diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py index 2fe2019a22..f151581d7d 100644 --- a/lib/spack/spack/cmd/urls.py +++ b/lib/spack/spack/cmd/urls.py @@ -22,12 +22,12 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import spack import spack.url description = "Inspect urls used by packages in spack." 
+ def setup_parser(subparser): subparser.add_argument( '-c', '--color', action='store_true', @@ -53,6 +53,7 @@ def urls(parser, args): for url in sorted(urls): if args.color or args.extrapolation: - print spack.url.color_url(url, subs=args.extrapolation, errors=True) + print spack.url.color_url( + url, subs=args.extrapolation, errors=True) else: print url diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py index bbb90fde1b..e3612ace48 100644 --- a/lib/spack/spack/cmd/use.py +++ b/lib/spack/spack/cmd/use.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Add package to environment using dotkit." +description = "Add package to environment using dotkit." + def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to use with dotkit.') def use(parser, args): diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py index ec3a4b2e34..1e95225ab8 100644 --- a/lib/spack/spack/cmd/versions.py +++ b/lib/spack/spack/cmd/versions.py @@ -22,15 +22,16 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from llnl.util.tty.colify import colify import llnl.util.tty as tty import spack -description ="List available versions of a package" +description = "List available versions of a package" + def setup_parser(subparser): - subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for') + subparser.add_argument('package', metavar='PACKAGE', + help='Package to list versions for') def versions(parser, args): diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index ce4555bc56..a77991e4dc 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -25,10 +25,8 @@ import os import re import itertools -from datetime import datetime import llnl.util.tty as tty -from llnl.util.lang import memoized from llnl.util.filesystem import join_path import spack.error @@ -37,10 +35,10 @@ import spack.architecture from spack.util.multiproc import parmap from spack.util.executable import * from spack.util.environment import get_path -from spack.version import Version __all__ = ['Compiler', 'get_compiler_version'] + def _verify_executables(*paths): for path in paths: if not os.path.isfile(path) and os.access(path, os.X_OK): @@ -49,8 +47,9 @@ def _verify_executables(*paths): _version_cache = {} + def get_compiler_version(compiler_path, version_arg, regex='(.*)'): - if not compiler_path in _version_cache: + if compiler_path not in _version_cache: compiler = Executable(compiler_path) output = compiler(version_arg, output=str, error=str) @@ -113,7 +112,7 @@ class Compiler(object): # Name of module used to switch versions of this compiler PrgEnv_compiler = None - def __init__(self, cspec, operating_system, + def __init__(self, cspec, operating_system, paths, modules=[], alias=None, **kwargs): def check(exe): if exe is None: @@ -130,11 +129,6 @@ class Compiler(object): else: self.fc = check(paths[3]) - #self.cc = check(cc) - #self.cxx = check(cxx) - #self.f77 = check(f77) - #self.fc = check(fc) - # Unfortunately have to make sure these params are accepted # in the same order they are returned by sorted(flags) # in 
compilers/__init__.py @@ -158,31 +152,30 @@ class Compiler(object): @property def openmp_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support OpenMP.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - + tty.die( + "The compiler you have chosen does not currently support OpenMP.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # This property should be overridden in the compiler subclass if # C++11 is supported by that compiler @property def cxx11_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support C++11.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - + tty.die( + "The compiler you have chosen does not currently support C++11.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # This property should be overridden in the compiler subclass if # C++14 is supported by that compiler @property def cxx14_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support C++14.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - - + tty.die( + "The compiler you have chosen does not currently support C++14.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # # Compiler classes have methods for querying the version of @@ -191,7 +184,6 @@ class Compiler(object): # Compiler *instances* are just data objects, and can only be # constructed from an actual set of executables. # - @classmethod def default_version(cls, cc): """Override just this to override all compiler version functions.""" @@ -258,16 +250,19 @@ class Compiler(object): version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: - tty.debug("Couldn't get version for compiler %s" % full_path, e) + tty.debug( + "Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. - tty.debug("Error while executing candidate compiler %s" % full_path, - "%s: %s" %(e.__class__.__name__, e)) + tty.debug("Error while executing candidate compiler %s" + % full_path, + "%s: %s" % (e.__class__.__name__, e)) return None - successful = [key for key in parmap(check, checks) if key is not None] + successful = [k for k in parmap(check, checks) if k is not None] + # The 'successful' list is ordered like the input paths. # Reverse it here so that the dict creation (last insert wins) # does not spoil the intented precedence. 
@@ -278,20 +273,23 @@ class Compiler(object): """Return a string representation of the compiler toolchain.""" return self.__str__() - def __str__(self): """Return a string representation of the compiler toolchain.""" return "%s(%s)" % ( - self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system))))) + self.name, '\n '.join((str(s) for s in ( + self.cc, self.cxx, self.f77, self.fc, self.modules, + str(self.operating_system))))) class CompilerAccessError(spack.error.SpackError): + def __init__(self, path): super(CompilerAccessError, self).__init__( "'%s' is not a valid compiler." % path) class InvalidCompilerError(spack.error.SpackError): + def __init__(self): super(InvalidCompilerError, self).__init__( "Compiler has no executables.") diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 0ba94741da..eb866c8bbb 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -26,15 +26,9 @@ system and configuring Spack to use multiple compilers. """ import imp -import os import platform -import copy -import hashlib -import base64 -import yaml -import sys -from llnl.util.lang import memoized, list_modules +from llnl.util.lang import list_modules from llnl.util.filesystem import join_path import spack @@ -43,11 +37,7 @@ import spack.spec import spack.config import spack.architecture -from spack.util.multiproc import parmap -from spack.compiler import Compiler -from spack.util.executable import which from spack.util.naming import mod_to_class -from spack.util.environment import get_path _imported_compilers_module = 'spack.compilers' _path_instance_vars = ['cc', 'cxx', 'f77', 'fc'] @@ -73,7 +63,8 @@ def _to_dict(compiler): """Return a dict version of compiler suitable to insert in YAML.""" d = {} d['spec'] = str(compiler.spec) - d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars ) + d['paths'] = dict((attr, getattr(compiler, attr, None)) + for attr in _path_instance_vars) d['operating_system'] = str(compiler.operating_system) d['modules'] = compiler.modules if compiler.modules else [] @@ -140,15 +131,19 @@ def remove_compiler_from_config(compiler_spec, scope=None): - compiler_specs: a list of CompilerSpec objects. - scope: configuration scope to modify. """ + # Need a better way for this + global _cache_config_file + compiler_config = get_compiler_config(scope) config_length = len(compiler_config) - filtered_compiler_config = [comp for comp in compiler_config - if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] - # Need a better way for this - global _cache_config_file - _cache_config_file = filtered_compiler_config # Update the cache for changes - if len(filtered_compiler_config) == config_length: # No items removed + filtered_compiler_config = [ + comp for comp in compiler_config + if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] + + # Update the cache for changes + _cache_config_file = filtered_compiler_config + if len(filtered_compiler_config) == config_length: # No items removed CompilerSpecInsufficientlySpecificError(compiler_spec) spack.config.update_config('compilers', filtered_compiler_config, scope) @@ -158,7 +153,8 @@ def all_compilers_config(scope=None, init_config=True): available to build with. These are instances of CompilerSpec. """ # Get compilers for this architecture. - global _cache_config_file #Create a cache of the config file so we don't load all the time. 
+ # Create a cache of the config file so we don't load all the time. + global _cache_config_file if not _cache_config_file: _cache_config_file = get_compiler_config(scope, init_config) return _cache_config_file @@ -236,7 +232,8 @@ def compilers_for_spec(compiler_spec, scope=None, **kwargs): continue items = items['compiler'] - if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)): + if not ('paths' in items and + all(n in items['paths'] for n in _path_instance_vars)): raise InvalidCompilerConfigurationError(cspec) cls = class_for_compiler_name(cspec.name) @@ -254,10 +251,10 @@ def compilers_for_spec(compiler_spec, scope=None, **kwargs): mods = [] if 'operating_system' in items: - operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform) + os = spack.architecture._operating_system_from_dict( + items['operating_system'], platform) else: - operating_system = None - + os = None alias = items['alias'] if 'alias' in items else None @@ -266,7 +263,8 @@ def compilers_for_spec(compiler_spec, scope=None, **kwargs): if f in items: flags[f] = items[f] - compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags)) + compilers.append( + cls(cspec, os, compiler_paths, mods, alias, **flags)) return compilers @@ -275,7 +273,6 @@ def compilers_for_spec(compiler_spec, scope=None, **kwargs): for cspec in matches: compilers.extend(get_compilers(cspec)) return compilers -# return [get_compilers(cspec) for cspec in matches] @_auto_compiler_spec @@ -285,8 +282,9 @@ def compiler_for_spec(compiler_spec, arch): operating_system = arch.platform_os assert(compiler_spec.concrete) - compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform) - if c.operating_system == operating_system] + compilers = [ + c for c in compilers_for_spec(compiler_spec, platform=arch.platform) + if c.operating_system == operating_system] if len(compilers) < 1: raise NoCompilerForSpecError(compiler_spec, operating_system) if len(compilers) > 1: @@ -321,11 +319,13 @@ def all_os_classes(): return classes + def all_compiler_types(): return [class_for_compiler_name(c) for c in supported_compilers()] class InvalidCompilerConfigurationError(spack.error.SpackError): + def __init__(self, compiler_spec): super(InvalidCompilerConfigurationError, self).__init__( "Invalid configuration for [compiler \"%s\"]: " % compiler_spec, @@ -335,14 +335,18 @@ class InvalidCompilerConfigurationError(spack.error.SpackError): class NoCompilersError(spack.error.SpackError): def __init__(self): - super(NoCompilersError, self).__init__("Spack could not find any compilers!") + super(NoCompilersError, self).__init__( + "Spack could not find any compilers!") + class NoCompilerForSpecError(spack.error.SpackError): def __init__(self, compiler_spec, target): - super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % ( - target, compiler_spec)) + super(NoCompilerForSpecError, self).__init__( + "No compilers for operating system %s satisfy spec %s" + % (target, compiler_spec)) + class CompilerSpecInsufficientlySpecificError(spack.error.SpackError): def __init__(self, compiler_spec): - super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s", - compiler_spec) + super(CompilerSpecInsufficientlySpecificError, self).__init__( + "Multiple compilers satisfy spec %s" % compiler_spec) diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index 
00b406d820..4cf65222ae 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -29,6 +29,7 @@ from spack.util.executable import * import llnl.util.tty as tty from spack.version import ver + class Clang(Compiler): # Subclasses use possible names of C compiler cc_names = ['clang'] @@ -43,11 +44,12 @@ class Clang(Compiler): fc_names = [] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'clang/clang', - 'cxx' : 'clang/clang++', - # Use default wrappers for fortran, in case provided in compilers.yaml - 'f77' : 'f77', - 'fc' : 'f90' } + link_paths = {'cc': 'clang/clang', + 'cxx': 'clang/clang++', + # Use default wrappers for fortran, in case provided in + # compilers.yaml + 'f77': 'f77', + 'fc': 'f90'} @property def is_apple(self): diff --git a/lib/spack/spack/compilers/craype.py b/lib/spack/spack/compilers/craype.py index 4ba8b110ec..c92e5c131a 100644 --- a/lib/spack/spack/compilers/craype.py +++ b/lib/spack/spack/compilers/craype.py @@ -1,34 +1,33 @@ -##############################################################################} -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://scalability-llnl.github.io/spack +# For details, see https://github.com/llnl/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. +# conditions of the GNU Lesser General Public License for more details. 
# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import llnl.util.tty as tty - -#from spack.build_environment import load_module from spack.compiler import * -#from spack.version import ver + class Craype(Compiler): + """Cray programming environment compiler.""" + # Subclasses use possible names of C compiler cc_names = ['cc'] @@ -47,12 +46,11 @@ class Craype(Compiler): PrgEnv = 'PrgEnv-cray' PrgEnv_compiler = 'craype' - link_paths = { 'cc' : 'cc', - 'cxx' : 'c++', - 'f77' : 'f77', - 'fc' : 'fc'} - + link_paths = {'cc': 'cc', + 'cxx': 'c++', + 'f77': 'f77', + 'fc': 'fc'} + @classmethod def default_version(cls, comp): return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)') - diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index 2fae6688db..a556f346d7 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -26,6 +26,7 @@ import llnl.util.tty as tty from spack.compiler import * from spack.version import ver + class Gcc(Compiler): # Subclasses use possible names of C compiler cc_names = ['gcc'] @@ -44,10 +45,10 @@ class Gcc(Compiler): suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d'] # Named wrapper links within spack.build_env_path - link_paths = {'cc' : 'gcc/gcc', - 'cxx' : 'gcc/g++', - 'f77' : 'gcc/gfortran', - 'fc' : 'gcc/gfortran' } + link_paths = {'cc': 'gcc/gcc', + 'cxx': 'gcc/g++', + 'f77': 'gcc/gfortran', + 'fc': 'gcc/gfortran'} PrgEnv = 'PrgEnv-gnu' PrgEnv_compiler = 'gcc' @@ -79,7 +80,6 @@ class Gcc(Compiler): # older gfortran versions don't have simple dumpversion output. 
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)') - @classmethod def f77_version(cls, f77): return cls.fc_version(f77) diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index 6cad03ff47..8531ecd19a 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -26,6 +26,7 @@ from spack.compiler import * import llnl.util.tty as tty from spack.version import ver + class Intel(Compiler): # Subclasses use possible names of C compiler cc_names = ['icc'] @@ -40,10 +41,10 @@ class Intel(Compiler): fc_names = ['ifort'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'intel/icc', - 'cxx' : 'intel/icpc', - 'f77' : 'intel/ifort', - 'fc' : 'intel/ifort' } + link_paths = {'cc': 'intel/icc', + 'cxx': 'intel/icpc', + 'f77': 'intel/ifort', + 'fc': 'intel/ifort'} PrgEnv = 'PrgEnv-intel' PrgEnv_compiler = 'intel' @@ -64,7 +65,6 @@ class Intel(Compiler): else: return "-std=c++11" - @classmethod def default_version(cls, comp): """The '--version' option seems to be the most consistent one diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py index cee11bc97a..fdfc078b5e 100644 --- a/lib/spack/spack/compilers/nag.py +++ b/lib/spack/spack/compilers/nag.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * -import llnl.util.tty as tty + class Nag(Compiler): # Subclasses use possible names of C compiler @@ -39,11 +39,12 @@ class Nag(Compiler): fc_names = ['nagfor'] # Named wrapper links within spack.build_env_path - link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml - 'cc' : 'cc', - 'cxx' : 'c++', - 'f77' : 'nag/nagfor', - 'fc' : 'nag/nagfor' } + # Use default wrappers for C and C++, in case provided in compilers.yaml + link_paths = { + 'cc': 'cc', + 'cxx': 'c++', + 'f77': 'nag/nagfor', + 'fc': 'nag/nagfor'} @property def openmp_flag(self): @@ -71,9 +72,8 @@ class Nag(Compiler): """The '-V' option works for nag compilers. Output looks like this:: - NAG Fortran Compiler Release 6.0(Hibiya) Build 1037 - Product NPL6A60NA for x86-64 Linux - Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K. 
+ NAG Fortran Compiler Release 6.0(Hibiya) Build 1037 + Product NPL6A60NA for x86-64 Linux """ return get_compiler_version( comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)') diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py index 6d36d8bfa6..0e4be6e9ba 100644 --- a/lib/spack/spack/compilers/pgi.py +++ b/lib/spack/spack/compilers/pgi.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * -import llnl.util.tty as tty + class Pgi(Compiler): # Subclasses use possible names of C compiler @@ -39,17 +39,14 @@ class Pgi(Compiler): fc_names = ['pgfortran', 'pgf95', 'pgf90'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'pgi/pgcc', - 'cxx' : 'pgi/pgc++', - 'f77' : 'pgi/pgfortran', - 'fc' : 'pgi/pgfortran' } - - + link_paths = {'cc': 'pgi/pgcc', + 'cxx': 'pgi/pgc++', + 'f77': 'pgi/pgfortran', + 'fc': 'pgi/pgfortran'} PrgEnv = 'PrgEnv-pgi' PrgEnv_compiler = 'pgi' - @property def openmp_flag(self): return "-mp" diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py index b1431436ad..5c83209781 100644 --- a/lib/spack/spack/compilers/xl.py +++ b/lib/spack/spack/compilers/xl.py @@ -26,24 +26,26 @@ from spack.compiler import * import llnl.util.tty as tty from spack.version import ver + class Xl(Compiler): # Subclasses use possible names of C compiler - cc_names = ['xlc','xlc_r'] + cc_names = ['xlc', 'xlc_r'] # Subclasses use possible names of C++ compiler - cxx_names = ['xlC','xlC_r','xlc++','xlc++_r'] + cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r'] # Subclasses use possible names of Fortran 77 compiler - f77_names = ['xlf','xlf_r'] + f77_names = ['xlf', 'xlf_r'] # Subclasses use possible names of Fortran 90 compiler - fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r'] + fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r', + 'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'xl/xlc', - 'cxx' : 'xl/xlc++', - 'f77' : 'xl/xlf', - 'fc' : 'xl/xlf90' } + link_paths = {'cc': 'xl/xlc', + 'cxx': 'xl/xlc++', + 'f77': 'xl/xlf', + 'fc': 'xl/xlf90'} @property def openmp_flag(self): @@ -56,7 +58,6 @@ class Xl(Compiler): else: return "-qlanglvl=extended0x" - @classmethod def default_version(cls, comp): """The '-qversion' is the standard option fo XL compilers. @@ -82,29 +83,28 @@ class Xl(Compiler): """ return get_compiler_version( - comp, '-qversion',r'([0-9]?[0-9]\.[0-9])') - + comp, '-qversion', r'([0-9]?[0-9]\.[0-9])') @classmethod def fc_version(cls, fc): - """The fortran and C/C++ versions of the XL compiler are always two units apart. - By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1. - Having such a difference in version number is confusing spack quite a lot. - Most notably if you keep the versions as is the default xl compiler will only - have fortran and no C/C++. - So we associate the Fortran compiler with the version associated to the C/C++ - compiler. - One last stumble. Version numbers over 10 have at least a .1 those under 10 - a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can - such a compiler mix and possibly older version of AIX and linux on power. + """The fortran and C/C++ versions of the XL compiler are always + two units apart. 
By this we mean that the fortran release that + goes with XL C/C++ 11.1 is 13.1. Having such a difference in + version number is confusing spack quite a lot. Most notably + if you keep the versions as is the default xl compiler will + only have fortran and no C/C++. So we associate the Fortran + compiler with the version associated to the C/C++ compiler. + One last stumble. Version numbers over 10 have at least a .1 + those under 10 a .0. There is no xlf 9.x or under currently + available. BG/P and BG/L can such a compiler mix and possibly + older version of AIX and linux on power. """ - fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])') + fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])') cver = float(fver) - 2 - if cver < 10 : - cver = cver - 0.1 + if cver < 10: + cver = cver - 0.1 return str(cver) - @classmethod def f77_version(cls, f77): return cls.fc_version(f77) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 6f11c86ce8..726dee62e3 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -61,7 +61,9 @@ class DefaultConcretizer(object): if not providers: raise UnsatisfiableProviderSpecError(providers[0], spec) spec_w_preferred_providers = find_spec( - spec, lambda x: spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) # NOQA: ignore=E501 + spec, + lambda x: spack.pkgsort.spec_has_preferred_provider( + x.name, spec.name)) if not spec_w_preferred_providers: spec_w_preferred_providers = spec provider_cmp = partial(spack.pkgsort.provider_compare, @@ -495,7 +497,8 @@ class UnavailableCompilerVersionError(spack.error.SpackError): def __init__(self, compiler_spec, operating_system): super(UnavailableCompilerVersionError, self).__init__( - "No available compiler version matches '%s' on operating_system %s" % (compiler_spec, operating_system), # NOQA: ignore=E501 + "No available compiler version matches '%s' on operating_system %s" + % (compiler_spec, operating_system), "Run 'spack compilers' to see available compiler Options.") @@ -506,14 +509,15 @@ class NoValidVersionError(spack.error.SpackError): def __init__(self, spec): super(NoValidVersionError, self).__init__( - "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)) # NOQA: ignore=E501 + "There are no valid versions for %s that match '%s'" + % (spec.name, spec.versions)) class NoBuildError(spack.error.SpackError): - """Raised when a package is configured with the buildable option False, but no satisfactory external versions can be found""" def __init__(self, spec): - super(NoBuildError, self).__init__( - "The spec '%s' is configured as not buildable,and no matching external installs were found" % spec.name) # NOQA: ignore=E501 + msg = ("The spec '%s' is configured as not buildable, " + "and no matching external installs were found") + super(NoBuildError, self).__init__(msg % spec.name) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index a4e274893c..a4a4f5411e 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -158,35 +158,35 @@ section_schemas = { 'required': ['cc', 'cxx', 'f77', 'fc'], 'additionalProperties': False, 'properties': { - 'cc': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cxx': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'f77': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'fc': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' 
}]}, - 'cxxflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'fflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cppflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'ldflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'ldlibs': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}}}, - 'spec': { 'type': 'string'}, - 'operating_system': { 'type': 'string'}, - 'alias': { 'anyOf': [ {'type' : 'string'}, - {'type' : 'null' }]}, - 'modules': { 'anyOf': [ {'type' : 'string'}, - {'type' : 'null' }, - {'type': 'array'}, - ]} - },},},},},}, + 'cc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxx': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'f77': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxxflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cppflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldlibs': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}}}, + 'spec': {'type': 'string'}, + 'operating_system': {'type': 'string'}, + 'alias': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'modules': {'anyOf': [{'type': 'string'}, + {'type': 'null'}, + {'type': 'array'}, + ]} + }, }, }, }, }, }, 'mirrors': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack mirror configuration file schema', @@ -199,7 +199,7 @@ section_schemas = { 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type': 'string'},},},},}, + 'type': 'string'}, }, }, }, }, 'repos': { '$schema': 'http://json-schema.org/schema#', @@ -211,7 +211,7 @@ section_schemas = { 'type': 'array', 'default': [], 'items': { - 'type': 'string'},},},}, + 'type': 'string'}, }, }, }, 'packages': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack package configuration file schema', @@ -223,48 +223,48 @@ section_schemas = { 'default': {}, 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # package name + r'\w[\w-]*': { # package name 'type': 'object', 'default': {}, 'additionalProperties': False, 'properties': { 'version': { - 'type' : 'array', - 'default' : [], - 'items' : { 'anyOf' : [ { 'type' : 'string' }, - { 'type' : 'number'}]}}, #version strings + 'type': 'array', + 'default': [], + 'items': {'anyOf': [{'type': 'string'}, + {'type': 'number'}]}}, # version strings 'compiler': { - 'type' : 'array', - 'default' : [], - 'items' : { 'type' : 'string' } }, #compiler specs + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}}, # compiler specs 'buildable': { 'type': 'boolean', 'default': True, - }, + }, 'modules': { - 'type' : 'object', - 'default' : {}, - }, + 'type': 'object', + 'default': {}, + }, 'providers': { 'type': 'object', 'default': {}, 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type' : 'array', - 'default' : [], - 'items' : { 'type' : 'string' },},},}, + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}, }, }, }, 'paths': { - 'type' : 'object', - 'default' : {}, - }, + 'type': 'object', + 'default': {}, + }, 'variants': { - 'oneOf' : [ - { 'type' : 'string' }, - { 'type' : 'array', - 'items' : { 'type' : 'string' } }, - ], }, - },},},},},}, + 'oneOf': [ + {'type': 'string'}, + {'type': 'array', + 'items': {'type': 
'string'}}, + ], }, + }, }, }, }, }, }, 'targets': { '$schema': 'http://json-schema.org/schema#', @@ -277,8 +277,8 @@ section_schemas = { 'default': {}, 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # target name - 'type': 'string' ,},},},},}, + r'\w[\w-]*': { # target name + 'type': 'string', }, }, }, }, }, 'modules': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack module file configuration file schema', @@ -389,13 +389,15 @@ section_schemas = { }, 'tcl': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration'}, # Base configuration + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, {} # Specific tcl extensions ] }, 'dotkit': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration'}, # Base configuration + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, {} # Specific dotkit extensions ] }, @@ -428,7 +430,8 @@ def extend_with_default(validator_class): """ validate_properties = validator_class.VALIDATORS["properties"] - validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] + validate_pattern_properties = validator_class.VALIDATORS[ + "patternProperties"] def set_defaults(validator, properties, instance, schema): for property, subschema in properties.iteritems(): @@ -510,7 +513,8 @@ class ConfigScope(object): except jsonschema.ValidationError as e: raise ConfigSanityError(e, data) except (yaml.YAMLError, IOError) as e: - raise ConfigFileError("Error writing to config file: '%s'" % str(e)) + raise ConfigFileError( + "Error writing to config file: '%s'" % str(e)) def clear(self): """Empty cached config information.""" @@ -739,7 +743,8 @@ def spec_externals(spec): path = get_path_from_module(module) - external_spec = spack.spec.Spec(external_spec, external=path, external_module=module) + external_spec = spack.spec.Spec( + external_spec, external=path, external_module=module) if external_spec.satisfies(spec): external_specs.append(external_spec) @@ -773,6 +778,7 @@ def get_path(path, data): class ConfigFormatError(ConfigError): """Raised when a configuration format does not match its schema.""" + def __init__(self, validation_error, data): # Try to get line number from erroneous instance and its parent instance_mark = getattr(validation_error.instance, '_start_mark', None) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 16814429dc..f3dcdef0a9 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -119,6 +119,7 @@ class InstallRecord(object): class Database(object): + def __init__(self, root, db_dir=None): """Create a Database for Spack installations under ``root``. @@ -600,6 +601,7 @@ class Database(object): class CorruptDatabaseError(SpackError): + def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( "Spack database is corrupt: %s. %s." % (path, msg), @@ -607,6 +609,7 @@ class CorruptDatabaseError(SpackError): class InvalidDatabaseVersionError(SpackError): + def __init__(self, expected, found): super(InvalidDatabaseVersionError, self).__init__( "Expected database version %s but found version %s." 
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index e92dd6fb67..313bf48f0d 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -349,9 +349,10 @@ class CircularReferenceError(DirectiveError): class UnknownDependencyTypeError(DirectiveError): """This is raised when a dependency is of an unknown type.""" + def __init__(self, directive, package, deptype): super(UnknownDependencyTypeError, self).__init__( directive, - "Package '%s' cannot depend on a package via %s." % - (package, deptype)) + "Package '%s' cannot depend on a package via %s." + % (package, deptype)) self.package = package diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 8150a6da2b..0ae6f765f4 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -22,16 +22,13 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import re import os import exceptions -import hashlib import shutil import glob import tempfile import yaml -import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp import spack @@ -51,10 +48,10 @@ class DirectoryLayout(object): install, and they can use this to customize the nesting structure of spack installs. """ + def __init__(self, root): self.root = root - @property def hidden_file_paths(self): """Return a list of hidden files used by the directory layout. @@ -67,25 +64,21 @@ class DirectoryLayout(object): """ raise NotImplementedError() - def all_specs(self): """To be implemented by subclasses to traverse all specs for which there is a directory within the root. """ raise NotImplementedError() - def relative_path_for_spec(self, spec): """Implemented by subclasses to return a relative path from the install root to a unique location for the provided spec.""" raise NotImplementedError() - def create_install_directory(self, spec): """Creates the installation directory for a spec.""" raise NotImplementedError() - def check_installed(self, spec): """Checks whether a spec is installed. @@ -95,7 +88,6 @@ class DirectoryLayout(object): """ raise NotImplementedError() - def extension_map(self, spec): """Get a dict of currently installed extension packages for a spec. @@ -104,7 +96,6 @@ class DirectoryLayout(object): """ raise NotImplementedError() - def check_extension_conflict(self, spec, ext_spec): """Ensure that ext_spec can be activated in spec. @@ -113,7 +104,6 @@ class DirectoryLayout(object): """ raise NotImplementedError() - def check_activated(self, spec, ext_spec): """Ensure that ext_spec can be removed from spec. @@ -121,26 +111,22 @@ class DirectoryLayout(object): """ raise NotImplementedError() - def add_extension(self, spec, ext_spec): """Add to the list of currently installed extensions.""" raise NotImplementedError() - def remove_extension(self, spec, ext_spec): """Remove from the list of currently installed extensions.""" raise NotImplementedError() - def path_for_spec(self, spec): - """Return an absolute path from the root to a directory for the spec.""" + """Return absolute path from the root to a directory for the spec.""" _check_concrete(spec) path = self.relative_path_for_spec(spec) assert(not path.startswith(self.root)) return os.path.join(self.root, path) - def remove_install_directory(self, spec): """Removes a prefix and any empty parent directories from the root. 
Raised RemoveFailedError if something goes wrong. @@ -177,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout): only enabled variants are included in the install path. Disabled variants are omitted. """ + def __init__(self, root, **kwargs): super(YamlDirectoryLayout, self).__init__(root) self.metadata_dir = kwargs.get('metadata_dir', '.spack') @@ -191,12 +178,10 @@ class YamlDirectoryLayout(DirectoryLayout): # Cache of already written/read extension maps. self._extension_maps = {} - @property def hidden_file_paths(self): return (self.metadata_dir,) - def relative_path_for_spec(self, spec): _check_concrete(spec) @@ -208,20 +193,19 @@ class YamlDirectoryLayout(DirectoryLayout): spec.version, spec.dag_hash(self.hash_len)) - path = join_path(spec.architecture, + path = join_path( + spec.architecture, "%s-%s" % (spec.compiler.name, spec.compiler.version), dir_name) return path - def write_spec(self, spec, path): """Write a spec out to a file.""" _check_concrete(spec) with open(path, 'w') as f: spec.to_yaml(f) - def read_spec(self, path): """Read the contents of a file and parse them as a spec""" try: @@ -237,32 +221,26 @@ class YamlDirectoryLayout(DirectoryLayout): spec._mark_concrete() return spec - def spec_file_path(self, spec): """Gets full path to spec file""" _check_concrete(spec) return join_path(self.metadata_path(spec), self.spec_file_name) - def metadata_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir) - def build_log_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.build_log_name) - def build_env_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.build_env_name) - def build_packages_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.packages_dir) - def create_install_directory(self, spec): _check_concrete(spec) @@ -273,7 +251,6 @@ class YamlDirectoryLayout(DirectoryLayout): mkdirp(self.metadata_path(spec)) self.write_spec(spec, self.spec_file_path(spec)) - def check_installed(self, spec): _check_concrete(spec) path = self.path_for_spec(spec) @@ -284,7 +261,7 @@ class YamlDirectoryLayout(DirectoryLayout): if not os.path.isfile(spec_file_path): raise InconsistentInstallDirectoryError( - 'Inconsistent state: install prefix exists but contains no spec.yaml:', + 'Install prefix exists but contains no spec.yaml:', " " + path) installed_spec = self.read_spec(spec_file_path) @@ -297,7 +274,6 @@ class YamlDirectoryLayout(DirectoryLayout): raise InconsistentInstallDirectoryError( 'Spec file in %s does not match hash!' 
% spec_file_path) - def all_specs(self): if not os.path.isdir(self.root): return [] @@ -307,20 +283,17 @@ class YamlDirectoryLayout(DirectoryLayout): spec_files = glob.glob(pattern) return [self.read_spec(s) for s in spec_files] - def specs_by_hash(self): by_hash = {} for spec in self.all_specs(): by_hash[spec.dag_hash()] = spec return by_hash - def extension_file_path(self, spec): """Gets full path to an installed package's extension file""" _check_concrete(spec) return join_path(self.metadata_path(spec), self.extension_file_name) - def _write_extensions(self, spec, extensions): path = self.extension_file_path(spec) @@ -332,23 +305,22 @@ class YamlDirectoryLayout(DirectoryLayout): # write tmp file with tmp: yaml.dump({ - 'extensions' : [ - { ext.name : { - 'hash' : ext.dag_hash(), - 'path' : str(ext.prefix) + 'extensions': [ + {ext.name: { + 'hash': ext.dag_hash(), + 'path': str(ext.prefix) }} for ext in sorted(extensions.values())] }, tmp, default_flow_style=False) # Atomic update by moving tmpfile on top of old one. os.rename(tmp.name, path) - def _extension_map(self, spec): """Get a dict<name -> spec> for all extensions currently installed for this package.""" _check_concrete(spec) - if not spec in self._extension_maps: + if spec not in self._extension_maps: path = self.extension_file_path(spec) if not os.path.exists(path): self._extension_maps[spec] = {} @@ -363,14 +335,14 @@ class YamlDirectoryLayout(DirectoryLayout): dag_hash = entry[name]['hash'] prefix = entry[name]['path'] - if not dag_hash in by_hash: + if dag_hash not in by_hash: raise InvalidExtensionSpecError( "Spec %s not found in %s" % (dag_hash, prefix)) ext_spec = by_hash[dag_hash] - if not prefix == ext_spec.prefix: + if prefix != ext_spec.prefix: raise InvalidExtensionSpecError( - "Prefix %s does not match spec with hash %s: %s" + "Prefix %s does not match spec hash %s: %s" % (prefix, dag_hash, ext_spec)) exts[ext_spec.name] = ext_spec @@ -378,13 +350,11 @@ class YamlDirectoryLayout(DirectoryLayout): return self._extension_maps[spec] - def extension_map(self, spec): """Defensive copying version of _extension_map() for external API.""" _check_concrete(spec) return self._extension_map(spec).copy() - def check_extension_conflict(self, spec, ext_spec): exts = self._extension_map(spec) if ext_spec.name in exts: @@ -394,13 +364,11 @@ class YamlDirectoryLayout(DirectoryLayout): else: raise ExtensionConflictError(spec, ext_spec, installed_spec) - def check_activated(self, spec, ext_spec): exts = self._extension_map(spec) - if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]): + if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]): raise NoSuchExtensionError(spec, ext_spec) - def add_extension(self, spec, ext_spec): _check_concrete(spec) _check_concrete(ext_spec) @@ -413,7 +381,6 @@ class YamlDirectoryLayout(DirectoryLayout): exts[ext_spec.name] = ext_spec self._write_extensions(spec, exts) - def remove_extension(self, spec, ext_spec): _check_concrete(spec) _check_concrete(ext_spec) @@ -429,12 +396,14 @@ class YamlDirectoryLayout(DirectoryLayout): class DirectoryLayoutError(SpackError): """Superclass for directory layout errors.""" + def __init__(self, message, long_msg=None): super(DirectoryLayoutError, self).__init__(message, long_msg) class SpecHashCollisionError(DirectoryLayoutError): """Raised when there is a hash collision in an install layout.""" + def __init__(self, installed_spec, new_spec): super(SpecHashCollisionError, self).__init__( 'Specs %s and %s have the same SHA-1 prefix!' 
@@ -443,6 +412,7 @@ class SpecHashCollisionError(DirectoryLayoutError): class RemoveFailedError(DirectoryLayoutError): """Raised when a DirectoryLayout cannot remove an install prefix.""" + def __init__(self, installed_spec, prefix, error): super(RemoveFailedError, self).__init__( 'Could not remove prefix %s for %s : %s' @@ -452,12 +422,15 @@ class RemoveFailedError(DirectoryLayoutError): class InconsistentInstallDirectoryError(DirectoryLayoutError): """Raised when a package seems to be installed to the wrong place.""" + def __init__(self, message, long_msg=None): - super(InconsistentInstallDirectoryError, self).__init__(message, long_msg) + super(InconsistentInstallDirectoryError, self).__init__( + message, long_msg) class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): """Raised when create_install_directory is called unnecessarily.""" + def __init__(self, path): super(InstallDirectoryAlreadyExistsError, self).__init__( "Install path %s already exists!") @@ -473,22 +446,26 @@ class InvalidExtensionSpecError(DirectoryLayoutError): class ExtensionAlreadyInstalledError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, ext_spec): super(ExtensionAlreadyInstalledError, self).__init__( - "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec)) + "%s is already installed in %s" + % (ext_spec.short_spec, spec.short_spec)) class ExtensionConflictError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, ext_spec, conflict): super(ExtensionConflictError, self).__init__( - "%s cannot be installed in %s because it conflicts with %s"% ( - ext_spec.short_spec, spec.short_spec, conflict.short_spec)) + "%s cannot be installed in %s because it conflicts with %s" + % (ext_spec.short_spec, spec.short_spec, conflict.short_spec)) class NoSuchExtensionError(DirectoryLayoutError): """Raised when an extension isn't there on deactivate.""" + def __init__(self, spec, ext_spec): super(NoSuchExtensionError, self).__init__( - "%s cannot be removed from %s because it's not activated."% ( - ext_spec.short_spec, spec.short_spec)) + "%s cannot be removed from %s because it's not activated." + % (ext_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 41136ab2eb..613ece2f45 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1,4 +1,4 @@ -# +############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. 
# @@ -21,7 +21,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# +############################################################################## import collections import inspect import json @@ -287,7 +287,10 @@ class EnvironmentModifications(object): shell = '{shell}'.format(**info) shell_options = '{shell_options}'.format(**info) source_file = '{source_command} {file} {concatenate_on_success}' - dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"' # NOQA: ignore=E501 + + dump_cmd = "import os, json; print json.dumps(dict(os.environ))" + dump_environment = 'python -c "%s"' % dump_cmd + # Construct the command that will be executed command = [source_file.format(file=file, **info) for file in args] command.append(dump_environment) @@ -326,8 +329,10 @@ class EnvironmentModifications(object): for x in unset_variables: env.unset(x) # Variables that have been modified - common_variables = set(this_environment).intersection(set(after_source_env)) # NOQA: ignore=E501 - modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]] # NOQA: ignore=E501 + common_variables = set( + this_environment).intersection(set(after_source_env)) + modified_variables = [x for x in common_variables + if this_environment[x] != after_source_env[x]] def return_separator_if_any(first_value, second_value): separators = ':', ';' @@ -405,7 +410,7 @@ def set_or_unset_not_first(variable, changes, errstream): if indexes: good = '\t \t{context} at {filename}:{lineno}' nogood = '\t--->\t{context} at {filename}:{lineno}' - message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501 + message = "Suspicious requests to set or unset '{var}' found" errstream(message.format(var=variable)) for ii, item in enumerate(changes): print_format = nogood if ii in indexes else good diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 85ad2fe249..c94875e91a 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -27,21 +27,21 @@ import sys import llnl.util.tty as tty import spack + class SpackError(Exception): """This is the superclass for all Spack errors. Subclasses can be found in the modules they have to do with. 
""" + def __init__(self, message, long_message=None): super(SpackError, self).__init__() self.message = message self._long_message = long_message - @property def long_message(self): return self._long_message - def die(self): if spack.debug: sys.excepthook(*sys.exc_info()) @@ -52,21 +52,23 @@ class SpackError(Exception): print self.long_message os._exit(1) - def __str__(self): msg = self.message if self._long_message: msg += "\n %s" % self._long_message return msg + class UnsupportedPlatformError(SpackError): """Raised by packages when a platform is not supported""" + def __init__(self, message): super(UnsupportedPlatformError, self).__init__(message) class NoNetworkConnectionError(SpackError): """Raised when an operation needs an internet connection.""" + def __init__(self, message, url): super(NoNetworkConnectionError, self).__init__( "No network connection: " + str(message), diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index bcb33bd0e6..c69a23033c 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -356,6 +356,7 @@ class URLFetchStrategy(FetchStrategy): class CacheURLFetchStrategy(URLFetchStrategy): """The resource associated with a cache URL may be out of date.""" + def __init__(self, *args, **kwargs): super(CacheURLFetchStrategy, self).__init__(*args, **kwargs) @@ -836,6 +837,7 @@ def for_package_version(pkg, version): class FsCache(object): + def __init__(self, root): self.root = os.path.abspath(root) diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py index fb9ccf46b8..0a66166fd8 100644 --- a/lib/spack/spack/file_cache.py +++ b/lib/spack/spack/file_cache.py @@ -41,6 +41,7 @@ class FileCache(object): client code need not manage locks for cache entries. """ + def __init__(self, root): """Create a file cache object. @@ -131,6 +132,7 @@ class FileCache(object): """ class WriteContextManager(object): + def __enter__(cm): cm.orig_filename = self.cache_path(key) cm.orig_file = None diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 80d1199ef5..b875e9da99 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -136,6 +136,7 @@ NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states class AsciiGraph(object): + def __init__(self): # These can be set after initialization or after a call to # graph() to change behavior. 
@@ -288,22 +289,22 @@ class AsciiGraph(object): self._indent() for p in prev_ends: - advance(p, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 - advance(p + 1, lambda: [("|/", self._pos)]) # NOQA: ignore=E272 + advance(p, lambda: [("| ", self._pos)]) + advance(p + 1, lambda: [("|/", self._pos)]) if end >= 0: - advance(end + 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 - advance(start - 1, lambda: [("|", self._pos), ("_", end)]) # NOQA: ignore=E272 + advance(end + 1, lambda: [("| ", self._pos)]) + advance(start - 1, lambda: [("|", self._pos), ("_", end)]) else: - advance(start - 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(start - 1, lambda: [("| ", self._pos)]) if start >= 0: - advance(start, lambda: [("|", self._pos), ("/", end)]) # NOQA: ignore=E272 + advance(start, lambda: [("|", self._pos), ("/", end)]) if collapse: - advance(flen, lambda: [(" /", self._pos)]) # NOQA: ignore=E272 + advance(flen, lambda: [(" /", self._pos)]) else: - advance(flen, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(flen, lambda: [("| ", self._pos)]) self._set_state(BACK_EDGE, end, label) self._out.write("\n") @@ -438,8 +439,8 @@ class AsciiGraph(object): # Expand forward after doing all back connections if (i + 1 < len(self._frontier) and - len(self._frontier[i + 1]) == 1 and - self._frontier[i + 1][0] in self._frontier[i]): + len(self._frontier[i + 1]) == 1 and + self._frontier[i + 1][0] in self._frontier[i]): # We need to connect to the element to the right. # Keep lines straight by connecting directly and # avoiding unnecessary expand/contract. diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 902e488eca..c7c84defa0 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -45,6 +45,7 @@ from llnl.util.lang import memoized, list_modules from llnl.util.filesystem import join_path import spack + @memoized def all_hook_modules(): modules = [] @@ -58,6 +59,7 @@ def all_hook_modules(): class HookRunner(object): + def __init__(self, hook_name): self.hook_name = hook_name diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index bcbd68dfa0..070b309a43 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -23,8 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import spack - def pre_uninstall(pkg): assert(pkg.spec.concrete) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 0bbcfba6b4..f053e4405f 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -40,9 +40,8 @@ import spack.error import spack.url as url import spack.fetch_strategy as fs from spack.spec import Spec -from spack.stage import Stage from spack.version import * -from spack.util.compression import extension, allowed_archive +from spack.util.compression import allowed_archive def mirror_archive_filename(spec, fetcher): @@ -52,10 +51,10 @@ def mirror_archive_filename(spec, fetcher): if isinstance(fetcher, fs.URLFetchStrategy): if fetcher.expand_archive: - # If we fetch this version with a URLFetchStrategy, use URL's archive type + # If we fetch with a URLFetchStrategy, use URL's archive type ext = url.downloaded_file_extension(fetcher.url) else: - # If the archive shouldn't be expanded, don't check for its extension. + # If the archive shouldn't be expanded, don't check extension. 
ext = None else: # Otherwise we'll make a .tar.gz ourselves @@ -106,7 +105,9 @@ def get_matching_versions(specs, **kwargs): def suggest_archive_basename(resource): """ - Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types. + Return a tentative basename for an archive. + + Raises an exception if the name is not an allowed archive type. :param fetcher: :return: @@ -170,7 +171,7 @@ def create(path, specs, **kwargs): 'error': [] } - # Iterate through packages and download all the safe tarballs for each of them + # Iterate through packages and download all safe tarballs for each for spec in version_specs: add_single_spec(spec, mirror_root, categories, **kwargs) @@ -190,12 +191,15 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): fetcher = stage.fetcher if ii == 0: # create a subdirectory for the current package@version - archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) + archive_path = os.path.abspath(join_path( + mirror_root, mirror_archive_path(spec, fetcher))) name = spec.format("$_$@") else: resource = stage.resource - archive_path = join_path(subdir, suggest_archive_basename(resource)) - name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) + archive_path = join_path( + subdir, suggest_archive_basename(resource)) + name = "{resource} ({pkg}).".format( + resource=resource.name, pkg=spec.format("$_$@")) subdir = os.path.dirname(archive_path) mkdirp(subdir) @@ -217,15 +221,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): categories['present'].append(spec) else: categories['mirrored'].append(spec) + except Exception as e: if spack.debug: sys.excepthook(*sys.exc_info()) else: - tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message) + tty.warn("Error while fetching %s" + % spec.format('$_$@'), e.message) categories['error'].append(spec) class MirrorError(spack.error.SpackError): """Superclass of all mirror-creation related errors.""" + def __init__(self, msg, long_msg=None): super(MirrorError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 8ac6a77d13..debc6752b4 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -459,7 +459,8 @@ class EnvModule(object): yield self.environment_modifications_formats[type( command)].format(**command.args) except KeyError: - message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501 + message = ('Cannot handle command of type {command}: ' + 'skipping request') details = '{context} at {filename}:{lineno}' tty.warn(message.format(command=type(command))) tty.warn(details.format(**command.args)) @@ -494,7 +495,8 @@ class Dotkit(EnvModule): autoload_format = 'dk_op {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + default_naming_format = \ + '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): @@ -543,7 +545,8 @@ class TclModule(EnvModule): prerequisite_format = 'prereq {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + default_naming_format = \ + '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): @@ -554,7 +557,7 @@ class TclModule(EnvModule): timestamp = datetime.datetime.now() # TCL Modulefile header header = '#%Module1.0\n' - header += '## Module file created by spack 
(https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501 + header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp header += '##\n' header += '## %s\n' % self.spec.short_spec header += '##\n' @@ -584,10 +587,12 @@ class TclModule(EnvModule): for naming_dir, conflict_dir in zip( self.naming_scheme.split('/'), item.split('/')): if naming_dir != conflict_dir: - message = 'conflict scheme does not match naming scheme [{spec}]\n\n' # NOQA: ignore=E501 + message = 'conflict scheme does not match naming ' + message += 'scheme [{spec}]\n\n' message += 'naming scheme : "{nformat}"\n' message += 'conflict scheme : "{cformat}"\n\n' - message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501 + message += '** You may want to check your ' + message += '`modules.yaml` configuration file **\n' tty.error(message.format(spec=self.spec, nformat=self.naming_scheme, cformat=item)) diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index 0818f9092f..d1d1f32445 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -43,15 +43,13 @@ avoids overly complicated rat nests of if statements. Obviously, depending on the scenario, regular old conditionals might be clearer, so package authors should use their judgement. """ -import sys import functools -import collections from llnl.util.lang import * import spack.architecture import spack.error -from spack.spec import parse_anonymous_spec, Spec +from spack.spec import parse_anonymous_spec class SpecMultiMethod(object): @@ -89,13 +87,13 @@ class SpecMultiMethod(object): See the docs for decorators below for more details. """ + def __init__(self, default=None): self.method_list = [] self.default = default if default: functools.update_wrapper(self, default) - def register(self, spec, method): """Register a version of a method for a particular sys_type.""" self.method_list.append((spec, method)) @@ -105,12 +103,10 @@ class SpecMultiMethod(object): else: assert(self.__name__ == method.__name__) - def __get__(self, obj, objtype): """This makes __call__ support instance methods.""" return functools.partial(self.__call__, obj) - def __call__(self, package_self, *args, **kwargs): """Find the first method with a spec that matches the package's spec. If none is found, call the default @@ -127,7 +123,6 @@ class SpecMultiMethod(object): type(package_self), self.__name__, spec, [m[0] for m in self.method_list]) - def __str__(self): return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % ( self.default, self.method_list) @@ -195,11 +190,13 @@ class when(object): platform-specific versions. There's not much we can do to get around this because of the way decorators work. 
""" + def __init__(self, spec): pkg = get_calling_module_name() if spec is True: spec = pkg - self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None + self.spec = (parse_anonymous_spec(spec, pkg) + if spec is not False else None) def __call__(self, method): # Get the first definition of the method in the calling scope @@ -218,12 +215,14 @@ class when(object): class MultiMethodError(spack.error.SpackError): """Superclass for multimethod dispatch errors""" + def __init__(self, message): super(MultiMethodError, self).__init__(message) class NoSuchMethodError(spack.error.SpackError): """Raised when we can't find a version of a multi-method.""" + def __init__(self, cls, method_name, spec, possible_specs): super(NoSuchMethodError, self).__init__( "Package %s does not support %s called with %s. Options are: %s" diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py index dbd2775861..78807865b3 100644 --- a/lib/spack/spack/operating_systems/cnl.py +++ b/lib/spack/spack/operating_systems/cnl.py @@ -15,6 +15,7 @@ class Cnl(OperatingSystem): modules. If updated, user must make sure that version and name are updated to indicate that OS has been upgraded (or downgraded) """ + def __init__(self): name = 'CNL' version = '10' diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py index 2e3c72719b..6d70ae80b6 100644 --- a/lib/spack/spack/operating_systems/linux_distro.py +++ b/lib/spack/spack/operating_systems/linux_distro.py @@ -2,6 +2,7 @@ import re import platform as py_platform from spack.architecture import OperatingSystem + class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of @@ -9,6 +10,7 @@ class LinuxDistro(OperatingSystem): autodetection using the python module platform and the method platform.dist() """ + def __init__(self): distname, version, _ = py_platform.linux_distribution( full_distribution_name=False) diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py index f35b3ca577..3e5ab9b2e9 100644 --- a/lib/spack/spack/operating_systems/mac_os.py +++ b/lib/spack/spack/operating_systems/mac_os.py @@ -1,6 +1,7 @@ import platform as py_platform from spack.architecture import OperatingSystem + class MacOs(OperatingSystem): """This class represents the macOS operating system. This will be auto detected using the python platform.mac_ver. The macOS diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 25e07541d0..ff8c8e96bc 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -34,6 +34,7 @@ rundown on spack and how it differs from homebrew, look at the README. """ import os +import sys import re import textwrap import time @@ -178,12 +179,10 @@ class Package(object): Most software comes in nicely packaged tarballs, like this one: http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz - Taking a page from homebrew, spack deduces pretty much everything it needs to know from the URL above. If you simply type this: spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz - Spack will download the tarball, generate an md5 hash, figure out the version and the name of the package from the URL, and create a new package file for you with all the names and attributes set correctly. 
@@ -705,13 +704,13 @@ class Package(object): # Ask the user whether to skip the checksum if we're # interactive, but just fail if non-interactive. - checksum_msg = "Add a checksum or use --no-checksum to skip this check." # NOQA: ignore=E501 + ck_msg = "Add a checksum or use --no-checksum to skip this check." ignore_checksum = False if sys.stdout.isatty(): ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False) if ignore_checksum: - tty.msg("Fetching with no checksum.", checksum_msg) + tty.msg("Fetching with no checksum.", ck_msg) if not ignore_checksum: raise FetchError("Will not fetch %s" % @@ -1305,9 +1304,10 @@ class Package(object): continue for dep in aspec.traverse(deptype='run'): if self.spec == dep: + msg = ("Cannot deactivate %s because %s is activated " + "and depends on it.") raise ActivationError( - "Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501 - % (self.spec.short_spec, aspec.short_spec)) + msg % (self.spec.short_spec, aspec.short_spec)) self.extendee_spec.package.deactivate(self, **self.extendee_args) @@ -1564,6 +1564,7 @@ def make_executable(path): class CMakePackage(StagedPackage): + def make_make(self): import multiprocessing # number of jobs spack will to build with. @@ -1740,12 +1741,14 @@ class ExtensionError(PackageError): class ExtensionConflictError(ExtensionError): + def __init__(self, path): super(ExtensionConflictError, self).__init__( "Extension blocked by file: %s" % path) class ActivationError(ExtensionError): + def __init__(self, msg, long_msg=None): super(ActivationError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py index 8adf957e7f..1b88db2d7c 100644 --- a/lib/spack/spack/parse.py +++ b/lib/spack/spack/parse.py @@ -29,6 +29,7 @@ import spack.error class Token: """Represents tokens; generated from input by lexer and fed to parse().""" + def __init__(self, type, value='', start=0, end=0): self.type = type self.value = value @@ -51,11 +52,13 @@ class Token: class Lexer(object): """Base class for Lexers that keep track of line numbers.""" + def __init__(self, lexicon): self.scanner = re.Scanner(lexicon) def token(self, type, value=''): - return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0)) + return Token(type, value, + self.scanner.match.start(0), self.scanner.match.end(0)) def lex(self, text): tokens, remainder = self.scanner.scan(text) @@ -66,10 +69,11 @@ class Lexer(object): class Parser(object): """Base class for simple recursive descent parsers.""" + def __init__(self, lexer): - self.tokens = iter([]) # iterators over tokens, handled in order. Starts empty. - self.token = Token(None) # last accepted token starts at beginning of file - self.next = None # next token + self.tokens = iter([]) # iterators over tokens, handled in order. + self.token = Token(None) # last accepted token + self.next = None # next token self.lexer = lexer self.text = None @@ -82,11 +86,12 @@ class Parser(object): def push_tokens(self, iterable): """Adds all tokens in some iterable to the token stream.""" - self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens) + self.tokens = itertools.chain( + iter(iterable), iter([self.next]), self.tokens) self.gettok() def accept(self, id): - """Puts the next symbol in self.token if we like it. 
Then calls gettok()""" + """Put the next symbol in self.token if accepted, then call gettok()""" if self.next and self.next.is_a(id): self.token = self.next self.gettok() @@ -124,9 +129,9 @@ class Parser(object): return self.do_parse() - class ParseError(spack.error.SpackError): """Raised when we don't hit an error while parsing.""" + def __init__(self, message, string, pos): super(ParseError, self).__init__(message) self.string = string @@ -135,5 +140,6 @@ class ParseError(spack.error.SpackError): class LexError(ParseError): """Raised when we don't know how to lex something.""" + def __init__(self, message, string, pos): super(LexError, self).__init__(message, string, pos) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index c2e181be2f..0bd9f5d29d 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -24,7 +24,6 @@ ############################################################################## import os -import llnl.util.tty as tty from llnl.util.filesystem import join_path import spack @@ -59,7 +58,6 @@ class Patch(object): if not os.path.isfile(self.path): raise NoSuchPatchFileError(pkg_name, self.path) - def apply(self, stage): """Fetch this patch, if necessary, and apply it to the source code in the supplied stage. @@ -84,9 +82,9 @@ class Patch(object): patch_stage.destroy() - class NoSuchPatchFileError(spack.error.SpackError): """Raised when user specifies a patch file that doesn't exist.""" + def __init__(self, package, path): super(NoSuchPatchFileError, self).__init__( "No such patch file for package %s: %s" % (package, path)) diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py index e0eb76f336..91afdd04db 100644 --- a/lib/spack/spack/platforms/bgq.py +++ b/lib/spack/spack/platforms/bgq.py @@ -1,6 +1,7 @@ import os from spack.architecture import Platform, Target + class Bgq(Platform): priority = 30 front_end = 'power7' @@ -15,4 +16,3 @@ class Bgq(Platform): @classmethod def detect(self): return os.path.exists('/bgsys') - diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py index d47dd640f9..974ce3a3f9 100644 --- a/lib/spack/spack/platforms/darwin.py +++ b/lib/spack/spack/platforms/darwin.py @@ -2,6 +2,7 @@ import subprocess from spack.architecture import Platform, Target from spack.operating_systems.mac_os import MacOs + class Darwin(Platform): priority = 89 front_end = 'x86_64' @@ -21,6 +22,6 @@ class Darwin(Platform): @classmethod def detect(self): - platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE) + platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE) platform, _ = platform.communicate() return 'darwin' in platform.strip().lower() diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py index 4d3f59c320..38d2cdbfec 100644 --- a/lib/spack/spack/platforms/linux.py +++ b/lib/spack/spack/platforms/linux.py @@ -3,6 +3,7 @@ import platform from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro + class Linux(Platform): priority = 90 @@ -26,6 +27,6 @@ class Linux(Platform): @classmethod def detect(self): - platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE) + platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE) platform, _ = platform.communicate() return 'linux' in platform.strip().lower() diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py index 8fa2585a7a..c918211555 100644 --- 
a/lib/spack/spack/platforms/test.py +++ b/lib/spack/spack/platforms/test.py @@ -1,4 +1,27 @@ -import subprocess +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl @@ -9,7 +32,7 @@ class Test(Platform): front_end = 'x86_32' back_end = 'x86_64' default = 'x86_64' - + back_os = 'CNL10' default_os = 'CNL10' diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index f079c1ef8b..45a41c8e2b 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -156,7 +156,7 @@ class PreferredPackages(object): """Return True iff the named package has a list of preferred providers""" return bool(self._order_for_package(pkgname, 'providers', - provider_str, False)) + provider_str, False)) def spec_preferred_variants(self, pkgname): """Return a VariantMap of preferred variants and their values""" diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index b5fbb67c6e..3f9cd285e7 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -52,6 +52,7 @@ class ProviderIndex(object): matching implementation of MPI. """ + def __init__(self, specs=None, restrict=False): """Create a new ProviderIndex. diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index d751a98b35..2d8dc39648 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -68,6 +68,7 @@ NOT_PROVIDED = object() def _autospec(function): """Decorator that automatically converts the argument of a single-arg function to a Spec.""" + def converter(self, spec_like, *args, **kwargs): if not isinstance(spec_like, spack.spec.Spec): spec_like = spack.spec.Spec(spec_like) @@ -77,6 +78,7 @@ def _autospec(function): class SpackNamespace(ModuleType): """ Allow lazy loading of modules.""" + def __init__(self, namespace): super(SpackNamespace, self).__init__(namespace) self.__file__ = "(spack namespace)" @@ -112,6 +114,7 @@ class RepoPath(object): combined results of the Repos in its list instead of on a single package repository. 
""" + def __init__(self, *repo_dirs, **kwargs): # super-namespace for all packages in the RepoPath self.super_namespace = kwargs.get('namespace', repo_namespace) @@ -360,6 +363,7 @@ class Repo(object): A Python namespace where the repository's packages should live. """ + def __init__(self, root, namespace=repo_namespace): """Instantiate a package repository from a filesystem path. @@ -923,6 +927,7 @@ class PackageLoadError(spack.error.SpackError): class UnknownPackageError(PackageLoadError): """Raised when we encounter a package spack doesn't have.""" + def __init__(self, name, repo=None): msg = None if repo: @@ -935,6 +940,7 @@ class UnknownPackageError(PackageLoadError): class UnknownNamespaceError(PackageLoadError): """Raised when we encounter an unknown namespace""" + def __init__(self, namespace): super(UnknownNamespaceError, self).__init__( "Unknown namespace: %s" % namespace) @@ -942,6 +948,7 @@ class UnknownNamespaceError(PackageLoadError): class FailedConstructorError(PackageLoadError): """Raised when a package's class constructor fails.""" + def __init__(self, name, exc_type, exc_obj, exc_tb): super(FailedConstructorError, self).__init__( "Class constructor failed for package '%s'." % name, diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py index 24b675f8da..1d4d448298 100644 --- a/lib/spack/spack/resource.py +++ b/lib/spack/spack/resource.py @@ -31,9 +31,11 @@ package to enable optional features. class Resource(object): + """Represents an optional resource to be fetched by a package. + + Aggregates a name, a fetcher, a destination and a placement. """ - Represents an optional resource. Aggregates a name, a fetcher, a destination and a placement - """ + def __init__(self, name, fetcher, destination, placement): self.name = name self.fetcher = fetcher diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index a37b39be67..0d72d454c6 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -166,6 +166,7 @@ def colorize_spec(spec): """Returns a spec colorized according to the colors specified in color_formats.""" class insert_color: + def __init__(self): self.last = None @@ -186,6 +187,7 @@ class CompilerSpec(object): """The CompilerSpec field represents the compiler or range of compiler versions that a package should be built with. CompilerSpecs have a name and a version list. """ + def __init__(self, *args): nargs = len(args) if nargs == 1: @@ -296,6 +298,7 @@ class DependencySpec(object): - spec: the spack.spec.Spec description of a dependency. - deptypes: strings representing the type of dependency this is. """ + def __init__(self, spec, deptypes): self.spec = spec self.deptypes = deptypes @@ -317,6 +320,7 @@ class VariantSpec(object): on the particular package being built, and each named variant can be enabled or disabled. 
""" + def __init__(self, name, value): self.name = name self.value = value @@ -447,9 +451,9 @@ class FlagMap(HashableMap): sorted_keys = filter( lambda flag: self[flag] != [], sorted(self.keys())) cond_symbol = ' ' if len(sorted_keys) > 0 else '' - return cond_symbol + ' '.join(str(key) + '=\"' + ' '.join(str(f) - for f in self[key]) + '\"' - for key in sorted_keys) + return cond_symbol + ' '.join( + str(key) + '=\"' + ' '.join( + str(f) for f in self[key]) + '\"' for key in sorted_keys) class DependencyMap(HashableMap): @@ -910,7 +914,7 @@ class Spec(object): params = dict((name, v.value) for name, v in self.variants.items()) params.update(dict((name, value) - for name, value in self.compiler_flags.items())) + for name, value in self.compiler_flags.items())) if params: d['parameters'] = params @@ -1598,8 +1602,8 @@ class Spec(object): raise UnsatisfiableSpecNameError(self.name, other.name) if (other.namespace is not None and - self.namespace is not None and - other.namespace != self.namespace): + self.namespace is not None and + other.namespace != self.namespace): raise UnsatisfiableSpecNameError(self.fullname, other.fullname) if not self.versions.overlaps(other.versions): @@ -1753,8 +1757,8 @@ class Spec(object): # namespaces either match, or other doesn't require one. if (other.namespace is not None and - self.namespace is not None and - self.namespace != other.namespace): + self.namespace is not None and + self.namespace != other.namespace): return False if self.versions and other.versions: if not self.versions.satisfies(other.versions, strict=strict): @@ -1849,7 +1853,7 @@ class Spec(object): # compatible with mpich2) for spec in self.virtual_dependencies(): if (spec.name in other_index and - not other_index.providers_for(spec)): + not other_index.providers_for(spec)): return False for spec in other.virtual_dependencies(): @@ -2345,6 +2349,7 @@ _lexer = SpecLexer() class SpecParser(spack.parse.Parser): + def __init__(self): super(SpecParser, self).__init__(_lexer) self.previous = None diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index b8441bdac4..22ddd4c97e 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -40,6 +40,7 @@ from spack.test.mock_packages_test import * class ArchitectureTest(MockPackagesTest): + def setUp(self): super(ArchitectureTest, self).setUp() self.platform = spack.architecture.platform() diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index ea2b164462..f3e4bb31d2 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -45,7 +45,8 @@ test_command = [ '-llib1', '-llib2', 'arg4', '-Wl,--end-group', - '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath', + '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', + '-rpath', '-Xlinker', '/fourth/rpath', '-llib3', '-llib4', 'arg5', 'arg6'] @@ -67,7 +68,7 @@ class CompilerTest(unittest.TestCase): os.environ['SPACK_FC'] = self.realcc os.environ['SPACK_PREFIX'] = self.prefix - os.environ['SPACK_ENV_PATH']="test" + os.environ['SPACK_ENV_PATH'] = "test" os.environ['SPACK_DEBUG_LOG_DIR'] = "." 
os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7" os.environ['SPACK_SHORT_SPEC'] = "foo@1.2" @@ -97,16 +98,13 @@ class CompilerTest(unittest.TestCase): if 'SPACK_DEPENDENCIES' in os.environ: del os.environ['SPACK_DEPENDENCIES'] - def tearDown(self): shutil.rmtree(self.tmp_deps, True) - def check_cc(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cc(*args, output=str).strip(), expected) - def check_cxx(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cxx(*args, output=str).strip(), expected) @@ -115,46 +113,46 @@ class CompilerTest(unittest.TestCase): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.fc(*args, output=str).strip(), expected) - def check_ld(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.ld(*args, output=str).strip(), expected) - def check_cpp(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cpp(*args, output=str).strip(), expected) - def test_vcheck_mode(self): self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck") self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck") - self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck") - + self.check_cc('dump-mode', ['-I/include', + '-V', '-o', 'output'], "vcheck") def test_cpp_mode(self): self.check_cc('dump-mode', ['-E'], "cpp") self.check_cpp('dump-mode', [], "cpp") - def test_as_mode(self): self.check_cc('dump-mode', ['-S'], "as") - def test_ccld_mode(self): self.check_cc('dump-mode', [], "ccld") self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld") - self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") - self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") - + self.check_cc('dump-mode', ['foo.c', '-o', + 'foo', '-Wl,-rpath,foo'], "ccld") + self.check_cc( + 'dump-mode', + ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], + "ccld") def test_ld_mode(self): self.check_ld('dump-mode', [], "ld") - self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld") - + self.check_ld( + 'dump-mode', + ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], + "ld") def test_flags(self): os.environ['SPACK_LDFLAGS'] = '-L foo' @@ -176,10 +174,11 @@ class CompilerTest(unittest.TestCase): # Test cppflags added properly in cpp mode self.check_cpp('dump-args', test_command, "cpp " + - '-g -O1 ' + - ' '.join(test_command)) + '-g -O1 ' + + ' '.join(test_command)) - # Test ldflags, cppflags, and language specific flags are added in proper order + # Test ldflags, cppflags, and language specific flags are added in + # proper order self.check_cc('dump-args', test_command, self.realcc + ' ' + '-Wl,-rpath,' + self.prefix + '/lib ' + @@ -191,14 +190,14 @@ class CompilerTest(unittest.TestCase): '-lfoo') self.check_cxx('dump-args', test_command, - self.realcc + ' ' + - '-Wl,-rpath,' + self.prefix + '/lib ' + - '-Wl,-rpath,' + self.prefix + '/lib64 ' + - '-g -O1 ' + - '-Werror ' + - '-L foo ' + - ' '.join(test_command) + ' ' + - '-lfoo') + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-g -O1 ' + + '-Werror ' + + '-L foo ' + + ' '.join(test_command) + ' ' 
+ + '-lfoo') self.check_fc('dump-args', test_command, self.realcc + ' ' + @@ -210,9 +209,8 @@ class CompilerTest(unittest.TestCase): ' '.join(test_command) + ' ' + '-lfoo') - os.environ['SPACK_LDFLAGS']='' - os.environ['SPACK_LDLIBS']='' - + os.environ['SPACK_LDFLAGS'] = '' + os.environ['SPACK_LDLIBS'] = '' def test_dep_rpath(self): """Ensure RPATHs for root package are added.""" @@ -222,7 +220,6 @@ class CompilerTest(unittest.TestCase): '-Wl,-rpath,' + self.prefix + '/lib64 ' + ' '.join(test_command)) - def test_dep_include(self): """Ensure a single dependency include directory is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep4 @@ -233,7 +230,6 @@ class CompilerTest(unittest.TestCase): '-I' + self.dep4 + '/include ' + ' '.join(test_command)) - def test_dep_lib(self): """Ensure a single dependency RPATH is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep2 @@ -245,7 +241,6 @@ class CompilerTest(unittest.TestCase): '-Wl,-rpath,' + self.dep2 + '/lib64 ' + ' '.join(test_command)) - def test_all_deps(self): """Ensure includes and RPATHs for all deps are added. """ os.environ['SPACK_DEPENDENCIES'] = ':'.join([ @@ -274,7 +269,6 @@ class CompilerTest(unittest.TestCase): ' '.join(test_command)) - def test_ld_deps(self): """Ensure no (extra) -I args or -Wl, are passed in ld mode.""" os.environ['SPACK_DEPENDENCIES'] = ':'.join([ diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index 36a4a73fe6..3a0ce32e6c 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -33,16 +33,17 @@ import spack.test.mock_database class TestModule(spack.test.mock_database.MockDatabase): def _get_module_files(self, args): - return [ - modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501 - ] + return [modules.module_types[args.module_type](spec).file_name + for spec in args.specs] def test_module_common_operations(self): parser = argparse.ArgumentParser() module.setup_parser(parser) + # Try to remove a non existing module [tcl] args = parser.parse_args(['rm', 'doesnotexist']) self.assertRaises(SystemExit, module.module, parser, args) + # Remove existing modules [tcl] args = parser.parse_args(['rm', '-y', 'mpileaks']) module_files = self._get_module_files(args) @@ -51,22 +52,28 @@ class TestModule(spack.test.mock_database.MockDatabase): module.module(parser, args) for item in module_files: self.assertFalse(os.path.exists(item)) + # Add them back [tcl] args = parser.parse_args(['refresh', '-y', 'mpileaks']) module.module(parser, args) for item in module_files: self.assertTrue(os.path.exists(item)) + # TODO : test the --delete-tree option # TODO : this requires having a separate directory for test modules + # Try to find a module with multiple matches args = parser.parse_args(['find', 'mpileaks']) self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module with no matches args = parser.parse_args(['find', 'doesnotexist']) self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module args = parser.parse_args(['find', 'libelf']) module.module(parser, args) + # Remove existing modules [dotkit] args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks']) module_files = self._get_module_files(args) @@ -75,6 +82,7 @@ class TestModule(spack.test.mock_database.MockDatabase): module.module(parser, args) for item in module_files: self.assertFalse(os.path.exists(item)) + # Add them back [dotkit] args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 
'mpileaks']) module.module(parser, args) diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py index d89814154b..fa806ee6f4 100644 --- a/lib/spack/spack/test/cmd/test_compiler_cmd.py +++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py @@ -12,7 +12,9 @@ from spack.test.mock_packages_test import * test_version = '4.5-spacktest' + class MockArgs(object): + def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None): self.add_paths = add_paths self.scope = scope @@ -52,14 +54,12 @@ done class CompilerCmdTest(MockPackagesTest): """ Test compiler commands for add and remove """ - def test_compiler_remove(self): args = MockArgs(all=True, compiler_spec='gcc@4.5.0') spack.cmd.compiler.compiler_remove(args) compilers = spack.compilers.all_compilers() self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers) - def test_compiler_add(self): # compilers available by default. old_compilers = set(spack.compilers.all_compilers()) @@ -75,7 +75,8 @@ class CompilerCmdTest(MockPackagesTest): new_compilers = set(spack.compilers.all_compilers()) new_compiler = new_compilers - old_compilers self.assertTrue(new_compiler) - self.assertTrue(new_compiler.pop().version == Version(test_version)) + self.assertTrue(new_compiler.pop().version == + Version(test_version)) finally: shutil.rmtree(compiler_dir, ignore_errors=True) diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py index 9fffaace40..4ccb9ddbf4 100644 --- a/lib/spack/spack/test/cmd/uninstall.py +++ b/lib/spack/spack/test/cmd/uninstall.py @@ -28,6 +28,7 @@ from spack.cmd.uninstall import uninstall class MockArgs(object): + def __init__(self, packages, all=False, force=False, dependents=False): self.packages = packages self.all = all @@ -37,6 +38,7 @@ class MockArgs(object): class TestUninstall(spack.test.mock_database.MockDatabase): + def test_uninstall(self): parser = None # Multiple matches diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 252d77e66b..0822e44db8 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -32,79 +32,80 @@ from ordereddict_backport import OrderedDict from spack.test.mock_packages_test import * # Some sample compiler config data -a_comps = [ +a_comps = [ {'compiler': { 'paths': { - "cc" : "/gcc473", + "cc": "/gcc473", "cxx": "/g++473", "f77": None, - "fc" : None - }, + "fc": None + }, 'modules': None, 'spec': 'gcc@4.7.3', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "/gcc450", + "cc": "/gcc450", "cxx": "/g++450", "f77": 'gfortran', - "fc" : 'gfortran' - }, + "fc": 'gfortran' + }, 'modules': None, 'spec': 'gcc@4.5.0', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "<overwritten>", + "cc": "<overwritten>", "cxx": "<overwritten>", "f77": '<overwritten>', - "fc" : '<overwritten>' }, + "fc": '<overwritten>'}, 'modules': None, 'spec': 'clang@3.3', 'operating_system': 'CNL10' - }} + }} ] b_comps = [ {'compiler': { 'paths': { - "cc" : "/icc100", + "cc": "/icc100", "cxx": "/icp100", "f77": None, - "fc" : None - }, + "fc": None + }, 'modules': None, 'spec': 'icc@10.0', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "/icc111", + "cc": "/icc111", "cxx": "/icp111", "f77": 'ifort', - "fc" : 'ifort' - }, + "fc": 'ifort' + }, 'modules': None, 'spec': 'icc@11.1', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "<overwritten>", + "cc": "<overwritten>", "cxx": 
"<overwritten>", "f77": '<overwritten>', - "fc" : '<overwritten>' }, + "fc": '<overwritten>'}, 'modules': None, 'spec': 'clang@3.3', 'operating_system': 'CNL10' - }} + }} ] # Some Sample repo data -repos_low = [ "/some/path" ] -repos_high = [ "/some/other/path" ] +repos_low = ["/some/path"] +repos_high = ["/some/other/path"] + class ConfigTest(MockPackagesTest): @@ -112,14 +113,15 @@ class ConfigTest(MockPackagesTest): super(ConfigTest, self).setUp() self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') spack.config.config_scopes = OrderedDict() - spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')) - spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high')) + spack.config.ConfigScope( + 'test_low_priority', os.path.join(self.tmp_dir, 'low')) + spack.config.ConfigScope('test_high_priority', + os.path.join(self.tmp_dir, 'high')) def tearDown(self): super(ConfigTest, self).tearDown() shutil.rmtree(self.tmp_dir, True) - def check_config(self, comps, *compiler_names): """Check that named compilers in comps match Spack's config.""" config = spack.config.get_config('compilers') @@ -146,7 +148,7 @@ class ConfigTest(MockPackagesTest): spack.config.update_config('repos', repos_low, 'test_low_priority') spack.config.update_config('repos', repos_high, 'test_high_priority') config = spack.config.get_config('repos') - self.assertEqual(config, repos_high+repos_low) + self.assertEqual(config, repos_high + repos_low) def test_write_key_in_memory(self): # Write b_comps "on top of" a_comps. @@ -157,7 +159,6 @@ class ConfigTest(MockPackagesTest): self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3') - def test_write_key_to_disk(self): # Write b_comps "on top of" a_comps. spack.config.update_config('compilers', a_comps, 'test_low_priority') diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 0d44a27b7e..22b1f17890 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -71,6 +71,7 @@ def _print_ref_counts(): class DatabaseTest(MockDatabase): + def test_005_db_exists(self): """Make sure db cache file exists after creating.""" index_file = join_path(self.install_path, '.spack-db', 'index.yaml') diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 74669fe8a2..2d0565acae 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -49,13 +49,11 @@ class DirectoryLayoutTest(MockPackagesTest): self.tmpdir = tempfile.mkdtemp() self.layout = YamlDirectoryLayout(self.tmpdir) - def tearDown(self): super(DirectoryLayoutTest, self).tearDown() shutil.rmtree(self.tmpdir, ignore_errors=True) self.layout = None - def test_read_and_write_spec(self): """This goes through each package in spack and creates a directory for it. It then ensures that the spec for the directory's @@ -67,8 +65,8 @@ class DirectoryLayoutTest(MockPackagesTest): for pkg in packages: if pkg.name.startswith('external'): - #External package tests cannot be installed - continue + # External package tests cannot be installed + continue spec = pkg.spec # If a spec fails to concretize, just skip it. 
If it is a @@ -115,7 +113,6 @@ class DirectoryLayoutTest(MockPackagesTest): self.assertFalse(os.path.isdir(install_dir)) self.assertFalse(os.path.exists(install_dir)) - def test_handle_unknown_package(self): """This test ensures that spack can at least do *some* operations with packages that are installed but that it @@ -166,7 +163,6 @@ class DirectoryLayoutTest(MockPackagesTest): spack.repo.swap(mock_db) - def test_find(self): """Test that finding specs within an install layout works.""" packages = list(spack.repo.all_packages())[:max_packages] @@ -175,13 +171,14 @@ class DirectoryLayoutTest(MockPackagesTest): installed_specs = {} for pkg in packages: if pkg.name.startswith('external'): - #External package tests cannot be installed + # External package tests cannot be installed continue spec = pkg.spec.concretized() installed_specs[spec.name] = spec self.layout.create_install_directory(spec) - # Make sure all the installed specs appear in DirectoryLayout.all_specs() + # Make sure all the installed specs appear in + # DirectoryLayout.all_specs() found_specs = dict((s.name, s) for s in self.layout.all_specs()) for name, spec in found_specs.items(): self.assertTrue(name in found_specs) diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py index 2396961888..9b5d75f273 100644 --- a/lib/spack/spack/test/environment.py +++ b/lib/spack/spack/test/environment.py @@ -38,7 +38,8 @@ class EnvironmentTest(unittest.TestCase): os.environ['UNSET_ME'] = 'foo' os.environ['EMPTY_PATH_LIST'] = '' os.environ['PATH_LIST'] = '/path/second:/path/third' - os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' # NOQA: ignore=E501 + os.environ['REMOVE_PATH_LIST'] = \ + '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' def tearDown(self): pass diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 4de65760d7..0d1a8fe949 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -87,33 +87,29 @@ class GitFetchTest(MockPackagesTest): self.assert_rev(rev) - def test_fetch_master(self): """Test a default git checkout with no commit or tag specified.""" self.try_fetch('master', self.repo.r0_file, { - 'git' : self.repo.path + 'git': self.repo.path }) - def test_fetch_branch(self): """Test fetching a branch.""" self.try_fetch(self.repo.branch, self.repo.branch_file, { - 'git' : self.repo.path, - 'branch' : self.repo.branch + 'git': self.repo.path, + 'branch': self.repo.branch }) - def test_fetch_tag(self): """Test fetching a tag.""" self.try_fetch(self.repo.tag, self.repo.tag_file, { - 'git' : self.repo.path, - 'tag' : self.repo.tag + 'git': self.repo.path, + 'tag': self.repo.tag }) - def test_fetch_commit(self): """Test fetching a particular commit.""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'git' : self.repo.path, - 'commit' : self.repo.r1 + 'git': self.repo.path, + 'commit': self.repo.r1 }) diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 292ffba949..44af6730a1 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -83,17 +83,15 @@ class HgFetchTest(MockPackagesTest): self.assertEqual(self.repo.get_rev(), rev) - def test_fetch_default(self): """Test a default hg checkout with no commit or tag specified.""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'hg' : self.repo.path + 'hg': self.repo.path }) - def test_fetch_rev0(self): """Test fetching a branch.""" self.try_fetch(self.repo.r0, 
self.repo.r0_file, { - 'hg' : self.repo.path, - 'revision' : self.repo.r0 + 'hg': self.repo.path, + 'revision': self.repo.r0 }) diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 390ec096a9..232d5aeeaf 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -55,7 +55,6 @@ class InstallTest(MockPackagesTest): spack.install_layout = YamlDirectoryLayout(self.tmpdir) spack.installed_db = Database(self.tmpdir) - def tearDown(self): super(InstallTest, self).tearDown() self.repo.destroy() @@ -68,14 +67,12 @@ class InstallTest(MockPackagesTest): spack.installed_db = self.orig_db shutil.rmtree(self.tmpdir, ignore_errors=True) - def fake_fetchify(self, pkg): """Fake the URL for a package so it downloads from a file.""" fetcher = FetchStrategyComposite() fetcher.append(URLFetchStrategy(self.repo.url)) pkg.fetcher = fetcher - def test_install_and_uninstall(self): # Get a basic concrete spec for the trivial install package. spec = Spec('trivial_install_test_package') @@ -90,11 +87,10 @@ class InstallTest(MockPackagesTest): try: pkg.do_install() pkg.do_uninstall() - except Exception, e: + except Exception: pkg.remove_prefix() raise - def test_install_environment(self): spec = Spec('cmake-client').concretized() @@ -104,6 +100,6 @@ class InstallTest(MockPackagesTest): pkg = spec.package try: pkg.do_install() - except Exception, e: + except Exception: pkg.remove_prefix() raise diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index de40991b57..5d0a7430b6 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -53,16 +53,13 @@ class LinkTreeTest(unittest.TestCase): def tearDown(self): self.stage.destroy() - def check_file_link(self, filename): self.assertTrue(os.path.isfile(filename)) self.assertTrue(os.path.islink(filename)) - def check_dir(self, filename): self.assertTrue(os.path.isdir(filename)) - def test_merge_to_new_directory(self): with working_dir(self.stage.path): self.link_tree.merge('dest') @@ -79,7 +76,6 @@ class LinkTreeTest(unittest.TestCase): self.assertFalse(os.path.exists('dest')) - def test_merge_to_existing_directory(self): with working_dir(self.stage.path): @@ -112,7 +108,6 @@ class LinkTreeTest(unittest.TestCase): self.assertFalse(os.path.isfile('dest/c/d/6')) self.assertFalse(os.path.isfile('dest/c/d/e/7')) - def test_merge_with_empty_directories(self): with working_dir(self.stage.path): mkdirp('dest/f/g') @@ -132,7 +127,6 @@ class LinkTreeTest(unittest.TestCase): self.assertTrue(os.path.isdir('dest/a/b/h')) self.assertTrue(os.path.isdir('dest/f/g')) - def test_ignore(self): with working_dir(self.stage.path): touchp('source/.spec') diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index b24050aa74..fb96539897 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -329,6 +329,7 @@ class LockTest(unittest.TestCase): def test_transaction_with_context_manager(self): class TestContextManager(object): + def __enter__(self): vals['entered'] = True @@ -388,6 +389,7 @@ class LockTest(unittest.TestCase): def test_transaction_with_context_manager_and_exception(self): class TestContextManager(object): + def __enter__(self): vals['entered'] = True diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index b7a45a3f72..87a43a529a 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -38,6 +38,7 @@ from spack.util.environment import 
path_put_first class MakeExecutableTest(unittest.TestCase): + def setUp(self): self.tmpdir = tempfile.mkdtemp() @@ -49,34 +50,30 @@ class MakeExecutableTest(unittest.TestCase): path_put_first('PATH', [self.tmpdir]) - def tearDown(self): shutil.rmtree(self.tmpdir) - def test_make_normal(self): make = MakeExecutable('make', 8) self.assertEqual(make(output=str).strip(), '-j8') self.assertEqual(make('install', output=str).strip(), '-j8 install') - def test_make_explicit(self): make = MakeExecutable('make', 8) self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') - + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') def test_make_one_job(self): make = MakeExecutable('make', 1) self.assertEqual(make(output=str).strip(), '') self.assertEqual(make('install', output=str).strip(), 'install') - def test_make_parallel_false(self): make = MakeExecutable('make', 8) self.assertEqual(make(parallel=False, output=str).strip(), '') - self.assertEqual(make('install', parallel=False, output=str).strip(), 'install') - + self.assertEqual(make('install', parallel=False, + output=str).strip(), 'install') def test_make_parallel_disabled(self): make = MakeExecutable('make', 8) @@ -100,26 +97,29 @@ class MakeExecutableTest(unittest.TestCase): del os.environ['SPACK_NO_PARALLEL_MAKE'] - def test_make_parallel_precedence(self): make = MakeExecutable('make', 8) # These should work os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true' self.assertEqual(make(parallel=True, output=str).strip(), '') - self.assertEqual(make('install', parallel=True, output=str).strip(), 'install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), 'install') os.environ['SPACK_NO_PARALLEL_MAKE'] = '1' self.assertEqual(make(parallel=True, output=str).strip(), '') - self.assertEqual(make('install', parallel=True, output=str).strip(), 'install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), 'install') # These don't disable (false and random string) os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false' self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar' self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') del os.environ['SPACK_NO_PARALLEL_MAKE'] diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index b682d4e097..d6d7b30b7c 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -35,6 +35,7 @@ exclude = ['.hg', '.git', '.svn'] class MirrorTest(MockPackagesTest): + def setUp(self): """Sets up a mock package and a mock repo for each fetch strategy, to ensure that the mirror can create archives for each of them. @@ -42,7 +43,6 @@ class MirrorTest(MockPackagesTest): super(MirrorTest, self).setUp() self.repos = {} - def tearDown(self): """Destroy all the stages created by the repos in setup.""" super(MirrorTest, self).tearDown() @@ -50,7 +50,6 @@ class MirrorTest(MockPackagesTest): repo.destroy() self.repos.clear() - def set_up_package(self, name, MockRepoClass, url_attr): """Set up a mock package to be mirrored. 
Each package needs us to: @@ -71,16 +70,14 @@ class MirrorTest(MockPackagesTest): v = next(iter(pkg.versions)) pkg.versions[v][url_attr] = repo.url - def check_mirror(self): with Stage('spack-mirror-test') as stage: mirror_root = join_path(stage.path, 'test-mirror') # register mirror with spack config - mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } + mirrors = {'spack-mirror-test': 'file://' + mirror_root} spack.config.update_config('mirrors', mirrors) - os.chdir(stage.path) spack.mirror.create( mirror_root, self.repos, no_checksum=True) @@ -110,16 +107,18 @@ class MirrorTest(MockPackagesTest): original_path = mock_repo.path if 'svn' in name: # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') + original_path = join_path( + mock_repo.path, 'checked_out') svn('checkout', mock_repo.url, original_path) dcmp = dircmp(original_path, pkg.stage.source_path) - # make sure there are no new files in the expanded tarball + # make sure there are no new files in the expanded + # tarball self.assertFalse(dcmp.right_only) # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) + self.assertTrue( + all(l in exclude for l in dcmp.left_only)) spack.do_checksum = saved_checksum_setting - def test_git_mirror(self): self.set_up_package('git-test', MockGitRepo, 'git') self.check_mirror() diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py index da01e82bfa..d5867f06ec 100644 --- a/lib/spack/spack/test/mock_database.py +++ b/lib/spack/spack/test/mock_database.py @@ -33,6 +33,7 @@ from spack.test.mock_packages_test import MockPackagesTest class MockDatabase(MockPackagesTest): + def _mock_install(self, spec): s = Spec(spec) s.concretize() diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 9d96622a6e..82c2712b0e 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -155,7 +155,9 @@ packages: externalmodule@1.0%gcc@4.5.0: external-module """ + class MockPackagesTest(unittest.TestCase): + def initmock(self): # Use the mock packages database for these tests. 
This allows # us to set up contrived packages that don't interfere with @@ -172,7 +174,8 @@ class MockPackagesTest(unittest.TestCase): self.mock_user_config = os.path.join(self.temp_config, 'user') mkdirp(self.mock_site_config) mkdirp(self.mock_user_config) - for confs in [('compilers.yaml', mock_compiler_config), ('packages.yaml', mock_packages_config)]: + for confs in [('compilers.yaml', mock_compiler_config), + ('packages.yaml', mock_packages_config)]: conf_yaml = os.path.join(self.mock_site_config, confs[0]) with open(conf_yaml, 'w') as f: f.write(confs[1]) @@ -209,7 +212,6 @@ class MockPackagesTest(unittest.TestCase): pkg.dependencies[spec.name] = {Spec(pkg_name): spec} pkg._deptypes[spec.name] = set(deptypes) - def cleanmock(self): """Restore the real packages path after any test.""" spack.repo.swap(self.db) @@ -226,10 +228,8 @@ class MockPackagesTest(unittest.TestCase): shutil.rmtree(spack.share_path, ignore_errors=True) spack.share_path = self.real_share_path - def setUp(self): self.initmock() - def tearDown(self): self.cleanmock() diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py index 386af282e7..0ae7dbd516 100644 --- a/lib/spack/spack/test/mock_repo.py +++ b/lib/spack/spack/test/mock_repo.py @@ -40,6 +40,7 @@ tar = which('tar', required=True) class MockRepo(object): + def __init__(self, stage_name, repo_name): """This creates a stage where some archive/repo files can be staged for testing spack's fetch strategies.""" @@ -50,7 +51,6 @@ class MockRepo(object): self.path = join_path(self.stage.path, repo_name) mkdirp(self.path) - def destroy(self): """Destroy resources associated with this mock repo.""" if self.stage: @@ -90,6 +90,7 @@ class MockArchive(MockRepo): class MockVCSRepo(MockRepo): + def __init__(self, stage_name, repo_name): """This creates a stage and a repo directory within the stage.""" super(MockVCSRepo, self).__init__(stage_name, repo_name) @@ -100,6 +101,7 @@ class MockVCSRepo(MockRepo): class MockGitRepo(MockVCSRepo): + def __init__(self): super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo') @@ -147,6 +149,7 @@ class MockGitRepo(MockVCSRepo): class MockSvnRepo(MockVCSRepo): + def __init__(self): super(MockSvnRepo, self).__init__('mock-svn-stage', 'mock-svn-repo') @@ -176,6 +179,7 @@ class MockSvnRepo(MockVCSRepo): class MockHgRepo(MockVCSRepo): + def __init__(self): super(MockHgRepo, self).__init__('mock-hg-stage', 'mock-hg-repo') self.url = 'file://' + self.path diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index c233ea4fd6..a885374080 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -25,15 +25,10 @@ """ Test for multi_method dispatch. 
""" -import unittest - import spack from spack.multimethod import * from spack.version import * -from spack.spec import Spec -from spack.multimethod import when from spack.test.mock_packages_test import * -from spack.version import * class MultiMethodTest(MockPackagesTest): @@ -42,7 +37,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod@2.0') self.assertRaises(NoSuchMethodError, pkg.no_version_2) - def test_one_version_match(self): pkg = spack.repo.get('multimethod@1.0') self.assertEqual(pkg.no_version_2(), 1) @@ -53,7 +47,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod@4.0') self.assertEqual(pkg.no_version_2(), 4) - def test_version_overlap(self): pkg = spack.repo.get('multimethod@2.0') self.assertEqual(pkg.version_overlap(), 1) @@ -61,7 +54,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod@5.0') self.assertEqual(pkg.version_overlap(), 2) - def test_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@3.0.4') self.assertEqual(pkg.mpi_version(), 3) @@ -72,7 +64,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod^mpich@1.0') self.assertEqual(pkg.mpi_version(), 1) - def test_undefined_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@0.4') self.assertEqual(pkg.mpi_version(), 1) @@ -80,7 +71,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod^mpich@1.4') self.assertEqual(pkg.mpi_version(), 1) - def test_default_works(self): pkg = spack.repo.get('multimethod%gcc') self.assertEqual(pkg.has_a_default(), 'gcc') @@ -91,21 +81,19 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod%pgi') self.assertEqual(pkg.has_a_default(), 'default') - def test_target_match(self): platform = spack.architecture.platform() targets = platform.targets.values() for target in targets[:-1]: - pkg = spack.repo.get('multimethod target='+target.name) + pkg = spack.repo.get('multimethod target=' + target.name) self.assertEqual(pkg.different_by_target(), target.name) - pkg = spack.repo.get('multimethod target='+targets[-1].name) + pkg = spack.repo.get('multimethod target=' + targets[-1].name) if len(targets) == 1: self.assertEqual(pkg.different_by_target(), targets[-1].name) else: self.assertRaises(NoSuchMethodError, pkg.different_by_target) - def test_dependency_match(self): pkg = spack.repo.get('multimethod^zmpi') self.assertEqual(pkg.different_by_dep(), 'zmpi') @@ -118,7 +106,6 @@ class MultiMethodTest(MockPackagesTest): pkg = spack.repo.get('multimethod^foobar') self.assertEqual(pkg.different_by_dep(), 'mpich') - def test_virtual_dep_match(self): pkg = spack.repo.get('multimethod^mpich2') self.assertEqual(pkg.different_by_virtual_dep(), 2) diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index b38ecd6179..7927fc8e60 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -32,7 +32,6 @@ class NamespaceTrieTest(unittest.TestCase): def setUp(self): self.trie = NamespaceTrie() - def test_add_single(self): self.trie['foo'] = 'bar' @@ -40,7 +39,6 @@ class NamespaceTrieTest(unittest.TestCase): self.assertTrue(self.trie.has_value('foo')) self.assertEqual(self.trie['foo'], 'bar') - def test_add_multiple(self): self.trie['foo.bar'] = 'baz' @@ -54,7 +52,6 @@ class NamespaceTrieTest(unittest.TestCase): self.assertFalse(self.trie.is_prefix('foo.bar.baz')) self.assertFalse(self.trie.has_value('foo.bar.baz')) - def test_add_three(self): # add a three-level namespace 
self.trie['foo.bar.baz'] = 'quux' @@ -89,7 +86,6 @@ class NamespaceTrieTest(unittest.TestCase): self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux')) self.assertFalse(self.trie.has_value('foo.bar.baz.quux')) - def test_add_none_single(self): self.trie['foo'] = None self.assertTrue(self.trie.is_prefix('foo')) @@ -99,8 +95,6 @@ class NamespaceTrieTest(unittest.TestCase): self.assertFalse(self.trie.is_prefix('foo.bar')) self.assertFalse(self.trie.has_value('foo.bar')) - - def test_add_none_multiple(self): self.trie['foo.bar'] = None diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py index ed5f6ff8ad..8723f7244d 100644 --- a/lib/spack/spack/test/operating_system.py +++ b/lib/spack/spack/test/operating_system.py @@ -1,18 +1,39 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## """ Test checks if the operating_system class is created correctly and that the functions are using the correct operating_system. Also checks whether the operating_system correctly uses the compiler_strategy """ - import unittest -import os -import platform from spack.platforms.cray_xc import CrayXc from spack.platforms.linux import Linux from spack.platforms.darwin import Darwin from spack.operating_system.linux_distro import LinuxDistro -from spack.operating_system.mac_os import MacOs from spack.operating_system.cnl import ComputeNodeLinux + class TestOperatingSystem(unittest.TestCase): def setUp(self): @@ -32,7 +53,7 @@ class TestOperatingSystem(unittest.TestCase): self.assertEquals(self.cray_operating_sys.compiler_strategy, "PATH") def test_cray_back_end_operating_system(self): - self.assertIsInstance(self.cray_back_os,ComputeNodeLinux) + self.assertIsInstance(self.cray_back_os, ComputeNodeLinux) def test_cray_back_end_compiler_strategy(self): self.assertEquals(self.cray_back_os.compiler_strategy, "MODULES") @@ -43,7 +64,6 @@ class TestOperatingSystem(unittest.TestCase): def test_linux_compiler_strategy(self): self.assertEquals(self.linux_operating_sys.compiler_strategy, "PATH") - def test_cray_front_end_compiler_list(self): """ Operating systems will now be in charge of finding compilers. 
So, depending on which operating system you want to build for diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py index b5ba0ecf35..a9a2b9abf5 100644 --- a/lib/spack/spack/test/optional_deps.py +++ b/lib/spack/spack/test/optional_deps.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - from spack.spec import Spec from spack.test.mock_packages_test import * + class ConcretizeTest(MockPackagesTest): def check_normalize(self, spec_string, expected): @@ -34,10 +34,10 @@ class ConcretizeTest(MockPackagesTest): self.assertEqual(spec, expected) self.assertTrue(spec.eq_dag(expected)) - def test_normalize_simple_conditionals(self): self.check_normalize('optional-dep-test', Spec('optional-dep-test')) - self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a')) + self.check_normalize('optional-dep-test~a', + Spec('optional-dep-test~a')) self.check_normalize('optional-dep-test+a', Spec('optional-dep-test+a', Spec('a'))) @@ -45,7 +45,6 @@ class ConcretizeTest(MockPackagesTest): self.check_normalize('optional-dep-test a=true', Spec('optional-dep-test a=true', Spec('a'))) - self.check_normalize('optional-dep-test a=true', Spec('optional-dep-test+a', Spec('a'))) @@ -55,25 +54,29 @@ class ConcretizeTest(MockPackagesTest): self.check_normalize('optional-dep-test%intel', Spec('optional-dep-test%intel', Spec('c'))) - self.check_normalize('optional-dep-test%intel@64.1', - Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d'))) + self.check_normalize( + 'optional-dep-test%intel@64.1', + Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d'))) - self.check_normalize('optional-dep-test%intel@64.1.2', - Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d'))) + self.check_normalize( + 'optional-dep-test%intel@64.1.2', + Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d'))) self.check_normalize('optional-dep-test%clang@35', Spec('optional-dep-test%clang@35', Spec('e'))) - def test_multiple_conditionals(self): - self.check_normalize('optional-dep-test+a@1.1', - Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b'))) + self.check_normalize( + 'optional-dep-test+a@1.1', + Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b'))) - self.check_normalize('optional-dep-test+a%intel', - Spec('optional-dep-test+a%intel', Spec('a'), Spec('c'))) + self.check_normalize( + 'optional-dep-test+a%intel', + Spec('optional-dep-test+a%intel', Spec('a'), Spec('c'))) - self.check_normalize('optional-dep-test@1.1%intel', - Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c'))) + self.check_normalize( + 'optional-dep-test@1.1%intel', + Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c'))) self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a', Spec('optional-dep-test@1.1%intel@64.1.2+a', @@ -83,14 +86,12 @@ class ConcretizeTest(MockPackagesTest): Spec('optional-dep-test@1.1%clang@36.5+a', Spec('b'), Spec('a'), Spec('e'))) - def test_chained_mpi(self): self.check_normalize('optional-dep-test-2+mpi', Spec('optional-dep-test-2+mpi', Spec('optional-dep-test+mpi', Spec('mpi')))) - def test_default_variant(self): spec = Spec('optional-dep-test-3') spec.concretize() @@ -104,7 +105,6 @@ class ConcretizeTest(MockPackagesTest): spec.concretize() self.assertTrue('b' in spec) - def test_transitive_chain(self): # Each of these dependencies comes from a conditional # dependency 
on another. This requires iterating to evaluate diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 9198986f5d..c3c3923855 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -38,12 +38,10 @@ class PackageSanityTest(unittest.TestCase): for name in spack.repo.all_package_names(): spack.repo.get(name) - def test_get_all_packages(self): """Get all packages once and make sure that works.""" self.check_db() - def test_get_all_mock_packages(self): """Get the mock packages once each too.""" db = RepoPath(spack.mock_packages_path) @@ -51,7 +49,6 @@ class PackageSanityTest(unittest.TestCase): self.check_db() spack.repo.swap(db) - def test_url_versions(self): """Check URLs for regular packages, if they are explicitly defined.""" for pkg in spack.repo.all_packages(): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index bea42bb33a..fdd079a8f7 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - import spack from llnl.util.filesystem import join_path from spack.repository import Repo @@ -33,33 +32,26 @@ from spack.util.naming import mod_to_class class PackagesTest(MockPackagesTest): def test_load_package(self): - pkg = spack.repo.get('mpich') - + spack.repo.get('mpich') def test_package_name(self): pkg = spack.repo.get('mpich') self.assertEqual(pkg.name, 'mpich') - def test_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('mpich') self.assertEqual(filename, - join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py')) - - - def test_package_name(self): - pkg = spack.repo.get('mpich') - self.assertEqual(pkg.name, 'mpich') - + join_path(spack.mock_packages_path, + 'packages', 'mpich', 'package.py')) def test_nonexisting_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('some-nonexisting-package') self.assertEqual( filename, - join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py')) - + join_path(spack.mock_packages_path, + 'packages', 'some-nonexisting-package', 'package.py')) def test_package_class_names(self): self.assertEqual('Mpich', mod_to_class('mpich')) @@ -68,37 +60,32 @@ class PackagesTest(MockPackagesTest): self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective')) self.assertEqual('_3db', mod_to_class('3db')) - # # Below tests target direct imports of spack packages from the # spack.pkg namespace # def test_import_package(self): - import spack.pkg.builtin.mock.mpich - + import spack.pkg.builtin.mock.mpich # noqa def test_import_package_as(self): - import spack.pkg.builtin.mock.mpich as mp - + import spack.pkg.builtin.mock.mpich as mp # noqa def test_import_class_from_package(self): - from spack.pkg.builtin.mock.mpich import Mpich - + from spack.pkg.builtin.mock.mpich import Mpich # noqa def test_import_module_from_package(self): - from spack.pkg.builtin.mock import mpich - + from spack.pkg.builtin.mock import mpich # noqa def test_import_namespace_container_modules(self): - import spack.pkg - import spack.pkg as p - from spack import pkg + import spack.pkg # noqa + import spack.pkg as p # noqa + from spack import pkg # noqa - import 
spack.pkg.builtin - import spack.pkg.builtin as b - from spack.pkg import builtin + import spack.pkg.builtin # noqa + import spack.pkg.builtin as b # noqa + from spack.pkg import builtin # noqa - import spack.pkg.builtin.mock - import spack.pkg.builtin.mock as m - from spack.pkg.builtin import mock + import spack.pkg.builtin.mock # noqa + import spack.pkg.builtin.mock as m # noqa + from spack.pkg.builtin import mock # noqa diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py index 3419d600b8..0c772a0d2d 100644 --- a/lib/spack/spack/test/pattern.py +++ b/lib/spack/spack/test/pattern.py @@ -41,6 +41,7 @@ class CompositeTest(unittest.TestCase): raise NotImplemented('subtract not implemented') class One(Base): + def add(self): Base.counter += 1 @@ -48,6 +49,7 @@ class CompositeTest(unittest.TestCase): Base.counter -= 1 class Two(Base): + def add(self): Base.counter += 2 diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index 6c09effc56..5af55bdc5f 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -36,7 +36,8 @@ import llnl.util.tty as tty import pyqver2 import spack -spack_max_version = (2,6) +spack_max_version = (2, 6) + class PythonVersionTest(unittest.TestCase): @@ -51,12 +52,10 @@ class PythonVersionTest(unittest.TestCase): if re.match(r'^[^.#].*\.py$', filename): yield os.path.join(root, filename) - def package_py_files(self): for name in spack.repo.all_package_names(): yield spack.repo.filename_for_package_name(name) - def check_python_versions(self, *files): # dict version -> filename -> reasons all_issues = {} @@ -66,7 +65,7 @@ class PythonVersionTest(unittest.TestCase): versions = pyqver2.get_versions(pyfile.read()) for ver, reasons in versions.items(): if ver > spack_max_version: - if not ver in all_issues: + if ver not in all_issues: all_issues[ver] = {} all_issues[ver][fn] = reasons @@ -87,7 +86,7 @@ class PythonVersionTest(unittest.TestCase): tty.error("These files require version %d.%d:" % v) maxlen = max(len(f) for f, prob in msgs) - fmt = "%%-%ds%%s" % (maxlen+3) + fmt = "%%-%ds%%s" % (maxlen + 3) print fmt % ('File', 'Reason') print fmt % ('-' * (maxlen), '-' * 20) for msg in msgs: @@ -95,10 +94,8 @@ class PythonVersionTest(unittest.TestCase): self.assertTrue(len(all_issues) == 0) - def test_core_module_compatibility(self): self.check_python_versions(*self.pyfiles(spack.lib_path)) - def test_package_module_compatibility(self): self.check_python_versions(*self.pyfiles(spack.packages_path)) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py index 4ce854a1d8..12abce7b35 100644 --- a/lib/spack/spack/test/sbang.py +++ b/lib/spack/spack/test/sbang.py @@ -44,6 +44,7 @@ last_line = "last!\n" class SbangTest(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 8522431fbb..8f61c7ac76 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -455,6 +455,7 @@ class SpecDagTest(MockPackagesTest): run3 -b-> build3 """ + def test_deptype_traversal(self): dag = Spec('dtuse') dag.normalize() diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index b174e5305c..79ffc99298 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -22,18 +22,18 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 
Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest import spack.architecture from spack.spec import * from spack.test.mock_packages_test import * + class SpecSematicsTest(MockPackagesTest): """This tests satisfies(), constrain() and other semantic operations on specs.""" - # ================================================================================ + # ======================================================================== # Utility functions to set everything up. - # ================================================================================ + # ======================================================================== def check_satisfies(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) try: @@ -49,7 +49,6 @@ class SpecSematicsTest(MockPackagesTest): # right by left. Reverse is not always true. right.copy().constrain(left) - def check_unsatisfiable(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) try: @@ -62,7 +61,6 @@ class SpecSematicsTest(MockPackagesTest): self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left) - def check_constrain(self, expected, spec, constraint): exp = Spec(expected) spec = Spec(spec) @@ -70,53 +68,48 @@ class SpecSematicsTest(MockPackagesTest): spec.constrain(constraint) self.assertEqual(exp, spec) - def check_constrain_changed(self, spec, constraint): spec = Spec(spec) self.assertTrue(spec.constrain(constraint)) - def check_constrain_not_changed(self, spec, constraint): spec = Spec(spec) self.assertFalse(spec.constrain(constraint)) - def check_invalid_constraint(self, spec, constraint): spec = Spec(spec) constraint = Spec(constraint) self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint) - - # ================================================================================ + # ======================================================================== # Satisfiability - # ================================================================================ + # ======================================================================== def test_satisfies(self): self.check_satisfies('libelf@0.8.13', '@0:1') self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1') - def test_satisfies_namespace(self): self.check_satisfies('builtin.mpich', 'mpich') self.check_satisfies('builtin.mock.mpich', 'mpich') - # TODO: only works for deps now, but shouldn't we allow this for root spec? + # TODO: only works for deps now, but shouldn't we allow for root spec? 
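
The spec_semantics.py changes above shorten the section banners to 79 columns and re-wrap long assertions by breaking the argument list inside the call's parentheses. A small illustrative sketch, not taken from the commit (check_satisfies below is a stand-in function, not the test helper), of why that style needs no backslashes and no "# NOQA: ignore=E501" escape hatch:

    def check_satisfies(spec, anon_spec, concrete=False):
        print('%s satisfies %s (concrete=%r)' % (spec, anon_spec, concrete))

    # Written on one line, this call runs past 79 characters and pycodestyle
    # flags it as E501. Splitting the arguments inside the parentheses uses
    # Python's implicit line continuation, so each physical line stays short.
    check_satisfies(
        'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich',
        concrete=False)
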
# self.check_satisfies('builtin.mock.mpich', 'mpi') self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich') self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich') - def test_satisfies_namespaced_dep(self): """Ensure spec from same or unspecified namespace satisfies namespace constraint.""" self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich') self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi') - self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich') - - self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich') + self.check_satisfies( + 'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich') + self.check_unsatisfiable( + 'mpileaks ^builtin.mock.mpich', '^builtin.mpich') def test_satisfies_compiler(self): self.check_satisfies('foo%gcc', '%gcc') @@ -124,7 +117,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_unsatisfiable('foo%intel', '%gcc') self.check_unsatisfiable('foo%intel', '%pgi') - def test_satisfies_compiler_version(self): self.check_satisfies('foo%gcc', '%gcc@4.7.2') self.check_satisfies('foo%intel', '%intel@4.7.2') @@ -139,7 +131,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7') self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3') - def test_satisfies_architecture(self): self.check_satisfies( 'foo platform=test target=frontend os=frontend', @@ -151,7 +142,6 @@ class SpecSematicsTest(MockPackagesTest): 'foo platform=test target=default_target os=default_os', 'platform=test target=default_target os=default_os') - def test_satisfies_dependencies(self): self.check_satisfies('mpileaks^mpich', '^mpich') self.check_satisfies('mpileaks^zmpi', '^zmpi') @@ -159,7 +149,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_unsatisfiable('mpileaks^mpich', '^zmpi') self.check_unsatisfiable('mpileaks^zmpi', '^mpich') - def test_satisfies_dependency_versions(self): self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3') self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0') @@ -173,7 +162,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_unsatisfiable( 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6') - def test_satisfies_virtual_dependencies(self): self.check_satisfies('mpileaks^mpi', '^mpi') self.check_satisfies('mpileaks^mpi', '^mpich') @@ -181,7 +169,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_satisfies('mpileaks^mpi', '^zmpi') self.check_unsatisfiable('mpileaks^mpich', '^zmpi') - def test_satisfies_virtual_dependency_versions(self): self.check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6') self.check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6') @@ -197,26 +184,23 @@ class SpecSematicsTest(MockPackagesTest): self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich2') self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0') - def test_satisfies_matching_variant(self): self.check_satisfies('mpich+foo', 'mpich+foo') self.check_satisfies('mpich~foo', 'mpich~foo') self.check_satisfies('mpich foo=1', 'mpich foo=1') - #confirm that synonymous syntax works correctly + # confirm that synonymous syntax works correctly self.check_satisfies('mpich+foo', 'mpich foo=True') self.check_satisfies('mpich foo=true', 'mpich+foo') self.check_satisfies('mpich~foo', 'mpich foo=FALSE') self.check_satisfies('mpich foo=False', 'mpich~foo') - def test_satisfies_unconstrained_variant(self): # only asked for mpich, no constraints. Either will do. 
self.check_satisfies('mpich+foo', 'mpich') self.check_satisfies('mpich~foo', 'mpich') self.check_satisfies('mpich foo=1', 'mpich') - def test_unsatisfiable_variants(self): # This case is different depending on whether the specs are concrete. @@ -230,24 +214,21 @@ class SpecSematicsTest(MockPackagesTest): self.check_unsatisfiable('mpich', 'mpich~foo', True) self.check_unsatisfiable('mpich', 'mpich foo=1', True) - def test_unsatisfiable_variant_mismatch(self): # No matchi in specs self.check_unsatisfiable('mpich~foo', 'mpich+foo') self.check_unsatisfiable('mpich+foo', 'mpich~foo') self.check_unsatisfiable('mpich foo=1', 'mpich foo=2') - def test_satisfies_matching_compiler_flag(self): self.check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"') - self.check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"') - + self.check_satisfies('mpich cppflags="-O3 -Wall"', + 'mpich cppflags="-O3 -Wall"') def test_satisfies_unconstrained_compiler_flag(self): # only asked for mpich, no constraints. Any will do. self.check_satisfies('mpich cppflags="-O3"', 'mpich') - def test_unsatisfiable_compiler_flag(self): # This case is different depending on whether the specs are concrete. @@ -257,11 +238,10 @@ class SpecSematicsTest(MockPackagesTest): # 'mpich' is concrete: self.check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True) - def test_unsatisfiable_compiler_flag_mismatch(self): # No matchi in specs - self.check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"') - + self.check_unsatisfiable( + 'mpich cppflags="-O3"', 'mpich cppflags="-O2"') def test_satisfies_virtual(self): # Don't use check_satisfies: it checks constrain() too, and @@ -270,25 +250,30 @@ class SpecSematicsTest(MockPackagesTest): self.assertTrue(Spec('mpich2').satisfies(Spec('mpi'))) self.assertTrue(Spec('zmpi').satisfies(Spec('mpi'))) - def test_satisfies_virtual_dep_with_virtual_constraint(self): """Ensure we can satisfy virtual constraints when there are multiple vdep providers in the specs.""" - self.assertTrue(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^openblas')) - self.assertFalse(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^openblas')) - - self.assertFalse(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^netlib-blas')) - self.assertTrue(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^netlib-blas')) - - - # ================================================================================ + self.assertTrue( + Spec('netlib-lapack ^openblas').satisfies( + 'netlib-lapack ^openblas')) + self.assertFalse( + Spec('netlib-lapack ^netlib-blas').satisfies( + 'netlib-lapack ^openblas')) + + self.assertFalse( + Spec('netlib-lapack ^openblas').satisfies( + 'netlib-lapack ^netlib-blas')) + self.assertTrue( + Spec('netlib-lapack ^netlib-blas').satisfies( + 'netlib-lapack ^netlib-blas')) + + # ======================================================================== # Indexing specs - # ================================================================================ + # ======================================================================== def test_self_index(self): s = Spec('callpath') self.assertTrue(s['callpath'] == s) - def test_dep_index(self): s = Spec('callpath') s.normalize() @@ -304,7 +289,6 @@ class SpecSematicsTest(MockPackagesTest): self.assertTrue(s['libelf'].name == 'libelf') self.assertTrue(s['mpi'].name == 'mpi') - def test_spec_contains_deps(self): s = Spec('callpath') s.normalize() @@ -313,7 +297,6 @@ class 
SpecSematicsTest(MockPackagesTest): self.assertTrue('libelf' in s) self.assertTrue('mpi' in s) - def test_virtual_index(self): s = Spec('callpath') s.concretize() @@ -327,7 +310,6 @@ class SpecSematicsTest(MockPackagesTest): s_zmpi = Spec('callpath ^zmpi') s_zmpi.concretize() - self.assertTrue(s['mpi'].name != 'mpi') self.assertTrue(s_mpich['mpi'].name == 'mpich') self.assertTrue(s_mpich2['mpi'].name == 'mpich2') @@ -336,29 +318,34 @@ class SpecSematicsTest(MockPackagesTest): for spec in [s, s_mpich, s_mpich2, s_zmpi]: self.assertTrue('mpi' in spec) - - # ================================================================================ + # ======================================================================== # Constraints - # ================================================================================ + # ======================================================================== def test_constrain_variants(self): self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3') self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6', - 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7') + 'libelf@0:2.5%gcc@2:4.6', + 'libelf@2.1:3%gcc@4.5:4.7') self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo') - self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo') + self.check_constrain('libelf+debug+foo', + 'libelf+debug', 'libelf+debug+foo') - self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1') - self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1') + self.check_constrain('libelf debug=2 foo=1', + 'libelf debug=2', 'libelf foo=1') + self.check_constrain('libelf debug=2 foo=1', + 'libelf debug=2', 'libelf debug=2 foo=1') self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo') - self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo') - + self.check_constrain('libelf+debug~foo', + 'libelf+debug', 'libelf+debug~foo') def test_constrain_compiler_flags(self): - self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cppflags="-Wall"') - self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"') - + self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', + 'libelf cflags="-O3"', 'libelf cppflags="-Wall"') + self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', + 'libelf cflags="-O3"', + 'libelf cflags="-O3" cppflags="-Wall"') def test_constrain_architecture(self): self.check_constrain('libelf target=default_target os=default_os', @@ -369,21 +356,24 @@ class SpecSematicsTest(MockPackagesTest): 'libelf target=default_target os=default_os') def test_constrain_compiler(self): - self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7') - self.check_constrain('libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7') - + self.check_constrain('libelf %gcc@4.4.7', + 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7') + self.check_constrain('libelf %gcc@4.4.7', + 'libelf', 'libelf %gcc@4.4.7') def test_invalid_constraint(self): self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3') - self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7') + self.check_invalid_constraint( + 'libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7') self.check_invalid_constraint('libelf+debug', 'libelf~debug') self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo') self.check_invalid_constraint('libelf debug=2', 'libelf debug=1') 
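
The constraint-test rewrites above exist purely to get each line at or under 79 characters, the limit pycodestyle enforces as E501. As a stand-in illustration, not Spack's actual tooling, a few lines of Python are enough to spot offending lines; the file name used below is only an example.

    def long_lines(filename, limit=79):
        """Yield (line number, length) for lines longer than limit."""
        with open(filename) as f:
            for number, line in enumerate(f, start=1):
                stripped = line.rstrip('\n')
                if len(stripped) > limit:
                    yield number, len(stripped)

    if __name__ == '__main__':
        path = 'lib/spack/spack/test/spec_semantics.py'
        for number, length in long_lines(path):
            print('line %d is %d characters long' % (number, length))
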
- self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"') + self.check_invalid_constraint( + 'libelf cppflags="-O3"', 'libelf cppflags="-O2"') self.check_invalid_constraint('libelf platform=test target=be os=be', - 'libelf target=fe os=fe') + 'libelf target=fe os=fe') def test_constrain_changed(self): self.check_constrain_changed('libelf', '@1.0') @@ -396,9 +386,10 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_changed('libelf', 'cppflags="-O3"') platform = spack.architecture.platform() - self.check_constrain_changed('libelf', 'target='+platform.target('default_target').name) - self.check_constrain_changed('libelf', 'os='+platform.operating_system('default_os').name) - + self.check_constrain_changed( + 'libelf', 'target=' + platform.target('default_target').name) + self.check_constrain_changed( + 'libelf', 'os=' + platform.operating_system('default_os').name) def test_constrain_not_changed(self): self.check_constrain_not_changed('libelf', 'libelf') @@ -409,12 +400,13 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_not_changed('libelf+debug', '+debug') self.check_constrain_not_changed('libelf~debug', '~debug') self.check_constrain_not_changed('libelf debug=2', 'debug=2') - self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"') + self.check_constrain_not_changed( + 'libelf cppflags="-O3"', 'cppflags="-O3"') platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_not_changed('libelf target='+default_target, 'target='+default_target) - + self.check_constrain_not_changed( + 'libelf target=' + default_target, 'target=' + default_target) def test_constrain_dependency_changed(self): self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0') @@ -426,18 +418,25 @@ class SpecSematicsTest(MockPackagesTest): platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_changed('libelf^foo', 'libelf^foo target='+default_target) - + self.check_constrain_changed( + 'libelf^foo', 'libelf^foo target=' + default_target) def test_constrain_dependency_not_changed(self): self.check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0') - self.check_constrain_not_changed('libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0') + self.check_constrain_not_changed( + 'libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0') self.check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc') - self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5') - self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug') - self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug') - self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"') + self.check_constrain_not_changed( + 'libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5') + self.check_constrain_not_changed( + 'libelf^foo+debug', 'libelf^foo+debug') + self.check_constrain_not_changed( + 'libelf^foo~debug', 'libelf^foo~debug') + self.check_constrain_not_changed( + 'libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"') platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_not_changed('libelf^foo target='+default_target, 'libelf^foo target='+default_target) + self.check_constrain_not_changed( + 'libelf^foo target=' + default_target, + 'libelf^foo target=' + default_target) diff --git a/lib/spack/spack/test/spec_syntax.py 
b/lib/spack/spack/test/spec_syntax.py index 4a534d7b5c..3079288c77 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -55,17 +55,22 @@ complex_lex = [Token(ID, 'mvapich_foo'), class SpecSyntaxTest(unittest.TestCase): - # ================================================================================ + # ======================================================================== # Parse checks - # ================================================================================ + # ======================================================================== + def check_parse(self, expected, spec=None, remove_arch=True): """Assert that the provided spec is able to be parsed. - If this is called with one argument, it assumes that the string is - canonical (i.e., no spaces and ~ instead of - for variants) and that it - will convert back to the string it came from. - If this is called with two arguments, the first argument is the expected - canonical form and the second is a non-canonical input to be parsed. + If this is called with one argument, it assumes that the + string is canonical (i.e., no spaces and ~ instead of - for + variants) and that it will convert back to the string it came + from. + + If this is called with two arguments, the first argument is + the expected canonical form and the second is a non-canonical + input to be parsed. + """ if spec is None: spec = expected @@ -74,9 +79,8 @@ class SpecSyntaxTest(unittest.TestCase): parsed = (" ".join(str(spec) for spec in output)) self.assertEqual(expected, parsed) - def check_lex(self, tokens, spec): - """Check that the provided spec parses to the provided list of tokens.""" + """Check that the provided spec parses to the provided token list.""" lex_output = SpecLexer().lex(spec) for tok, spec_tok in zip(tokens, lex_output): if tok.type == ID: @@ -85,9 +89,9 @@ class SpecSyntaxTest(unittest.TestCase): # Only check the type for non-identifiers. 
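
In the spec_syntax.py hunks that follow, long spec strings are broken across adjacent string literals inside parentheses; Python joins adjacent literals at compile time, so each test still sees a single string and no source line exceeds 79 characters. A short sketch using one of the specs from those tests:

    # Adjacent string literals are concatenated at compile time, so a long
    # spec string can be split over several lines inside parentheses without
    # '+' or a trailing backslash.
    spec_string = ("mvapich_foo"
                   "^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4"
                   "^stackwalker@8.1_1e")
    assert spec_string == ("mvapich_foo^_openmpi@1.2:1.4,1.6"
                           "%intel@12.1+debug~qt_4^stackwalker@8.1_1e")
    print(spec_string)
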
self.assertEqual(tok.type, spec_tok.type) - # ================================================================================ + # ======================================================================== # Parse checks - # =============================================================================== + # ======================================================================== def test_package_names(self): self.check_parse("mvapich") self.check_parse("mvapich_foo") @@ -104,18 +108,37 @@ class SpecSyntaxTest(unittest.TestCase): self.check_parse("openmpi^hwloc@1.2e6:1.4b7-rc3") def test_full_specs(self): - self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e") - self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4^stackwalker@8.1_1e") - self.check_parse('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e') + self.check_parse( + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4" + "^stackwalker@8.1_1e") + self.check_parse( + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4" + "^stackwalker@8.1_1e") + self.check_parse( + 'mvapich_foo' + '^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4' + '^stackwalker@8.1_1e') def test_canonicalize(self): self.check_parse( - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e", - "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e", + + "mvapich_foo " + "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 " + "^stackwalker@8.1_1e") self.check_parse( - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e", - "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e", + + "mvapich_foo " + "^stackwalker@8.1_1e " + "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug") self.check_parse( "x^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f", @@ -130,58 +153,81 @@ class SpecSyntaxTest(unittest.TestCase): self.assertRaises(SpecParseError, self.check_parse, "x::") def test_duplicate_variant(self): - self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug debug=true") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false debug=true") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false~debug") - + self.assertRaises(DuplicateVariantError, + self.check_parse, "x@1.2+debug+debug") + self.assertRaises(DuplicateVariantError, + self.check_parse, "x ^y@1.2+debug debug=true") + self.assertRaises(DuplicateVariantError, self.check_parse, + "x ^y@1.2 debug=false debug=true") + self.assertRaises(DuplicateVariantError, + self.check_parse, "x ^y@1.2 debug=false~debug") def test_duplicate_depdendence(self): - self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y") + self.assertRaises(DuplicateDependencyError, + self.check_parse, "x ^y ^y") def test_duplicate_compiler(self): - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%intel") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%gcc") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%gcc%intel") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%intel") - 
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%gcc") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%gcc%intel") - - - # ================================================================================ + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%intel%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%intel%gcc") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%gcc%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%intel%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%intel%gcc") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%gcc%intel") + + # ======================================================================== # Lex checks - # ================================================================================ + # ======================================================================== def test_ambiguous(self): # This first one is ambiguous because - can be in an identifier AND # indicate disabling an option. self.assertRaises( AssertionError, self.check_lex, complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4" + "^stackwalker@8.1_1e") - # The following lexes are non-ambiguous (add a space before -qt_4) and should all - # result in the tokens in complex_lex + # The following lexes are non-ambiguous (add a space before -qt_4) + # and should all result in the tokens in complex_lex def test_minimal_spaces(self): self.check_lex( complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4" + "^stackwalker@8.1_1e") self.check_lex( complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e") def test_spaces_between_dependences(self): self.check_lex( complex_lex, - "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 ^stackwalker @ 8.1_1e") + "mvapich_foo " + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 " + "^stackwalker @ 8.1_1e") self.check_lex( complex_lex, - "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker @ 8.1_1e") + "mvapich_foo " + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 " + "^stackwalker @ 8.1_1e") def test_spaces_between_options(self): self.check_lex( complex_lex, - "mvapich_foo ^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 ^stackwalker @8.1_1e") + "mvapich_foo " + "^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 " + "^stackwalker @8.1_1e") def test_way_too_many_spaces(self): self.check_lex( complex_lex, - "mvapich_foo ^ _openmpi @ 1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 ^ stackwalker @ 8.1_1e") + "mvapich_foo " + "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " + "^ stackwalker @ 8.1_1e") diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index d3e3bf1383..ec661bfe50 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -62,6 +62,7 @@ def use_tmp(use_tmp): class StageTest(unittest.TestCase): + def setUp(self): """This sets up a mock archive to fetch, and a mock temp space for use by the Stage class. 
It doesn't actually create the Stage -- that @@ -89,7 +90,6 @@ class StageTest(unittest.TestCase): # be removed. self.working_dir = os.getcwd() - def tearDown(self): """Blows away the test environment directory.""" shutil.rmtree(test_files_dir) @@ -100,7 +100,6 @@ class StageTest(unittest.TestCase): # restore spack's original tmp environment spack.tmp_dirs = self.old_tmp_dirs - def get_stage_path(self, stage, stage_name): """Figure out where a stage should be living. This depends on whether it's named. @@ -114,7 +113,6 @@ class StageTest(unittest.TestCase): self.assertTrue(stage.path.startswith(spack.stage_path)) return stage.path - def check_setup(self, stage, stage_name): """Figure out whether a stage was set up correctly.""" stage_path = self.get_stage_path(stage, stage_name) @@ -139,14 +137,12 @@ class StageTest(unittest.TestCase): # Make sure the stage path is NOT a link for a non-tmp stage self.assertFalse(os.path.islink(stage_path)) - def check_fetch(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertTrue(archive_name in os.listdir(stage_path)) self.assertEqual(join_path(stage_path, archive_name), stage.fetcher.archive_file) - def check_expand_archive(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertTrue(archive_name in os.listdir(stage_path)) @@ -162,19 +158,16 @@ class StageTest(unittest.TestCase): with open(readme) as file: self.assertEqual(readme_text, file.read()) - def check_chdir(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertEqual(os.path.realpath(stage_path), os.getcwd()) - def check_chdir_to_source(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertEqual( join_path(os.path.realpath(stage_path), archive_dir), os.getcwd()) - def check_destroy(self, stage, stage_name): """Figure out whether a stage was destroyed correctly.""" stage_path = self.get_stage_path(stage, stage_name) @@ -187,35 +180,30 @@ class StageTest(unittest.TestCase): target = os.path.realpath(stage_path) self.assertFalse(os.path.exists(target)) - def test_setup_and_destroy_name_with_tmp(self): with use_tmp(True): with Stage(archive_url, name=stage_name) as stage: self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) - def test_setup_and_destroy_name_without_tmp(self): with use_tmp(False): with Stage(archive_url, name=stage_name) as stage: self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) - def test_setup_and_destroy_no_name_with_tmp(self): with use_tmp(True): with Stage(archive_url) as stage: self.check_setup(stage, None) self.check_destroy(stage, None) - def test_setup_and_destroy_no_name_without_tmp(self): with use_tmp(False): with Stage(archive_url) as stage: self.check_setup(stage, None) self.check_destroy(stage, None) - def test_chdir(self): with Stage(archive_url, name=stage_name) as stage: stage.chdir() @@ -223,7 +211,6 @@ class StageTest(unittest.TestCase): self.check_chdir(stage, stage_name) self.check_destroy(stage, stage_name) - def test_fetch(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -232,7 +219,6 @@ class StageTest(unittest.TestCase): self.check_fetch(stage, stage_name) self.check_destroy(stage, stage_name) - def test_expand_archive(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -242,8 +228,7 @@ class StageTest(unittest.TestCase): self.check_expand_archive(stage, stage_name) self.check_destroy(stage, stage_name) - - def 
test_expand_archive(self): + def test_expand_archive_with_chdir(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() self.check_setup(stage, stage_name) @@ -254,7 +239,6 @@ class StageTest(unittest.TestCase): self.check_chdir_to_source(stage, stage_name) self.check_destroy(stage, stage_name) - def test_restage(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -278,20 +262,17 @@ class StageTest(unittest.TestCase): self.assertFalse('foobar' in os.listdir(stage.source_path)) self.check_destroy(stage, stage_name) - def test_no_keep_without_exceptions(self): with Stage(archive_url, name=stage_name, keep=False) as stage: pass self.check_destroy(stage, stage_name) - def test_keep_without_exceptions(self): with Stage(archive_url, name=stage_name, keep=True) as stage: pass path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) - def test_no_keep_with_exceptions(self): try: with Stage(archive_url, name=stage_name, keep=False) as stage: @@ -300,8 +281,7 @@ class StageTest(unittest.TestCase): path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) except: - pass # ignore here. - + pass # ignore here. def test_keep_exceptions(self): try: @@ -311,4 +291,4 @@ class StageTest(unittest.TestCase): path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) except: - pass # ignore here. + pass # ignore here. diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 0a745a090b..9ef7593ed1 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -94,17 +94,15 @@ class SvnFetchTest(MockPackagesTest): self.assert_rev(rev) - def test_fetch_default(self): """Test a default checkout and make sure it's on rev 1""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'svn' : self.repo.url + 'svn': self.repo.url }) - def test_fetch_r1(self): """Test fetching an older revision (0).""" self.try_fetch(self.repo.r0, self.repo.r0_file, { - 'svn' : self.repo.url, - 'revision' : self.repo.r0 + 'svn': self.repo.url, + 'revision': self.repo.r0 }) diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index 96af1c9b21..808694d186 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -26,6 +26,7 @@ import os from nose.plugins import Plugin + class Tally(Plugin): name = 'tally' diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index ffd4230f71..ca14dab958 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -34,20 +34,21 @@ class UrlExtrapolateTest(unittest.TestCase): def check_url(self, base, version, new_url): self.assertEqual(url.substitute_version(base, version), new_url) - def test_libelf_version(self): base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" self.check_url(base, '0.8.13', base) - self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz") - self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz") - self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") - + self.check_url( + base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz") + self.check_url( + base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz") + self.check_url( + base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") def test_libdwarf_version(self): base = 
"http://www.prevanders.net/libdwarf-20130729.tar.gz" self.check_url(base, '20130729', base) - self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz") - + self.check_url( + base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz") def test_dyninst_version(self): # Dyninst has a version twice in the URL. @@ -58,7 +59,6 @@ class UrlExtrapolateTest(unittest.TestCase): self.check_url(base, '8.3.1', "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz") - def test_partial_version_prefix(self): # Test now with a partial prefix earlier in the URL -- this is # hard to figure out so Spack only substitutes the last @@ -72,7 +72,6 @@ class UrlExtrapolateTest(unittest.TestCase): self.check_url(base, '8.3.1', "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz") - def test_scalasca_partial_version(self): # Note that this probably doesn't actually work, but sites are # inconsistent about their directory structure, so it's not @@ -84,19 +83,16 @@ class UrlExtrapolateTest(unittest.TestCase): self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1', 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz') - def test_mpileaks_version(self): self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3', 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz') - def test_gcc(self): self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7', 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2') self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7', 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2') - def test_github_raw(self): self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7', 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true') diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index 648996aaaa..6c944a3e7a 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -32,11 +32,11 @@ import spack.url as url class UrlParseTest(unittest.TestCase): + def assert_not_detected(self, string): self.assertRaises( url.UndetectableVersionError, url.parse_name_and_version, string) - def check(self, name, v, string, **kwargs): # Make sure correct name and version are extracted. parsed_name, parsed_v = url.parse_name_and_version(string) @@ -52,7 +52,6 @@ class UrlParseTest(unittest.TestCase): # build one with a specific version. 
self.assertEqual(string, url.substitute_version(string, v)) - def test_wwwoffle_version(self): self.check( 'wwwoffle', '2.9h', @@ -72,7 +71,7 @@ class UrlParseTest(unittest.TestCase): def test_version_all_dots(self): self.check( - 'foo.bar.la', '1.14','http://example.com/foo.bar.la.1.14.zip') + 'foo.bar.la', '1.14', 'http://example.com/foo.bar.la.1.14.zip') def test_version_underscore_separator(self): self.check( @@ -286,7 +285,7 @@ class UrlParseTest(unittest.TestCase): 'mvapich2', '1.9', 'http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz') - def test_mvapich2_19_version(self): + def test_mvapich2_20_version(self): self.check( 'mvapich2', '2.0', 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index 9cc04834b6..ea6374e3d2 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -26,37 +26,31 @@ This test does sanity checks on substituting new versions into URLs """ import unittest - import spack.url as url +base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz" +stem = "https://comp.llnl.gov/linear_solvers/download/hypre-" + + class PackageSanityTest(unittest.TestCase): - def test_hypre_url_substitution(self): - base = "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz" + def test_hypre_url_substitution(self): self.assertEqual(url.substitute_version(base, '2.9.0b'), base) self.assertEqual( - url.substitute_version(base, '2.8.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.8.0b.tar.gz") + url.substitute_version(base, '2.8.0b'), stem + "2.8.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.7.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.7.0b.tar.gz") + url.substitute_version(base, '2.7.0b'), stem + "2.7.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.6.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.6.0b.tar.gz") + url.substitute_version(base, '2.6.0b'), stem + "2.6.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '1.14.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.14.0b.tar.gz") + url.substitute_version(base, '1.14.0b'), stem + "1.14.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '1.13.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.13.0b.tar.gz") + url.substitute_version(base, '1.13.0b'), stem + "1.13.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.0.0'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.0.0.tar.gz") + url.substitute_version(base, '2.0.0'), stem + "2.0.0.tar.gz") self.assertEqual( - url.substitute_version(base, '1.6.0'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.6.0.tar.gz") - + url.substitute_version(base, '1.6.0'), stem + "1.6.0.tar.gz") def test_otf2_url_substitution(self): base = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py index f1b83e7b71..dedbd15d10 100644 --- a/lib/spack/spack/test/yaml.py +++ b/lib/spack/spack/test/yaml.py @@ -45,26 +45,25 @@ config_file: """ test_data = { - 'config_file' : syaml.syaml_dict([ + 'config_file': syaml.syaml_dict([ ('x86_64', syaml.syaml_dict([ ('foo', '/path/to/foo'), ('bar', '/path/to/bar'), - ('baz', '/path/to/baz' )])), - ('some_list', [ 'item 1', 'item 2', 
'item 3' ]), - ('another_list', [ 1, 2, 3 ]), + ('baz', '/path/to/baz')])), + ('some_list', ['item 1', 'item 2', 'item 3']), + ('another_list', [1, 2, 3]), ('some_key', 'some_string') ])} + class YamlTest(unittest.TestCase): def setUp(self): self.data = syaml.load(test_file) - def test_parse(self): self.assertEqual(test_data, self.data) - def test_dict_order(self): self.assertEqual( ['x86_64', 'some_list', 'another_list', 'some_key'], @@ -74,7 +73,6 @@ class YamlTest(unittest.TestCase): ['foo', 'bar', 'baz'], self.data['config_file']['x86_64'].keys()) - def test_line_numbers(self): def check(obj, start_line, end_line): self.assertEqual(obj._start_mark.line, start_line) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index f678a2dca9..02c9c04380 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -56,12 +56,12 @@ import spack.error import spack.util.compression as comp from spack.version import Version + # # Note: We call the input to most of these functions a "path" but the functions # work on paths and URLs. There's not a good word for both of these, but # "path" seemed like the most generic term. # - def find_list_url(url): """Finds a good list URL for the supplied URL. This depends on the site. By default, just assumes that a good list URL is the @@ -71,8 +71,8 @@ def find_list_url(url): url_types = [ # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz - (r'^(https://github.com/[^/]+/[^/]+)/archive/', lambda m: m.group(1) + '/releases') - ] + (r'^(https://github.com/[^/]+/[^/]+)/archive/', + lambda m: m.group(1) + '/releases')] for pattern, fun in url_types: match = re.search(pattern, url) @@ -89,8 +89,10 @@ def strip_query_and_fragment(path): query, frag = components[3:5] suffix = '' - if query: suffix += '?' + query - if frag: suffix += '#' + frag + if query: + suffix += '?' + query + if frag: + suffix += '#' + frag return (urlunsplit(stripped), suffix) @@ -152,8 +154,10 @@ def downloaded_file_extension(path): """ match = re.search(r'github.com/.+/(zip|tar)ball/', path) if match: - if match.group(1) == 'zip': return 'zip' - elif match.group(1) == 'tar': return 'tar.gz' + if match.group(1) == 'zip': + return 'zip' + elif match.group(1) == 'tar': + return 'tar.gz' prefix, ext, suffix = split_url_extension(path) if not ext: @@ -193,7 +197,8 @@ def parse_version_offset(path): (r'[-_](R\d+[AB]\d*(-\d+)?)', path), # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz - # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz + # e.g., + # https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path), # e.g. boost_1_39_0 @@ -201,7 +206,7 @@ def parse_version_offset(path): # e.g. foobar-4.5.1-1 # e.g. ruby-1.9.1-p243 - (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), + (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), # noqa # e.g. 
lame-398-1 (r'-((\d)+-\d)', stem), @@ -275,7 +280,8 @@ def parse_name_offset(path, v=None): name_types = [ (r'/sourceforge/([^/]+)/', path), - (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path), + (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % + (v, v), path), (r'/([^/]+)/(tarball|zipball)/', path), (r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path), (r'github.com/[^/]+/([^/]+)/archive', path), @@ -283,7 +289,8 @@ def parse_name_offset(path, v=None): (r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem (r'([^/]+)%s' % v, stem), - (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem. + # accept the path if name is not in stem. + (r'/([^/]+)[_.-]v?%s' % v, path), (r'/([^/]+)%s' % v, path), (r'^([^/]+)[_.-]v?%s' % v, path), @@ -326,7 +333,7 @@ def insensitize(string): return re.sub(r'([a-zA-Z])', to_ins, string) -def cumsum(elts, init=0, fn=lambda x:x): +def cumsum(elts, init=0, fn=lambda x: x): """Return cumulative sum of result of fn on each element in elts.""" sums = [] s = init @@ -337,21 +344,20 @@ def cumsum(elts, init=0, fn=lambda x:x): def substitution_offsets(path): - """This returns offsets for substituting versions and names in the provided path. - It is a helper for substitute_version(). + """This returns offsets for substituting versions and names in the + provided path. It is a helper for substitute_version(). """ # Get name and version offsets try: ver, vs, vl = parse_version_offset(path) name, ns, nl = parse_name_offset(path, ver) - except UndetectableNameError, e: + except UndetectableNameError: return (None, -1, -1, (), ver, vs, vl, (vs,)) - except UndetectableVersionError, e: + except UndetectableVersionError: return (None, -1, -1, (), None, -1, -1, ()) # protect extensions like bz2 from getting inadvertently # considered versions. - ext = comp.extension(path) path = comp.strip_extension(path) # Construct a case-insensitive regular expression for the package name. @@ -449,7 +455,7 @@ def color_url(path, **kwargs): Cyan: The version found by parse_version_offset(). Red: The name found by parse_name_offset(). - Green: Instances of version string substituted by substitute_version(). + Green: Instances of version string from substitute_version(). Magenta: Instances of the name (protected from substitution). 
Optional args: @@ -469,31 +475,46 @@ def color_url(path, **kwargs): nerr = verr = 0 out = StringIO() for i in range(len(path)): - if i == vs: out.write('@c'); verr += 1 - elif i == ns: out.write('@r'); nerr += 1 + if i == vs: + out.write('@c') + verr += 1 + elif i == ns: + out.write('@r') + nerr += 1 elif subs: - if i in voffs: out.write('@g') - elif i in noffs: out.write('@m') + if i in voffs: + out.write('@g') + elif i in noffs: + out.write('@m') out.write(path[i]) - if i == vs + vl - 1: out.write('@.'); verr += 1 - elif i == ns + nl - 1: out.write('@.'); nerr += 1 + if i == vs + vl - 1: + out.write('@.') + verr += 1 + elif i == ns + nl - 1: + out.write('@.') + nerr += 1 elif subs: if i in vends or i in nends: out.write('@.') if errors: - if nerr == 0: out.write(" @r{[no name]}") - if verr == 0: out.write(" @r{[no version]}") - if nerr == 1: out.write(" @r{[incomplete name]}") - if verr == 1: out.write(" @r{[incomplete version]}") + if nerr == 0: + out.write(" @r{[no name]}") + if verr == 0: + out.write(" @r{[no version]}") + if nerr == 1: + out.write(" @r{[incomplete name]}") + if verr == 1: + out.write(" @r{[incomplete version]}") return colorize(out.getvalue()) class UrlParseError(spack.error.SpackError): """Raised when the URL module can't parse something correctly.""" + def __init__(self, msg, path): super(UrlParseError, self).__init__(msg) self.path = path @@ -501,6 +522,7 @@ class UrlParseError(spack.error.SpackError): class UndetectableVersionError(UrlParseError): """Raised when we can't parse a version from a string.""" + def __init__(self, path): super(UndetectableVersionError, self).__init__( "Couldn't detect version in: " + path, path) @@ -508,6 +530,7 @@ class UndetectableVersionError(UrlParseError): class UndetectableNameError(UrlParseError): """Raised when we can't parse a package name from a string.""" + def __init__(self, path): super(UndetectableNameError, self).__init__( "Couldn't parse package name in: " + path, path) diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index dc1188eb0f..64554ab2f7 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -32,7 +32,9 @@ PRE_EXTS = ["tar"] EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"] # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz -ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS +ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product( + PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + def allowed_archive(path): return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES) diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 1ae9793518..22777fdb68 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -31,7 +31,7 @@ _acceptable_hashes = [ hashlib.sha224, hashlib.sha256, hashlib.sha384, - hashlib.sha512 ] + hashlib.sha512] """Index for looking up hasher for a digest.""" _size_to_hash = dict((h().digest_size, h) for h in _acceptable_hashes) @@ -52,7 +52,6 @@ def checksum(hashlib_algo, filename, **kwargs): return hasher.hexdigest() - class Checker(object): """A checker checks files against one particular hex digest. It will automatically determine what hashing algorithm @@ -74,25 +73,25 @@ class Checker(object): adjusting the block_size optional arg. By default it's a 1MB (2**20 bytes) buffer. 
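# A minimal, self-contained sketch of the digest-length dispatch that the
# checksum() helper and the Checker class above rely on: pick the hash
# algorithm from the length of the hex digest, then stream the file in
# block_size chunks.  The name check_file and the exact algorithm list are
# assumptions for illustration (this is not the Spack API), and the code is
# written for Python 3 rather than the Python 2 shown in the diff.
import hashlib

_size_to_hash = {h().digest_size: h for h in
                 (hashlib.md5, hashlib.sha1, hashlib.sha224,
                  hashlib.sha256, hashlib.sha384, hashlib.sha512)}


def check_file(filename, hexdigest, block_size=2**20):
    """Return True if filename's checksum matches hexdigest; the algorithm
    is inferred from the digest length, as Checker does."""
    hasher = _size_to_hash[len(hexdigest) // 2]()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(block_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest() == hexdigest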
""" + def __init__(self, hexdigest, **kwargs): self.block_size = kwargs.get('block_size', 2**20) self.hexdigest = hexdigest self.sum = None bytes = len(hexdigest) / 2 - if not bytes in _size_to_hash: + if bytes not in _size_to_hash: raise ValueError( - 'Spack knows no hash algorithm for this digest: %s' % hexdigest) + 'Spack knows no hash algorithm for this digest: %s' + % hexdigest) self.hash_fun = _size_to_hash[bytes] - @property def hash_name(self): """Get the name of the hash function this Checker is using.""" return self.hash_fun().name - def check(self, filename): """Read the file with the specified name and check its checksum against self.hexdigest. Return True if they match, False diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py index e8a0595416..cf485a611d 100644 --- a/lib/spack/spack/util/debug.py +++ b/lib/spack/spack/util/debug.py @@ -33,10 +33,11 @@ import code import traceback import signal + def debug_handler(sig, frame): """Interrupt running process, and provide a python prompt for interactive debugging.""" - d = {'_frame':frame} # Allow access to frame object. + d = {'_frame': frame} # Allow access to frame object. d.update(frame.f_globals) # Unless shadowed by global d.update(frame.f_locals) @@ -48,5 +49,5 @@ def debug_handler(sig, frame): def register_interrupt_handler(): - """Register a handler to print a stack trace and enter an interpreter on Ctrl-C""" + """Print traceback and enter an interpreter on Ctrl-C""" signal.signal(signal.SIGINT, debug_handler) diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 14b56e8d6c..5c27b92df5 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -165,7 +165,6 @@ class Executable(object): raise ProcessError("Command exited with status %d:" % proc.returncode, cmd_line) - if output is str or error is str: result = '' if output is str: @@ -227,6 +226,7 @@ def which(name, **kwargs): class ProcessError(spack.error.SpackError): + def __init__(self, msg, long_message=None): # These are used for detailed debugging information for # package builds. They're built up gradually as the exception diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py index 038cd90121..6a25c45713 100644 --- a/lib/spack/spack/util/multiproc.py +++ b/lib/spack/spack/util/multiproc.py @@ -32,18 +32,21 @@ from itertools import izip __all__ = ['spawn', 'parmap', 'Barrier'] + def spawn(f): - def fun(pipe,x): + def fun(pipe, x): pipe.send(f(x)) pipe.close() return fun -def parmap(f,X): - pipe=[Pipe() for x in X] - proc=[Process(target=spawn(f),args=(c,x)) for x,(p,c) in izip(X,pipe)] + +def parmap(f, X): + pipe = [Pipe() for x in X] + proc = [Process(target=spawn(f), args=(c, x)) + for x, (p, c) in izip(X, pipe)] [p.start() for p in proc] [p.join() for p in proc] - return [p.recv() for (p,c) in pipe] + return [p.recv() for (p, c) in pipe] class Barrier: @@ -53,6 +56,7 @@ class Barrier: See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41. 
""" + def __init__(self, n, timeout=None): self.n = n self.to = timeout @@ -61,7 +65,6 @@ class Barrier: self.turnstile1 = Semaphore(0) self.turnstile2 = Semaphore(1) - def wait(self): if not self.mutex.acquire(timeout=self.to): raise BarrierTimeoutError() @@ -90,4 +93,5 @@ class Barrier: self.turnstile2.release() -class BarrierTimeoutError: pass +class BarrierTimeoutError: + pass diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index 2d9762942d..9a5cdee411 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -31,9 +31,15 @@ from StringIO import StringIO import spack -__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name', - 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name', - 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie'] +__all__ = [ + 'mod_to_class', + 'spack_module_to_python_module', + 'valid_module_name', + 'valid_fully_qualified_module_name', + 'validate_fully_qualified_module_name', + 'validate_module_name', + 'possible_spack_module_names', + 'NamespaceTrie'] # Valid module names can contain '-' but can't start with it. _valid_module_re = r'^\w[\w-]*$' @@ -67,8 +73,8 @@ def mod_to_class(mod_name): class_name = string.capwords(class_name, '-') class_name = class_name.replace('-', '') - # If a class starts with a number, prefix it with Number_ to make it a valid - # Python class name. + # If a class starts with a number, prefix it with Number_ to make it + # a valid Python class name. if re.match(r'^[0-9]', class_name): class_name = "_%s" % class_name @@ -126,6 +132,7 @@ def validate_fully_qualified_module_name(mod_name): class InvalidModuleNameError(spack.error.SpackError): """Raised when we encounter a bad module name.""" + def __init__(self, name): super(InvalidModuleNameError, self).__init__( "Invalid module name: " + name) @@ -134,6 +141,7 @@ class InvalidModuleNameError(spack.error.SpackError): class InvalidFullyQualifiedModuleNameError(spack.error.SpackError): """Raised when we encounter a bad full package name.""" + def __init__(self, name): super(InvalidFullyQualifiedModuleNameError, self).__init__( "Invalid fully qualified package name: " + name) @@ -141,17 +149,17 @@ class InvalidFullyQualifiedModuleNameError(spack.error.SpackError): class NamespaceTrie(object): + class Element(object): + def __init__(self, value): self.value = value - def __init__(self, separator='.'): self._subspaces = {} self._value = None self._sep = separator - def __setitem__(self, namespace, value): first, sep, rest = namespace.partition(self._sep) @@ -164,7 +172,6 @@ class NamespaceTrie(object): self._subspaces[first][rest] = value - def _get_helper(self, namespace, full_name): first, sep, rest = namespace.partition(self._sep) if not first: @@ -176,13 +183,12 @@ class NamespaceTrie(object): else: return self._subspaces[first]._get_helper(rest, full_name) - def __getitem__(self, namespace): return self._get_helper(namespace, namespace) - def is_prefix(self, namespace): - """True if the namespace has a value, or if it's the prefix of one that does.""" + """True if the namespace has a value, or if it's the prefix of one that + does.""" first, sep, rest = namespace.partition(self._sep) if not first: return True @@ -191,7 +197,6 @@ class NamespaceTrie(object): else: return self._subspaces[first].is_prefix(rest) - def is_leaf(self, namespace): """True if this namespace has no children in the trie.""" first, sep, rest = namespace.partition(self._sep) @@ -202,7 +207,6 @@ 
class NamespaceTrie(object): else: return self._subspaces[first].is_leaf(rest) - def has_value(self, namespace): """True if there is a value set for the given namespace.""" first, sep, rest = namespace.partition(self._sep) @@ -213,20 +217,17 @@ class NamespaceTrie(object): else: return self._subspaces[first].has_value(rest) - def __contains__(self, namespace): """Returns whether a value has been set for the namespace.""" return self.has_value(namespace) - def _str_helper(self, stream, level=0): indent = (level * ' ') for name in sorted(self._subspaces): stream.write(indent + name + '\n') if self._value: stream.write(indent + ' ' + repr(self._value.value)) - stream.write(self._subspaces[name]._str_helper(stream, level+1)) - + stream.write(self._subspaces[name]._str_helper(stream, level + 1)) def __str__(self): stream = StringIO() diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index bc5e9d2ffe..c36445193c 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -53,7 +53,9 @@ def composite(interface=None, method_list=None, container=list): # Check if at least one of the 'interface' or the 'method_list' arguments # are defined if interface is None and method_list is None: - raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501 + raise TypeError( + "Either 'interface' or 'method_list' must be defined on a call " + "to composite") def cls_decorator(cls): # Retrieve the base class of the composite. Inspect its methods and @@ -102,7 +104,8 @@ def composite(interface=None, method_list=None, container=list): # python@2.7: interface_methods = {name: method for name, method in # inspect.getmembers(interface, predicate=no_special_no_private)} interface_methods = {} - for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501 + for name, method in inspect.getmembers( + interface, predicate=no_special_no_private): interface_methods[name] = method ########## # python@2.7: interface_methods_dict = {name: IterateOver(name, @@ -118,7 +121,8 @@ def composite(interface=None, method_list=None, container=list): # python@2.7: cls_method = {name: method for name, method in # inspect.getmembers(cls, predicate=inspect.ismethod)} cls_method = {} - for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501 + for name, method in inspect.getmembers( + cls, predicate=inspect.ismethod): cls_method[name] = method ########## dictionary_for_type_call.update(cls_method) diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py index e1a0f2958b..985d862269 100644 --- a/lib/spack/spack/util/prefix.py +++ b/lib/spack/spack/util/prefix.py @@ -27,6 +27,7 @@ This file contains utilities to help with installing packages. """ from llnl.util.filesystem import join_path + class Prefix(str): """This class represents an installation prefix, but provides useful attributes for referring to directories inside the prefix. 
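# A hypothetical usage sketch for the NamespaceTrie class from the naming.py
# hunk above, based only on the docstrings shown there (has_value, is_prefix,
# is_leaf, __contains__).  It assumes Spack's lib/spack directory is on
# sys.path; the namespaces and stored values are made up for illustration.
from spack.util.naming import NamespaceTrie

trie = NamespaceTrie()                        # default separator is '.'
trie['builtin.mpich'] = 'mpich package'
trie['builtin.mock.mpich'] = 'mock mpich package'

assert 'builtin.mpich' in trie                # a value was set here
assert trie.has_value('builtin.mock.mpich')
assert not trie.has_value('builtin')          # a prefix with no value of its own
assert trie.is_prefix('builtin.mock')         # prefix of a namespace with a value
assert trie.is_leaf('builtin.mock.mpich')     # nothing stored below it
print(trie['builtin.mpich'])                  # retrieves the stored value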
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py index 909f9a57a8..7bcdf2d61f 100644 --- a/lib/spack/spack/util/spack_yaml.py +++ b/lib/spack/spack/util/spack_yaml.py @@ -34,7 +34,6 @@ import yaml from yaml.nodes import * from yaml.constructor import ConstructorError -from yaml.representer import SafeRepresenter from ordereddict_backport import OrderedDict # Only export load and dump @@ -42,15 +41,23 @@ __all__ = ['load', 'dump'] # Make new classes so we can add custom attributes. # Also, use OrderedDict instead of just dict. + + class syaml_dict(OrderedDict): + def __repr__(self): - mappings = ('%r: %r' % (k,v) for k,v in self.items()) + mappings = ('%r: %r' % (k, v) for k, v in self.items()) return '{%s}' % ', '.join(mappings) + + class syaml_list(list): __repr__ = list.__repr__ + + class syaml_str(str): __repr__ = str.__repr__ + def mark(obj, node): """Add start and end markers to an object.""" obj._start_mark = node.start_mark @@ -73,6 +80,7 @@ class OrderedLineLoader(yaml.Loader): # The standard YAML constructors return empty instances and fill # in with mappings later. We preserve this behavior. # + def construct_yaml_str(self, node): value = self.construct_scalar(node) try: @@ -83,14 +91,12 @@ class OrderedLineLoader(yaml.Loader): mark(value, node) return value - def construct_yaml_seq(self, node): data = syaml_list() mark(data, node) yield data data.extend(self.construct_sequence(node)) - def construct_yaml_map(self, node): data = syaml_dict() mark(data, node) @@ -104,22 +110,23 @@ class OrderedLineLoader(yaml.Loader): # def construct_sequence(self, node, deep=False): if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - value = syaml_list(self.construct_object(child, deep=deep) - for child in node.value) + raise ConstructorError( + None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + value = syaml_list(self.construct_object(child, deep=deep) + for child in node.value) mark(value, node) return value - def construct_mapping(self, node, deep=False): """Store mappings as OrderedDicts instead of as regular python dictionaries to preserve file ordering.""" if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) + raise ConstructorError( + None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) mapping = syaml_dict() for key_node, value_node in node.value: @@ -127,22 +134,26 @@ class OrderedLineLoader(yaml.Loader): try: hash(key) except TypeError, exc: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unacceptable key (%s)" % exc, key_node.start_mark) + raise ConstructorError( + "while constructing a mapping", node.start_mark, + "found unacceptable key (%s)" % exc, key_node.start_mark) value = self.construct_object(value_node, deep=deep) if key in mapping: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found already in-use key (%s)" % key, key_node.start_mark) + raise ConstructorError( + "while constructing a mapping", node.start_mark, + "found already in-use key (%s)" % key, key_node.start_mark) mapping[key] = value mark(mapping, node) return mapping # register above new constructors -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map) -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', 
OrderedLineLoader.construct_yaml_seq) -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) - +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map) +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq) +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) class OrderedLineDumper(yaml.Dumper): @@ -154,6 +165,7 @@ class OrderedLineDumper(yaml.Dumper): regular Python equivalents, instead of ugly YAML pyobjects. """ + def represent_mapping(self, tag, mapping, flow_style=None): value = [] node = MappingNode(tag, value, flow_style=flow_style) @@ -173,7 +185,8 @@ class OrderedLineDumper(yaml.Dumper): node_value = self.represent_data(item_value) if not (isinstance(node_key, ScalarNode) and not node_key.style): best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): + if not (isinstance(node_value, ScalarNode) and + not node_value.style): best_style = False value.append((node_key, node_value)) if flow_style is None: diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py index ce017b8126..dae7afbf46 100644 --- a/lib/spack/spack/util/string.py +++ b/lib/spack/spack/util/string.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## + def comma_list(sequence, article=''): if type(sequence) != list: sequence = list(sequence) @@ -32,7 +33,7 @@ def comma_list(sequence, article=''): elif len(sequence) == 1: return sequence[0] else: - out = ', '.join(str(s) for s in sequence[:-1]) + out = ', '.join(str(s) for s in sequence[:-1]) if len(sequence) != 2: out += ',' # oxford comma out += ' ' @@ -41,6 +42,7 @@ def comma_list(sequence, article=''): out += str(sequence[-1]) return out + def comma_or(sequence): return comma_list(sequence, 'or') diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index cac783a368..25f1e605d6 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -43,6 +43,7 @@ TIMEOUT = 10 class LinkParser(HTMLParser): """This parser just takes an HTML page and strips out the hrefs on the links. Good enough for a really simple spider. """ + def __init__(self): HTMLParser.__init__(self) self.links = [] diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index ad875f5ef5..b2c1a73489 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -29,8 +29,10 @@ currently variants are just flags. """ + class Variant(object): """Represents a variant on a build. 
Can be either on or off.""" + def __init__(self, default, description): self.default = default self.description = str(description) diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index 6f6c83b3d8..e1311eb0d9 100644 --- a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -50,6 +50,8 @@ from functools import wraps from functools_backport import total_ordering +__all__ = ['Version', 'VersionRange', 'VersionList', 'ver'] + # Valid version characters VALID_VERSION = r'[A-Za-z0-9_.-]' @@ -346,8 +348,8 @@ class VersionRange(object): s, o = self, other if s.start != o.start: - return s.start is None or (o.start is not None and s.start < o.start) # NOQA: ignore=E501 - + return s.start is None or ( + o.start is not None and s.start < o.start) return (s.end != o.end and o.end is None or (s.end is not None and s.end < o.end)) diff --git a/lib/spack/spack/yaml_version_check.py b/lib/spack/spack/yaml_version_check.py index c2d084d6c3..2c5b511d7f 100644 --- a/lib/spack/spack/yaml_version_check.py +++ b/lib/spack/spack/yaml_version_check.py @@ -34,6 +34,7 @@ import spack.config def check_yaml_versions(): check_compiler_yaml_version() + def check_compiler_yaml_version(): config_scopes = spack.config.config_scopes for scope in config_scopes.values(): @@ -46,7 +47,8 @@ def check_compiler_yaml_version(): if data: compilers = data['compilers'] if len(compilers) > 0: - if (not isinstance(compilers, list)) or 'operating_system' not in compilers[0]['compiler']: + if (not isinstance(compilers, list) or + 'operating_system' not in compilers[0]['compiler']): new_file = os.path.join(scope.path, '_old_compilers.yaml') tty.warn('%s in out of date compilers format. ' 'Moved to %s. Spack automatically generate '
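The VersionRange.__lt__ hunk above encodes a deliberate convention for open-ended ranges: a missing start (an open lower bound, as in the spec range ':1.4') sorts before any concrete start, and a missing end sorts after any concrete end. The sketch below restates that comparison as a standalone function over (start, end) pairs, using plain strings in place of Version objects; the function name and the sample ranges are assumptions for illustration only.

def range_lt(a, b):
    """Mirror the None handling of VersionRange.__lt__ for (start, end) pairs."""
    a_start, a_end = a
    b_start, b_end = b
    if a_start != b_start:
        return a_start is None or (b_start is not None and a_start < b_start)
    return (a_end != b_end and b_end is None or
            (a_end is not None and a_end < b_end))

# ':1.4' sorts before '1.2:1.4', which sorts before '1.2:1.6' and '1.2:'.
assert range_lt((None, '1.4'), ('1.2', '1.4'))
assert range_lt(('1.2', '1.4'), ('1.2', '1.6'))
assert range_lt(('1.2', '1.6'), ('1.2', None))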