Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/basic_usage.rst           |   4
-rwxr-xr-x  lib/spack/env/cc                         |   4
l---------  lib/spack/env/craype/CC                  |   1
l---------  lib/spack/env/craype/cc                  |   1
l---------  lib/spack/env/craype/ftn                 |   1
-rw-r--r--  lib/spack/spack/architecture.py          |  19
-rw-r--r--  lib/spack/spack/build_environment.py     |   1
-rw-r--r--  lib/spack/spack/cmd/__init__.py          | 120
-rw-r--r--  lib/spack/spack/cmd/common/__init__.py   |  24
-rw-r--r--  lib/spack/spack/cmd/common/arguments.py  |  96
-rw-r--r--  lib/spack/spack/cmd/find.py              |  85
-rw-r--r--  lib/spack/spack/cmd/module.py            | 328
-rw-r--r--  lib/spack/spack/cmd/uninstall.py         |  62
-rw-r--r--  lib/spack/spack/config.py                |  14
-rw-r--r--  lib/spack/spack/modules.py               |  26
-rw-r--r--  lib/spack/spack/package.py               |   7
-rw-r--r--  lib/spack/spack/platforms/cray_xc.py     |  40
-rw-r--r--  lib/spack/spack/test/__init__.py         |   3
-rw-r--r--  lib/spack/spack/test/cmd/find.py         |   6
-rw-r--r--  lib/spack/spack/test/cmd/module.py       |  83
-rw-r--r--  lib/spack/spack/util/pattern.py          |  78

21 files changed, 695 insertions(+), 308 deletions(-)
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 50a161a175..948092047b 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -1866,6 +1866,10 @@ to call the Cray compiler wrappers during build time.
For more on compiler configuration, check out :ref:`compiler-config`.
+Spack sets the default Cray link type to dynamic, to better match other
+platforms. Individual packages can enable static linking (which is the
+default outside of Spack on Cray systems) using the ``-static`` flag.
+
Setting defaults and using Cray modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
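The documentation hunk above is behavioral rather than API, but a short sketch may make it concrete. One way a hypothetical package (the name, URL and checksum below are placeholders, not part of this commit) could opt back into static linking on Cray is to pass -static through LDFLAGS in its install method, on top of the CRAYPE_LINK_TYPE=dynamic default set by the platform hook later in this diff:

    from spack import *

    class Staticdemo(Package):
        """Hypothetical package that prefers a static link on Cray."""
        homepage = "https://example.com/staticdemo"
        url      = "https://example.com/staticdemo-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')  # placeholder checksum

        def install(self, spec, prefix):
            # CRAYPE_LINK_TYPE is set to 'dynamic' by the Cray platform hook;
            # asking for -static here restores the usual Cray default for this
            # package only.
            configure('--prefix=%s' % prefix, 'LDFLAGS=-static')
            make()
            make('install')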
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index bf98b4c354..c6bb50d261 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -110,13 +110,13 @@ case "$command" in
comp="CXX"
lang_flags=CXX
;;
- f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
+ ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
;;
- f77|gfortran|ifort|pgfortran|xlf|nagfor)
+ f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
diff --git a/lib/spack/env/craype/CC b/lib/spack/env/craype/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/CC
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/cc b/lib/spack/env/craype/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/cc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/ftn b/lib/spack/env/craype/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/ftn
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index a7cda2bf68..974505ee3a 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -76,7 +76,6 @@ attributes front_os and back_os. The operating system as described earlier,
will be responsible for compiler detection.
"""
import os
-import imp
import inspect
from llnl.util.lang import memoized, list_modules, key_ordering
@@ -190,6 +189,12 @@ class Platform(object):
return self.operating_sys.get(name, None)
+ @classmethod
+ def setup_platform_environment(self, pkg, env):
+ """ Subclass can override this method if it requires any
+ platform-specific build environment modifications.
+ """
+ pass
@classmethod
def detect(self):
@@ -200,15 +205,12 @@ class Platform(object):
"""
raise NotImplementedError()
-
def __repr__(self):
return self.__str__()
-
def __str__(self):
return self.name
-
def _cmp_key(self):
t_keys = ''.join(str(t._cmp_key()) for t in
sorted(self.targets.values()))
@@ -279,7 +281,7 @@ class OperatingSystem(object):
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x, y: x+y, compiler_lists)
+ clist = reduce(lambda x, y: x + y, compiler_lists)
return clist
def find_compiler(self, cmp_cls, *path):
@@ -320,7 +322,7 @@ class OperatingSystem(object):
# prefer the one with more compilers.
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
- newcount = len([p for p in paths if p is not None])
+ newcount = len([p for p in paths if p is not None])
prevcount = len([p for p in prev_paths if p is not None])
# Don't add if it's not an improvement over prev compiler.
@@ -337,6 +339,7 @@ class OperatingSystem(object):
d['version'] = self.version
return d
+
@key_ordering
class Arch(object):
"""Architecture is now a class to help with setting attributes.
@@ -377,11 +380,9 @@ class Arch(object):
else:
return ''
-
def __contains__(self, string):
return string in str(self)
-
def _cmp_key(self):
if isinstance(self.platform, Platform):
platform = self.platform.name
@@ -424,7 +425,7 @@ def _operating_system_from_dict(os_name, plat=None):
if isinstance(os_name, dict):
name = os_name['name']
version = os_name['version']
- return plat.operating_system(name+version)
+ return plat.operating_system(name + version)
else:
return plat.operating_system(os_name)
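A minimal sketch, not part of this commit, of how a platform subclass could override the new setup_platform_environment hook; the platform name, target and environment variable are invented, and the cray_xc.py hunk further down shows the real override:

    from spack.architecture import Platform, Target


    class DemoPlatform(Platform):
        """Hypothetical platform overriding the new hook."""
        priority  = 99
        front_end = 'x86_64'
        back_end  = 'x86_64'
        default   = 'x86_64'

        def __init__(self):
            super(DemoPlatform, self).__init__('demo')
            self.add_target('x86_64', Target('x86_64'))

        @classmethod
        def setup_platform_environment(self, pkg, env):
            # env is the EnvironmentModifications object assembled in
            # build_environment.setup_package(); anything set here is applied
            # before the package's build runs.
            env.set('DEMO_PLATFORM_ACTIVE', '1')

        @classmethod
        def detect(self):
            # Never auto-detected; this class exists only as an illustration.
            return False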
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 93fb0690f7..5affd3c7c5 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -444,6 +444,7 @@ def setup_package(pkg, dirty=False):
set_compiler_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env, dirty)
+ pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 672999159c..230115df50 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -27,16 +27,18 @@ import re
import sys
import llnl.util.tty as tty
-from llnl.util.lang import attr_setdefault
-
import spack
-import spack.spec
import spack.config
+import spack.spec
+from llnl.util.lang import *
+from llnl.util.tty.colify import *
+from llnl.util.tty.color import *
#
# Settings for commands that modify configuration
#
-# Commands that modify confguration By default modify the *highest* priority scope.
+# Commands that modify configuration by default modify the *highest*
+# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
# Commands that list configuration list *all* scopes by default.
default_list_scope = None
@@ -48,7 +50,7 @@ python_list = list
ignore_files = r'^\.|^__init__.py$|^#'
SETUP_PARSER = "setup_parser"
-DESCRIPTION = "description"
+DESCRIPTION = "description"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
@@ -71,7 +73,7 @@ def get_module(name):
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
- attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
+ attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
@@ -101,17 +103,17 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
- spec.concretize() # implies normalize
+ spec.concretize() # implies normalize
elif normalize:
spec.normalize()
return specs
- except spack.parse.ParseError, e:
+ except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)
- except spack.spec.SpecError, e:
+ except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)
@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
- return line_list[:max_num-1] + ['...'] + line_list[-1:]
+ return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
@@ -138,10 +140,104 @@ def disambiguate_spec(spec):
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
+ args = ["%s matches multiple packages." % spec,
+ "Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
+
+
+def ask_for_confirmation(message):
+ while True:
+ tty.msg(message + '[y/n]')
+ choice = raw_input().lower()
+ if choice == 'y':
+ break
+ elif choice == 'n':
+ raise SystemExit('Operation aborted')
+ tty.warn('Please reply either "y" or "n"')
+
+
+def gray_hash(spec, length):
+ return colorize('@K{%s}' % spec.dag_hash(length))
+
+
+def display_specs(specs, **kwargs):
+ mode = kwargs.get('mode', 'short')
+ hashes = kwargs.get('long', False)
+ namespace = kwargs.get('namespace', False)
+ flags = kwargs.get('show_flags', False)
+ variants = kwargs.get('variants', False)
+
+ hlen = 7
+ if kwargs.get('very_long', False):
+ hashes = True
+ hlen = None
+
+ nfmt = '.' if namespace else '_'
+ ffmt = '$%+' if flags else ''
+ vfmt = '$+' if variants else ''
+ format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
+
+ # Make a dict with specs keyed by architecture and compiler.
+ index = index_by(specs, ('architecture', 'compiler'))
+
+ # Traverse the index and print out each package
+ for i, (architecture, compiler) in enumerate(sorted(index)):
+ if i > 0:
+ print
+
+ header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+ architecture, spack.spec.compiler_color,
+ compiler)
+ tty.hline(colorize(header), char='-')
+
+ specs = index[(architecture, compiler)]
+ specs.sort()
+
+ abbreviated = [s.format(format_string, color=True) for s in specs]
+ if mode == 'paths':
+ # Print one spec per line along with prefix path
+ width = max(len(s) for s in abbreviated)
+ width += 2
+ format = " %%-%ds%%s" % width
+
+ for abbrv, spec in zip(abbreviated, specs):
+ if hashes:
+ print(gray_hash(spec, hlen), )
+ print(format % (abbrv, spec.prefix))
+
+ elif mode == 'deps':
+ for spec in specs:
+ print(spec.tree(
+ format=format_string,
+ color=True,
+ indent=4,
+ prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
+
+ elif mode == 'short':
+ # Print columns of output if not printing flags
+ if not flags:
+
+ def fmt(s):
+ string = ""
+ if hashes:
+ string += gray_hash(s, hlen) + ' '
+ string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
+
+ return string
+
+ colify(fmt(s) for s in specs)
+ # Print one entry per line if including flags
+ else:
+ for spec in specs:
+ # Print the hash if necessary
+ hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+ print(hsh + spec.format(format_string, color=True) + '\n')
+
+ else:
+ raise ValueError(
+ "Invalid mode for display_specs: %s. Must be one of (paths,"
+ "deps, short)." % mode) # NOQA: ignore=E501
diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/cmd/common/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
new file mode 100644
index 0000000000..af04170824
--- /dev/null
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -0,0 +1,96 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import argparse
+
+import spack.modules
+from spack.util.pattern import Bunch
+__all__ = ['add_common_arguments']
+
+_arguments = {}
+
+
+def add_common_arguments(parser, list_of_arguments):
+ for argument in list_of_arguments:
+ if argument not in _arguments:
+ message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501
+ raise KeyError(message.format(argument))
+ x = _arguments[argument]
+ parser.add_argument(*x.flags, **x.kwargs)
+
+
+class ConstraintAction(argparse.Action):
+ """Constructs a list of specs based on a constraint given on the command line
+
+ An instance of this class is supposed to be used as an argument action
+ in a parser. It will read a constraint and will attach a list of matching
+ specs to the namespace
+ """
+ qualifiers = {}
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ # Query specs from command line
+ d = self.qualifiers.get(namespace.subparser_name, {})
+ specs = [s for s in spack.installed_db.query(**d)]
+ values = ' '.join(values)
+ if values:
+ specs = [x for x in specs if x.satisfies(values, strict=True)]
+ namespace.specs = specs
+
+parms = Bunch(
+ flags=('constraint',),
+ kwargs={
+ 'nargs': '*',
+ 'help': 'Constraint to select a subset of installed packages',
+ 'action': ConstraintAction
+ })
+_arguments['constraint'] = parms
+
+parms = Bunch(
+ flags=('-m', '--module-type'),
+ kwargs={
+ 'help': 'Type of module files',
+ 'default': 'tcl',
+ 'choices': spack.modules.module_types
+ })
+_arguments['module_type'] = parms
+
+parms = Bunch(
+ flags=('-y', '--yes-to-all'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'yes_to_all',
+ 'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
+ })
+_arguments['yes_to_all'] = parms
+
+parms = Bunch(
+ flags=('-r', '--dependencies'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'recurse_dependencies',
+ 'help': 'Recursively traverse spec dependencies'
+ })
+_arguments['recurse_dependencies'] = parms
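A sketch of how another command module could reuse these shared arguments; the 'demo' sub-command and the demo() entry point are hypothetical, and because ConstraintAction reads namespace.subparser_name the options are registered on a sub-parser, just as spack module does below:

    import spack.cmd.common.arguments as arguments

    description = "Hypothetical command built on the shared arguments"


    def setup_parser(subparser):
        sp = subparser.add_subparsers(metavar='SUBCOMMAND',
                                      dest='subparser_name')
        demo_parser = sp.add_parser('demo', help='Hypothetical sub-command')
        # Pulls in the positional constraint (with ConstraintAction),
        # -m/--module-type and -y/--yes-to-all exactly as declared above.
        arguments.add_common_arguments(
            demo_parser, ['constraint', 'module_type', 'yes_to_all'])


    def demo(parser, args):
        # ConstraintAction has already attached the matching installed
        # specs to args.specs while parsing the constraint.
        for spec in args.specs:
            print(spec)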
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 3ec671f93f..d3ea38c573 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -31,7 +31,7 @@ import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
-from llnl.util.lang import *
+from spack.cmd import display_specs
description = "Find installed spack packages"
@@ -104,89 +104,6 @@ def setup_parser(subparser):
help='optional specs to filter results')
-def gray_hash(spec, length):
- return colorize('@K{%s}' % spec.dag_hash(length))
-
-
-def display_specs(specs, **kwargs):
- mode = kwargs.get('mode', 'short')
- hashes = kwargs.get('long', False)
- namespace = kwargs.get('namespace', False)
- flags = kwargs.get('show_flags', False)
- variants = kwargs.get('variants', False)
-
- hlen = 7
- if kwargs.get('very_long', False):
- hashes = True
- hlen = None
-
- nfmt = '.' if namespace else '_'
- ffmt = '$%+' if flags else ''
- vfmt = '$+' if variants else ''
- format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
-
- # Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
-
- # Traverse the index and print out each package
- for i, (architecture, compiler) in enumerate(sorted(index)):
- if i > 0:
- print
-
- header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
- architecture, spack.spec.compiler_color,
- compiler)
- tty.hline(colorize(header), char='-')
-
- specs = index[(architecture, compiler)]
- specs.sort()
-
- abbreviated = [s.format(format_string, color=True) for s in specs]
- if mode == 'paths':
- # Print one spec per line along with prefix path
- width = max(len(s) for s in abbreviated)
- width += 2
- format = " %%-%ds%%s" % width
-
- for abbrv, spec in zip(abbreviated, specs):
- if hashes:
- print(gray_hash(spec, hlen), )
- print(format % (abbrv, spec.prefix))
-
- elif mode == 'deps':
- for spec in specs:
- print(spec.tree(
- format=format_string,
- color=True,
- indent=4,
- prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
-
- elif mode == 'short':
- # Print columns of output if not printing flags
- if not flags:
-
- def fmt(s):
- string = ""
- if hashes:
- string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
-
- return string
-
- colify(fmt(s) for s in specs)
- # Print one entry per line if including flags
- else:
- for spec in specs:
- # Print the hash if necessary
- hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
- print(hsh + spec.format(format_string, color=True) + '\n')
-
- else:
- raise ValueError(
- "Invalid mode for display_specs: %s. Must be one of (paths,"
- "deps, short)." % mode) # NOQA: ignore=E501
-
-
def query_arguments(args):
# Check arguments
if args.explicit and args.implicit:
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 55826d133c..a10e36e077 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -23,135 +23,233 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function
+
+import collections
import os
import shutil
import sys
import llnl.util.tty as tty
import spack.cmd
-from llnl.util.filesystem import mkdirp
+import spack.cmd.common.arguments as arguments
+import llnl.util.filesystem as filesystem
from spack.modules import module_types
-from spack.util.string import *
-
-description = "Manipulate modules and dotkits."
-
-
-def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
-
- sp.add_parser('refresh', help='Regenerate all module files.')
- find_parser = sp.add_parser('find', help='Find module files for packages.')
+description = "Manipulate module files"
- find_parser.add_argument(
- 'module_type',
- help="Type of module to find file for. [" +
- '|'.join(module_types) + "]")
+# Dictionary that will be populated with the list of sub-commands
+# Each sub-command must be callable and accept 3 arguments :
+# - mtype : the type of the module file
+# - specs : the list of specs to be processed
+# - args : namespace containing the parsed command line arguments
+callbacks = {}
- find_parser.add_argument(
- '-r', '--dependencies', action='store_true',
- dest='recurse_dependencies',
- help='Recursively traverse dependencies for modules to load.')
- find_parser.add_argument(
- '-s', '--shell', action='store_true', dest='shell',
- help='Generate shell script (instead of input for module command)')
+def subcommand(subparser_name):
+ """Registers a function in the callbacks dictionary"""
+ def decorator(callback):
+ callbacks[subparser_name] = callback
+ return callback
+ return decorator
- find_parser.add_argument(
- '-p', '--prefix', dest='prefix',
- help='Prepend to module names when issuing module load commands')
- find_parser.add_argument(
- 'spec', nargs='+',
- help='spec to find a module file for.')
-
-
-def module_find(mtype, flags, spec_array):
- """Look at all installed packages and see if the spec provided
- matches any. If it does, check whether there is a module file
- of type <mtype> there, and print out the name that the user
- should type to use that package's module.
- prefix:
- Prepend this to module names when issuing "module load" commands.
- Some systems seem to need it.
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
+
+ # spack module refresh
+ refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
+ refresh_parser.add_argument(
+ '--delete-tree',
+ help='Delete the module file tree before refresh',
+ action='store_true'
+ )
+ arguments.add_common_arguments(
+ refresh_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
+
+ # spack module find
+ find_parser = sp.add_parser('find', help='Find module files for packages')
+ arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
+
+ # spack module rm
+ rm_parser = sp.add_parser('rm', help='Remove module files')
+ arguments.add_common_arguments(
+ rm_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
+
+ # spack module loads
+ loads_parser = sp.add_parser(
+ 'loads',
+ help='Print the list of modules associated with a constraint'
+ )
+ loads_parser.add_argument(
+ '--input-only', action='store_false', dest='shell',
+ help='Generate input for module command (instead of a shell script)'
+ )
+ loads_parser.add_argument(
+ '-p', '--prefix', dest='prefix', default='',
+ help='Prepend to module names when issuing module load commands'
+ )
+ arguments.add_common_arguments(
+ loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
+ )
+
+
+class MultipleMatches(Exception):
+ pass
+
+
+class NoMatch(Exception):
+ pass
+
+
+@subcommand('loads')
+def loads(mtype, specs, args):
+ """Prompt the list of modules associated with a list of specs"""
+ # Get a comprehensive list of specs
+ if args.recurse_dependencies:
+ specs_from_user_constraint = specs[:]
+ specs = []
+ # FIXME : during module file creation nodes seem to be visited
+ # FIXME : multiple times even if cover='nodes' is given. This
+ # FIXME : workaround guarantees a unique list of specs anyhow.
+ # FIXME : (same problem as in spack/modules.py)
+ seen = set()
+ seen_add = seen.add
+ for spec in specs_from_user_constraint:
+ specs.extend(
+ [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501
+ )
+
+ module_cls = module_types[mtype]
+ modules = [(spec, module_cls(spec).use_name)
+ for spec in specs if os.path.exists(module_cls(spec).file_name)]
+
+ module_commands = {
+ 'tcl': 'module load ',
+ 'dotkit': 'dotkit use '
+ }
+
+ d = {
+ 'command': '' if not args.shell else module_commands[mtype],
+ 'prefix': args.prefix
+ }
+
+ prompt_template = '{comment}{command}{prefix}{name}'
+ for spec, mod in modules:
+ d['comment'] = '' if not args.shell else '# {0}\n'.format(
+ spec.format())
+ d['name'] = mod
+ print(prompt_template.format(**d))
+
+
+@subcommand('find')
+def find(mtype, specs, args):
"""
- if mtype not in module_types:
- tty.die("Invalid module type: '%s'. Options are %s" %
- (mtype, comma_or(module_types)))
-
- # --------------------------------------
- def _find_modules(spec, modules_list):
- """Finds all modules and sub-modules for a spec"""
- if str(spec.version) == 'system':
- # No Spack module for system-installed packages
- return
-
- if flags.recurse_dependencies:
- for dep in spec.dependencies():
- _find_modules(dep, modules_list)
-
- mod = module_types[mtype](spec)
- if not os.path.isfile(mod.file_name):
- tty.die("No %s module is installed for %s" % (mtype, spec))
- modules_list.append((spec, mod))
-
-
- # --------------------------------------
- raw_specs = spack.cmd.parse_specs(spec_array)
- modules = set() # Modules we will load
- seen = set()
- for raw_spec in raw_specs:
-
- # ----------- Make sure the spec only resolves to ONE thing
- specs = spack.installed_db.query(raw_spec)
- if len(specs) == 0:
- tty.die("No installed packages match spec %s" % raw_spec)
-
- if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % raw_spec)
- for s in specs:
- sys.stderr.write(s.tree(color=True))
- sys.exit(1)
- spec = specs[0]
-
- # ----------- Chase down modules for it and all its dependencies
- modules_dups = list()
- _find_modules(spec, modules_dups)
-
- # Remove duplicates while keeping order
- modules_unique = list()
- for spec,mod in modules_dups:
- if mod.use_name not in seen:
- modules_unique.append((spec,mod))
- seen.add(mod.use_name)
-
- # Output...
- if flags.shell:
- module_cmd = {'tcl': 'module load', 'dotkit': 'dotkit use'}[mtype]
- for spec,mod in modules_unique:
- if flags.shell:
- print('# %s' % spec.format())
- print('%s %s%s' % (module_cmd, flags.prefix, mod.use_name))
- else:
- print(mod.use_name)
-
-def module_refresh():
- """Regenerate all module files for installed packages known to
- spack (some packages may no longer exist)."""
- specs = [s for s in spack.installed_db.query(installed=True, known=True)]
-
- for name, cls in module_types.items():
- tty.msg("Regenerating %s module files." % name)
- if os.path.isdir(cls.path):
- shutil.rmtree(cls.path, ignore_errors=False)
- mkdirp(cls.path)
- for spec in specs:
- cls(spec).write()
+ Look at all installed packages and see if the spec provided
+ matches any. If it does, check whether there is a module file
+ of type <mtype> there, and print out the name that the user
+ should type to use that package's module.
+ """
+ if len(specs) == 0:
+ raise NoMatch()
+
+ if len(specs) > 1:
+ raise MultipleMatches()
+
+ spec = specs.pop()
+ mod = module_types[mtype](spec)
+ if not os.path.isfile(mod.file_name):
+ tty.die("No %s module is installed for %s" % (mtype, spec))
+ print(mod.use_name)
+
+
+@subcommand('rm')
+def rm(mtype, specs, args):
+ """Deletes module files associated with items in specs"""
+ module_cls = module_types[mtype]
+ specs_with_modules = [
+ spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
+ modules = [module_cls(spec) for spec in specs_with_modules]
+
+ if not modules:
+ tty.msg('No module file matches your query')
+ raise SystemExit(1)
+
+ # Ask for confirmation
+ if not args.yes_to_all:
+ tty.msg('You are about to remove {0} module files for the following specs:\n'.format(mtype)) # NOQA: ignore=E501
+ spack.cmd.display_specs(specs_with_modules, long=True)
+ print('')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+ # Remove the module files
+ for s in modules:
+ s.remove()
+
+
+@subcommand('refresh')
+def refresh(mtype, specs, args):
+ """Regenerate module files for item in specs"""
+ # Print a message telling the user what is going to change
+ if not specs:
+ tty.msg('No package matches your query')
+ return
+
+ if not args.yes_to_all:
+ tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501
+ spack.cmd.display_specs(specs, long=True)
+ print('')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+ cls = module_types[mtype]
+
+ # Detect name clashes
+ writers = [cls(spec) for spec in specs]
+ file2writer = collections.defaultdict(list)
+ for item in writers:
+ file2writer[item.file_name].append(item)
+
+ if len(file2writer) != len(writers):
+ message = 'Name clashes detected in module files:\n'
+ for filename, writer_list in file2writer.items():
+ if len(writer_list) > 1:
+ message += '\nfile : {0}\n'.format(filename)
+ for x in writer_list:
+ message += 'spec : {0}\n'.format(x.spec.format(color=True))
+ tty.error(message)
+ tty.error('Operation aborted')
+ raise SystemExit(1)
+
+ # Proceed regenerating module files
+ tty.msg('Regenerating {name} module files'.format(name=mtype))
+ if os.path.isdir(cls.path) and args.delete_tree:
+ shutil.rmtree(cls.path, ignore_errors=False)
+ filesystem.mkdirp(cls.path)
+ for x in writers:
+ x.write(overwrite=True)
def module(parser, args):
- if args.module_command == 'refresh':
- module_refresh()
-
- elif args.module_command == 'find':
- module_find(args.module_type, args, args.spec)
+ # Qualifiers to be used when querying the db for specs
+ constraint_qualifiers = {
+ 'refresh': {
+ 'installed': True,
+ 'known': True
+ },
+ }
+ arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
+
+ module_type = args.module_type
+ constraint = args.constraint
+ try:
+ callbacks[args.subparser_name](module_type, args.specs, args)
+ except MultipleMatches:
+ message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501
+ tty.error(message.format(query=constraint))
+ for s in args.specs:
+ sys.stderr.write(s.format(color=True) + '\n')
+ raise SystemExit(1)
+ except NoMatch:
+ message = 'the constraint \'{query}\' matches no package, and this is not allowed in this context' # NOQA: ignore=E501
+ tty.die(message.format(query=constraint))
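The callbacks/subcommand machinery above is a plain decorator-registration pattern; stripped of the Spack specifics it reduces to the following standalone sketch (the registered functions are made up):

    callbacks = {}


    def subcommand(name):
        """Register the decorated function under `name`."""
        def decorator(callback):
            callbacks[name] = callback
            return callback
        return decorator


    @subcommand('greet')
    def greet(args):
        print('hello, {0}'.format(args))


    @subcommand('shout')
    def shout(args):
        print('hello, {0}'.format(args).upper())


    if __name__ == '__main__':
        # Dispatch on a name, exactly as module() dispatches on
        # args.subparser_name.
        callbacks['greet']('world')
        callbacks['shout']('world')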
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index a6f08d09ed..a17b7c685c 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -30,7 +30,6 @@ import llnl.util.tty as tty
import spack
import spack.cmd
import spack.repository
-from spack.cmd.find import display_specs
description = "Remove an installed package"
@@ -43,21 +42,10 @@ error_message = """You can either:
display_args = {
'long': True,
'show_flags': True,
- 'variants':True
+ 'variants': True
}
-def ask_for_confirmation(message):
- while True:
- tty.msg(message + '[y/n]')
- choice = raw_input().lower()
- if choice == 'y':
- break
- elif choice == 'n':
- raise SystemExit('Operation aborted')
- tty.warn('Please reply either "y" or "n"')
-
-
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
@@ -65,32 +53,37 @@ def setup_parser(subparser):
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " +
- "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
- "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+ "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501
+ "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
- help='Also uninstall any packages that depend on the ones given via command line.'
+ help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501
)
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
- help='Assume "yes" is the answer to every confirmation asked to the user.'
+ help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
)
- subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+ subparser.add_argument(
+ 'packages',
+ nargs=argparse.REMAINDER,
+ help="specs of packages to uninstall"
+ )
def concretize_specs(specs, allow_multiple_matches=False, force=False):
- """
- Returns a list of specs matching the non necessarily concretized specs given from cli
+ """Returns a list of specs matching the non necessarily
+ concretized specs given from cli
Args:
specs: list of specs to be matched against installed packages
- allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+ allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
- specs_from_cli = [] # List of specs that match expressions given via command line
+ # List of specs that match expressions given via command line
+ specs_from_cli = []
has_errors = False
for spec in specs:
matching = spack.installed_db.query(spec)
@@ -99,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, **display_args)
+ spack.cmd.display_specs(matching, **display_args)
print()
has_errors = True
@@ -116,8 +109,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs):
- """
- Returns a dictionary that maps a spec with a list of its installed dependents
+ """Returns a dictionary that maps a spec with a list of its
+ installed dependents
Args:
specs: list of specs to be checked for dependents
@@ -147,7 +140,7 @@ def do_uninstall(specs, force):
try:
# should work if package is known to spack
packages.append(item.package)
- except spack.repository.UnknownPackageError as e:
+ except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(item).do_uninstall(force=True)
@@ -169,17 +162,20 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages)
# Gets the list of installed specs that match the ones give via cli
- uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
- dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
+ # takes care of the case in which '-a' is given on the command line
+ uninstall_list = concretize_specs(specs, args.all, args.force)
+ dependent_list = installed_dependents(
+ uninstall_list) # takes care of '-d'
# Process dependent_list and update uninstall_list
has_error = False
if dependent_list and not args.dependents and not args.force:
for spec, lst in dependent_list.items():
- tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
+ tty.error("Will not uninstall %s" %
+ spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
- display_specs(lst, **display_args)
+ spack.cmd.display_specs(lst, **display_args)
print('')
has_error = True
elif args.dependents:
@@ -188,14 +184,14 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))
if has_error:
- tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
+ tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
- display_specs(uninstall_list, **display_args)
+ spack.cmd.display_specs(uninstall_list, **display_args)
print('')
- ask_for_confirmation('Do you want to proceed ? ')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 84179e1469..8b5e96f97d 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -328,6 +328,11 @@ section_schemas = {
'anyOf': [
{
'properties': {
+ 'hash_length': {
+ 'type': 'integer',
+ 'minimum': 0,
+ 'default': 7
+ },
'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': {
@@ -492,8 +497,15 @@ class ConfigScope(object):
"""Empty cached config information."""
self.sections = {}
+"""Default configuration scope is the lowest-level scope. These are
+ versioned with Spack and can be overridden by sites or users."""
+ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults'))
-ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
+"""Site configuration is per spack instance, for sites or projects.
+ No site-level configs should be checked into spack by default."""
+ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
+
+"""User configuration can override both spack defaults and site config."""
ConfigScope('user', os.path.expanduser('~/.spack'))
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index a2e528d295..72656b8ae0 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -188,6 +188,8 @@ def parse_config_options(module_generator):
#####
# Automatic loading loads
+ module_file_actions['hash_length'] = module_configuration.get(
+ 'hash_length', 7)
module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites
@@ -237,6 +239,7 @@ class EnvModule(object):
formats = {}
class __metaclass__(type):
+
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in CONFIGURATION[
@@ -295,7 +298,9 @@ class EnvModule(object):
if constraint in self.spec:
suffixes.append(suffix)
# Always append the hash to make the module file unique
- suffixes.append(self.spec.dag_hash())
+ hash_length = configuration.pop('hash_length', 7)
+ if hash_length != 0:
+ suffixes.append(self.spec.dag_hash(length=hash_length))
name = '-'.join(suffixes)
return name
@@ -338,7 +343,7 @@ class EnvModule(object):
return False
- def write(self):
+ def write(self, overwrite=False):
"""
Writes out a module file for this object.
@@ -399,6 +404,15 @@ class EnvModule(object):
for line in self.module_specific_content(module_configuration):
module_file_content += line
+ # Print a warning in case I am accidentally overwriting
+ # a module file that is already there (name clash)
+ if not overwrite and os.path.exists(self.file_name):
+ message = 'Module file already exists : skipping creation\n'
+ message += 'file : {0.file_name}\n'
+ message += 'spec : {0.spec}'
+ tty.warn(message.format(self))
+ return
+
# Dump to file
with open(self.file_name, 'w') as f:
f.write(module_file_content)
@@ -454,7 +468,7 @@ class EnvModule(object):
class Dotkit(EnvModule):
name = 'dotkit'
-
+ path = join_path(spack.share_path, 'dotkit')
environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
@@ -466,7 +480,7 @@ class Dotkit(EnvModule):
@property
def file_name(self):
- return join_path(spack.share_path, "dotkit", self.spec.architecture,
+ return join_path(self.path, self.spec.architecture,
'%s.dk' % self.use_name)
@property
@@ -494,7 +508,7 @@ class Dotkit(EnvModule):
class TclModule(EnvModule):
name = 'tcl'
-
+ path = join_path(spack.share_path, "modules")
environment_modifications_formats = {
PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
@@ -514,7 +528,7 @@ class TclModule(EnvModule):
@property
def file_name(self):
- return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name)
+ return join_path(self.path, self.spec.architecture, self.use_name)
@property
def header(self):
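The hash_length handling above, together with the matching schema entry in config.py, decides how much of the DAG hash ends up in a module file name: the value is read per module type from modules.yaml, defaults to 7, and a value of 0 drops the hash entirely. A standalone sketch of that naming rule, using made-up spec fields instead of real Spack objects:

    import hashlib


    def module_use_name(name, version, compiler, dag_hash, hash_length=7):
        """Mimic use_name: join the descriptive parts, then append a
        truncated hash unless hash_length is 0."""
        parts = ['{0}-{1}'.format(name, version), compiler]
        if hash_length != 0:
            parts.append(dag_hash[:hash_length])
        return '-'.join(parts)


    fake_hash = hashlib.sha1(b'zlib@1.2.8%gcc@4.9.3').hexdigest()
    print(module_use_name('zlib', '1.2.8', 'gcc-4.9.3', fake_hash))
    print(module_use_name('zlib', '1.2.8', 'gcc-4.9.3', fake_hash,
                          hash_length=0))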
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 6a92c548fb..c916bfaaa2 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -1439,6 +1439,13 @@ def install_dependency_symlinks(pkg, spec, prefix):
flatten_dependencies(spec, prefix)
+def use_cray_compiler_names():
+ """Compiler names for builds that rely on cray compiler names."""
+ os.environ['CC'] = 'cc'
+ os.environ['CXX'] = 'CC'
+ os.environ['FC'] = 'ftn'
+ os.environ['F77'] = 'ftn'
+
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py
index 8dc575bb71..2b065d5bbd 100644
--- a/lib/spack/spack/platforms/cray_xc.py
+++ b/lib/spack/spack/platforms/cray_xc.py
@@ -4,15 +4,15 @@ from spack.operating_systems.linux_distro import LinuxDistro
from spack.operating_systems.cnl import Cnl
from spack.util.executable import which
+
class CrayXc(Platform):
priority = 20
front_end = 'sandybridge'
back_end = 'ivybridge'
default = 'ivybridge'
- front_os = "SuSE11"
back_os = "CNL10"
- default_os = "CNL10"
+ default_os = "CNL10"
def __init__(self):
''' Since cori doesn't have ivybridge as a front end it's better
@@ -33,21 +33,37 @@ class CrayXc(Platform):
# Could switch to use modules and fe targets for front end
# Currently using compilers by path for front end.
self.add_target('sandybridge', Target('sandybridge'))
- self.add_target('ivybridge',
+ self.add_target('ivybridge',
Target('ivybridge', 'craype-ivybridge'))
- self.add_target('haswell',
- Target('haswell','craype-haswell'))
+ self.add_target('haswell',
+ Target('haswell', 'craype-haswell'))
- self.add_operating_system('SuSE11', LinuxDistro())
+ # Front end of the cray platform is a linux distro.
+ linux_dist = LinuxDistro()
+ self.front_os = str(linux_dist)
+ self.add_operating_system(str(linux_dist), linux_dist)
self.add_operating_system('CNL10', Cnl())
@classmethod
+ def setup_platform_environment(self, pkg, env):
+ """ Change the linker to default dynamic to be more
+ similar to linux/standard linker behavior
+ """
+ env.set('CRAYPE_LINK_TYPE', 'dynamic')
+ cray_wrapper_names = join_path(spack.build_env_path, 'cray')
+ if os.path.isdir(cray_wrapper_names):
+ env.prepend_path('PATH', cray_wrapper_names)
+
+ @classmethod
def detect(self):
- if os.path.exists('/cray_home'):
- cc_verbose = which('cc')
- cc_verbose.add_default_arg('-craype-verbose')
- text = cc_verbose(output=str, error=str, ignore_errors=True).split()
+ try:
+ cc_verbose = which('ftn')
+ text = cc_verbose('-craype-verbose',
+ output=str, error=str,
+ ignore_errors=True).split()
if '-D__CRAYXC' in text:
return True
- return False
-
+ else:
+ return False
+ except:
+ return False
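The rewritten detect() probes the Cray ftn wrapper instead of testing for /cray_home. The same idea rendered as a standalone script, using plain subprocess rather than Spack's which/Executable helpers so it simply reports False on machines without ftn:

    import subprocess


    def looks_like_cray_xc():
        """Ask `ftn -craype-verbose` to describe itself and look for the
        -D__CRAYXC define, mirroring the detection logic above."""
        try:
            out = subprocess.check_output(['ftn', '-craype-verbose'],
                                          stderr=subprocess.STDOUT)
        except (OSError, subprocess.CalledProcessError):
            return False
        return b'-D__CRAYXC' in out


    if __name__ == '__main__':
        print(looks_like_cray_xc())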
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 9f7da46a1a..a849d5f350 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -40,7 +40,8 @@ test_names = [
'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml',
'optional_deps', 'make_executable', 'build_system_guess', 'lock',
'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
- 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd'
+ 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd',
+ 'cmd.module'
]
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 371e9650e0..fa82db7733 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -27,11 +27,7 @@
import spack.cmd.find
import unittest
-
-class Bunch(object):
-
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
+from spack.util.pattern import Bunch
class FindTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py
new file mode 100644
index 0000000000..36a4a73fe6
--- /dev/null
+++ b/lib/spack/spack/test/cmd/module.py
@@ -0,0 +1,83 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import os.path
+
+import spack.cmd.module as module
+import spack.modules as modules
+import spack.test.mock_database
+
+
+class TestModule(spack.test.mock_database.MockDatabase):
+
+ def _get_module_files(self, args):
+ return [
+ modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501
+ ]
+
+ def test_module_common_operations(self):
+ parser = argparse.ArgumentParser()
+ module.setup_parser(parser)
+ # Try to remove a non existing module [tcl]
+ args = parser.parse_args(['rm', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Remove existing modules [tcl]
+ args = parser.parse_args(['rm', '-y', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+ # Add them back [tcl]
+ args = parser.parse_args(['refresh', '-y', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ # TODO : test the --delete-tree option
+ # TODO : this requires having a separate directory for test modules
+ # Try to find a module with multiple matches
+ args = parser.parse_args(['find', 'mpileaks'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Try to find a module with no matches
+ args = parser.parse_args(['find', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Try to find a module
+ args = parser.parse_args(['find', 'libelf'])
+ module.module(parser, args)
+ # Remove existing modules [dotkit]
+ args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+ # Add them back [dotkit]
+ args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ # TODO : add tests for loads and find to check the prompt format
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 6d4bcb1039..bc5e9d2ffe 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -28,42 +28,50 @@ import functools
def composite(interface=None, method_list=None, container=list):
- """
- Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given
- interface.
+ """Returns a class decorator that patches a class adding all the methods
+ it needs to be a composite for a given interface.
- :param interface: class exposing the interface to which the composite object must conform. Only non-private and
- non-special methods will be taken into account
+ :param interface: class exposing the interface to which the composite
+ object must conform. Only non-private and non-special methods will be
+ taken into account
:param method_list: names of methods that should be part of the composite
- :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract.
- The composite class will expose the container API to manage object composition
+ :param container: container for the composite object (default = list).
+ Must fulfill the MutableSequence contract. The composite class will expose
+ the container API to manage object composition
:return: class decorator
"""
- # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't
- # The patched class returned by the decorator will inherit from the container class to expose the
- # interface needed to manage objects composition
+ # Check if container fulfills the MutableSequence contract and raise an
+ # exception if it doesn't. The patched class returned by the decorator will
+ # inherit from the container class to expose the interface needed to manage
+ # object composition
if not issubclass(container, collections.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract")
- # Check if at least one of the 'interface' or the 'method_list' arguments are defined
+ # Check if at least one of the 'interface' or the 'method_list' arguments
+ # are defined
if interface is None and method_list is None:
- raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")
+ raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501
def cls_decorator(cls):
- # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden
+ # Retrieve the base class of the composite. Inspect its methods and
+ # decide which ones will be overridden
def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_')
- # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the
- # descriptor below to any method that needs to be patched.
+ # Patch the behavior of each of the methods in the previous list.
+ # This is done associating an instance of the descriptor below to
+ # any method that needs to be patched.
class IterateOver(object):
+ """Decorator used to patch methods in a composite.
+
+ It iterates over all the items in the instance containing the
+ associated attribute and calls for each of them an attribute
+ with the same name
"""
- Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
- associated attribute and calls for each of them an attribute with the same name
- """
+
def __init__(self, name, func=None):
self.name = name
self.func = func
@@ -72,8 +80,9 @@ def composite(interface=None, method_list=None, container=list):
def getter(*args, **kwargs):
for item in instance:
getattr(item, self.name)(*args, **kwargs)
- # If we are using this descriptor to wrap a method from an interface, then we must conditionally
- # use the `functools.wraps` decorator to set the appropriate fields.
+ # If we are using this descriptor to wrap a method from an
+ # interface, then we must conditionally use the
+ # `functools.wraps` decorator to set the appropriate fields
if self.func is not None:
getter = functools.wraps(self.func)(getter)
return getter
@@ -81,7 +90,8 @@ def composite(interface=None, method_list=None, container=list):
dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
+ # python@2.7: method_list_dict = {name: IterateOver(name) for name
+ # in method_list}
method_list_dict = {}
for name in method_list:
method_list_dict[name] = IterateOver(name)
@@ -89,28 +99,40 @@ def composite(interface=None, method_list=None, container=list):
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
##########
- # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
+ # python@2.7: interface_methods = {name: method for name, method in
+ # inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {}
- for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
+ for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501
interface_methods[name] = method
##########
- # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
+ # python@2.7: interface_methods_dict = {name: IterateOver(name,
+ # method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {}
for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method)
##########
dictionary_for_type_call.update(interface_methods_dict)
- # Get the methods that are defined in the scope of the composite class and override any previous definition
+ # Get the methods that are defined in the scope of the composite
+ # class and override any previous definition
##########
- # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
+ # python@2.7: cls_method = {name: method for name, method in
+ # inspect.getmembers(cls, predicate=inspect.ismethod)}
cls_method = {}
- for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
+ for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501
cls_method[name] = method
##########
dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
- wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
+ wrapper_class = type(cls.__name__, (cls, container),
+ dictionary_for_type_call)
return wrapper_class
return cls_decorator
+
+
+class Bunch(object):
+ """Carries a bunch of named attributes (from Alex Martelli bunch)"""
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
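To close out the pattern module, a small usage sketch for composite(); the Hook and PrintingHook classes are invented for illustration and are not part of this commit:

    from spack.util.pattern import composite


    class Hook(object):
        """Interface whose public methods the composite forwards."""

        def run(self):
            pass


    @composite(interface=Hook)
    class HookComposite(object):
        pass


    class PrintingHook(Hook):

        def __init__(self, message):
            self.message = message

        def run(self):
            print(self.message)


    hooks = HookComposite()             # behaves like a list of Hook objects
    hooks.append(PrintingHook('first'))
    hooks.append(PrintingHook('second'))
    hooks.run()                         # calls run() on every contained hook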