author     alalazo <massimiliano.culpo@googlemail.com>  2016-07-20 21:02:19 +0200
committer  alalazo <massimiliano.culpo@googlemail.com>  2016-07-20 21:02:19 +0200
commit     40cb314638a01be2cf2e719a48cfb381b039a6a7 (patch)
tree       bbbb68691fbd47032b6313ecc8e3debfe7426830 /lib
parent     6c00a13ed5d76e5cb0e8e493a20c498bbfa1111c (diff)
parent     79e066d53c2baa4bf84bdaa84502e7d282020c0f (diff)
download   spack-40cb314638a01be2cf2e719a48cfb381b039a6a7.tar.gz
           spack-40cb314638a01be2cf2e719a48cfb381b039a6a7.tar.bz2
           spack-40cb314638a01be2cf2e719a48cfb381b039a6a7.tar.xz
           spack-40cb314638a01be2cf2e719a48cfb381b039a6a7.zip
Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases
Conflicts: lib/spack/spack/package.py
Diffstat (limited to 'lib')
-rwxr-xr-x  lib/spack/env/cc | 4
l---------  lib/spack/env/craype/CC | 1
l---------  lib/spack/env/craype/cc | 1
l---------  lib/spack/env/craype/ftn | 1
-rw-r--r--  lib/spack/llnl/util/filesystem.py | 12
-rw-r--r--  lib/spack/spack/cmd/__init__.py | 120
-rw-r--r--  lib/spack/spack/cmd/bootstrap.py | 49
-rw-r--r--  lib/spack/spack/cmd/checksum.py | 21
-rw-r--r--  lib/spack/spack/cmd/common/__init__.py | 24
-rw-r--r--  lib/spack/spack/cmd/common/arguments.py | 96
-rw-r--r--  lib/spack/spack/cmd/create.py | 145
-rw-r--r--  lib/spack/spack/cmd/find.py | 85
-rw-r--r--  lib/spack/spack/cmd/info.py | 7
-rw-r--r--  lib/spack/spack/cmd/module.py | 328
-rw-r--r--  lib/spack/spack/cmd/package-list.py | 24
-rw-r--r--  lib/spack/spack/cmd/uninstall.py | 62
-rw-r--r--  lib/spack/spack/config.py | 14
-rw-r--r--  lib/spack/spack/database.py | 25
-rw-r--r--  lib/spack/spack/directory_layout.py | 17
-rw-r--r--  lib/spack/spack/modules.py | 26
-rw-r--r--  lib/spack/spack/package.py | 23
-rw-r--r--  lib/spack/spack/platforms/cray_xc.py | 29
-rw-r--r--  lib/spack/spack/preferred_packages.py | 105
-rw-r--r--  lib/spack/spack/spec.py | 292
-rw-r--r--  lib/spack/spack/test/__init__.py | 20
-rw-r--r--  lib/spack/spack/test/architecture.py | 64
-rw-r--r--  lib/spack/spack/test/build_system_guess.py (renamed from lib/spack/spack/test/configure_guess.py) | 26
-rw-r--r--  lib/spack/spack/test/cmd/find.py | 6
-rw-r--r--  lib/spack/spack/test/cmd/module.py | 83
-rw-r--r--  lib/spack/spack/test/modules.py | 3
-rw-r--r--  lib/spack/spack/util/pattern.py | 78
31 files changed, 1153 insertions, 638 deletions
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index bf98b4c354..c6bb50d261 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -110,13 +110,13 @@ case "$command" in
comp="CXX"
lang_flags=CXX
;;
- f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
+ ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
;;
- f77|gfortran|ifort|pgfortran|xlf|nagfor)
+ f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
diff --git a/lib/spack/env/craype/CC b/lib/spack/env/craype/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/CC
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/cc b/lib/spack/env/craype/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/cc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/ftn b/lib/spack/env/craype/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/ftn
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index e800c6717a..6e4cd338fe 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -42,7 +42,7 @@ __all__ = ['set_install_permissions', 'install', 'install_tree',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
- 'fix_darwin_install_name', 'to_link_flags']
+ 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
def filter_file(regex, repl, *filenames, **kwargs):
@@ -431,6 +431,13 @@ def fix_darwin_install_name(path):
break
+def to_lib_name(library):
+ """Transforms a path to the library /path/to/lib<name>.xyz into <name>
+ """
+ # Assume libXYZ.suffix
+ return os.path.basename(library)[3:].split(".")[0]
+
+
def to_link_flags(library):
"""Transforms a path to a <library> into linking flags -L<dir> -l<name>.
@@ -438,8 +445,7 @@ def to_link_flags(library):
A string of linking flags.
"""
dir = os.path.dirname(library)
- # Assume libXYZ.suffix
- name = os.path.basename(library)[3:].split(".")[0]
+ name = to_lib_name(library)
res = '-L%s -l%s' % (dir, name)
return res
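The filesystem change above is a pure refactor: the "lib<name>.suffix" mangling that to_link_flags() already performed is split out into a reusable to_lib_name() helper. A self-contained sketch of the same behaviour (no Spack imports; the example paths are hypothetical):

    import os.path

    def to_lib_name(library):
        # /path/to/libfoo.so.1 -> 'foo' (assumes the usual 'lib' prefix)
        return os.path.basename(library)[3:].split(".")[0]

    def to_link_flags(library):
        # /path/to/libfoo.so.1 -> '-L/path/to -lfoo'
        return '-L%s -l%s' % (os.path.dirname(library), to_lib_name(library))

    assert to_lib_name('/usr/lib/libz.so.1') == 'z'
    assert to_link_flags('/usr/lib/libz.so.1') == '-L/usr/lib -lz'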
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 672999159c..230115df50 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -27,16 +27,18 @@ import re
import sys
import llnl.util.tty as tty
-from llnl.util.lang import attr_setdefault
-
import spack
-import spack.spec
import spack.config
+import spack.spec
+from llnl.util.lang import *
+from llnl.util.tty.colify import *
+from llnl.util.tty.color import *
#
# Settings for commands that modify configuration
#
-# Commands that modify confguration By default modify the *highest* priority scope.
+# Commands that modify confguration By default modify the *highest*
+# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
# Commands that list confguration list *all* scopes by default.
default_list_scope = None
@@ -48,7 +50,7 @@ python_list = list
ignore_files = r'^\.|^__init__.py$|^#'
SETUP_PARSER = "setup_parser"
-DESCRIPTION = "description"
+DESCRIPTION = "description"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
@@ -71,7 +73,7 @@ def get_module(name):
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
- attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
+ attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
@@ -101,17 +103,17 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
- spec.concretize() # implies normalize
+ spec.concretize() # implies normalize
elif normalize:
spec.normalize()
return specs
- except spack.parse.ParseError, e:
+ except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)
- except spack.spec.SpecError, e:
+ except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)
@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
- return line_list[:max_num-1] + ['...'] + line_list[-1:]
+ return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
@@ -138,10 +140,104 @@ def disambiguate_spec(spec):
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
+ args = ["%s matches multiple packages." % spec,
+ "Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
+
+
+def ask_for_confirmation(message):
+ while True:
+ tty.msg(message + '[y/n]')
+ choice = raw_input().lower()
+ if choice == 'y':
+ break
+ elif choice == 'n':
+ raise SystemExit('Operation aborted')
+ tty.warn('Please reply either "y" or "n"')
+
+
+def gray_hash(spec, length):
+ return colorize('@K{%s}' % spec.dag_hash(length))
+
+
+def display_specs(specs, **kwargs):
+ mode = kwargs.get('mode', 'short')
+ hashes = kwargs.get('long', False)
+ namespace = kwargs.get('namespace', False)
+ flags = kwargs.get('show_flags', False)
+ variants = kwargs.get('variants', False)
+
+ hlen = 7
+ if kwargs.get('very_long', False):
+ hashes = True
+ hlen = None
+
+ nfmt = '.' if namespace else '_'
+ ffmt = '$%+' if flags else ''
+ vfmt = '$+' if variants else ''
+ format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
+
+ # Make a dict with specs keyed by architecture and compiler.
+ index = index_by(specs, ('architecture', 'compiler'))
+
+ # Traverse the index and print out each package
+ for i, (architecture, compiler) in enumerate(sorted(index)):
+ if i > 0:
+ print
+
+ header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+ architecture, spack.spec.compiler_color,
+ compiler)
+ tty.hline(colorize(header), char='-')
+
+ specs = index[(architecture, compiler)]
+ specs.sort()
+
+ abbreviated = [s.format(format_string, color=True) for s in specs]
+ if mode == 'paths':
+ # Print one spec per line along with prefix path
+ width = max(len(s) for s in abbreviated)
+ width += 2
+ format = " %%-%ds%%s" % width
+
+ for abbrv, spec in zip(abbreviated, specs):
+ if hashes:
+ print(gray_hash(spec, hlen), )
+ print(format % (abbrv, spec.prefix))
+
+ elif mode == 'deps':
+ for spec in specs:
+ print(spec.tree(
+ format=format_string,
+ color=True,
+ indent=4,
+ prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
+
+ elif mode == 'short':
+ # Print columns of output if not printing flags
+ if not flags:
+
+ def fmt(s):
+ string = ""
+ if hashes:
+ string += gray_hash(s, hlen) + ' '
+ string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
+
+ return string
+
+ colify(fmt(s) for s in specs)
+ # Print one entry per line if including flags
+ else:
+ for spec in specs:
+ # Print the hash if necessary
+ hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+ print(hsh + spec.format(format_string, color=True) + '\n')
+
+ else:
+ raise ValueError(
+ "Invalid mode for display_specs: %s. Must be one of (paths,"
+ "deps, short)." % mode) # NOQA: ignore=E501
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index bec11439b5..60e2bd3a11 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from subprocess import check_call
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
@@ -31,26 +30,49 @@ from llnl.util.filesystem import join_path, mkdirp
import spack
from spack.util.executable import which
+_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
+
description = "Create a new installation of spack in another prefix"
+
def setup_parser(subparser):
- subparser.add_argument('prefix', help="names of prefix where we should install spack")
+ subparser.add_argument(
+ '-r', '--remote', action='store', dest='remote',
+ help="name of the remote to bootstrap from", default='origin')
+ subparser.add_argument(
+ 'prefix',
+ help="names of prefix where we should install spack")
-def get_origin_url():
+def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git')
git = which('git', required=True)
- origin_url = git(
- '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
- output=str)
- return origin_url.strip()
+ try:
+ branch = git('symbolic-ref', '--short', 'HEAD', output=str)
+ except ProcessError:
+ branch = 'develop'
+ tty.warn('No branch found; using default branch: %s' % branch)
+ if remote == 'origin' and \
+ branch not in ('master', 'develop'):
+ branch = 'develop'
+ tty.warn('Unknown branch found; using default branch: %s' % branch)
+ try:
+ origin_url = git(
+ '--git-dir=%s' % git_dir,
+ 'config', '--get', 'remote.%s.url' % remote,
+ output=str)
+ except ProcessError:
+ origin_url = _SPACK_UPSTREAM
+ tty.warn('No git repository found; '
+ 'using default upstream URL: %s' % origin_url)
+ return (origin_url.strip(), branch.strip())
def bootstrap(parser, args):
- origin_url = get_origin_url()
+ origin_url, branch = get_origin_info(args.remote)
prefix = args.prefix
- tty.msg("Fetching spack from origin: %s" % origin_url)
+ tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
if os.path.isfile(prefix):
tty.die("There is already a file at %s" % prefix)
@@ -62,7 +84,8 @@ def bootstrap(parser, args):
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
- tty.die("There are already files there! Delete these files before boostrapping spack.",
+ tty.die("There are already files there! "
+ "Delete these files before boostrapping spack.",
*files_in_the_way)
tty.msg("Installing:",
@@ -73,8 +96,10 @@ def bootstrap(parser, args):
git = which('git', required=True)
git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url)
- git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
- git('reset', '--hard', 'origin/master', '-q')
+ git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
+ '-n', '-q')
+ git('reset', '--hard', 'origin/%s' % branch, '-q')
+ git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix)
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 95bd4771ed..aedb0fd99c 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -42,7 +42,8 @@ def setup_parser(subparser):
'--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
- 'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
+ 'versions', nargs=argparse.REMAINDER,
+ help='Versions to generate checksums for')
def get_checksums(versions, urls, **kwargs):
@@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs):
with Stage(url, keep=keep_stage) as stage:
stage.fetch()
if i == 0 and first_stage_function:
- first_stage_function(stage)
+ first_stage_function(stage, url)
- hashes.append((version,
- spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
+ hashes.append((version, spack.util.crypto.checksum(
+ hashlib.md5, stage.archive_file)))
i += 1
except FailedDownloadError as e:
tty.msg("Failed to fetch %s" % url)
@@ -79,12 +80,12 @@ def checksum(parser, args):
# If the user asked for specific versions, use those.
if args.versions:
versions = {}
- for v in args.versions:
- v = ver(v)
- if not isinstance(v, Version):
+ for version in args.versions:
+ version = ver(version)
+ if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.")
- versions[v] = pkg.url_for_version(v)
+ versions[version] = pkg.url_for_version(version)
else:
versions = pkg.fetch_remote_versions()
if not versions:
@@ -111,5 +112,7 @@ def checksum(parser, args):
if not version_hashes:
tty.die("Could not fetch any versions for %s" % pkg.name)
- version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
+ version_lines = [
+ " version('%s', '%s')" % (v, h) for v, h in version_hashes
+ ]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/cmd/common/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
new file mode 100644
index 0000000000..af04170824
--- /dev/null
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -0,0 +1,96 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import argparse
+
+import spack.modules
+from spack.util.pattern import Bunch
+__all__ = ['add_common_arguments']
+
+_arguments = {}
+
+
+def add_common_arguments(parser, list_of_arguments):
+ for argument in list_of_arguments:
+ if argument not in _arguments:
+ message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501
+ raise KeyError(message.format(argument))
+ x = _arguments[argument]
+ parser.add_argument(*x.flags, **x.kwargs)
+
+
+class ConstraintAction(argparse.Action):
+ """Constructs a list of specs based on a constraint given on the command line
+
+ An instance of this class is supposed to be used as an argument action
+ in a parser. It will read a constraint and will attach a list of matching
+ specs to the namespace
+ """
+ qualifiers = {}
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ # Query specs from command line
+ d = self.qualifiers.get(namespace.subparser_name, {})
+ specs = [s for s in spack.installed_db.query(**d)]
+ values = ' '.join(values)
+ if values:
+ specs = [x for x in specs if x.satisfies(values, strict=True)]
+ namespace.specs = specs
+
+parms = Bunch(
+ flags=('constraint',),
+ kwargs={
+ 'nargs': '*',
+ 'help': 'Constraint to select a subset of installed packages',
+ 'action': ConstraintAction
+ })
+_arguments['constraint'] = parms
+
+parms = Bunch(
+ flags=('-m', '--module-type'),
+ kwargs={
+ 'help': 'Type of module files',
+ 'default': 'tcl',
+ 'choices': spack.modules.module_types
+ })
+_arguments['module_type'] = parms
+
+parms = Bunch(
+ flags=('-y', '--yes-to-all'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'yes_to_all',
+ 'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
+ })
+_arguments['yes_to_all'] = parms
+
+parms = Bunch(
+ flags=('-r', '--dependencies'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'recurse_dependencies',
+ 'help': 'Recursively traverse spec dependencies'
+ })
+_arguments['recurse_dependencies'] = parms
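The new common/arguments.py module centralizes argparse options shared by several commands: a sub-command asks for them by name and gets consistent flags, defaults and help text. A self-contained re-creation of the pattern (Bunch is replaced by a namedtuple here, and the sample registry holds only one entry):

    import argparse
    from collections import namedtuple

    Bunch = namedtuple('Bunch', ['flags', 'kwargs'])
    _arguments = {
        'yes_to_all': Bunch(
            flags=('-y', '--yes-to-all'),
            kwargs={'action': 'store_true', 'dest': 'yes_to_all',
                    'help': 'Assume "yes" is the answer to every confirmation'}),
    }

    def add_common_arguments(parser, list_of_arguments):
        for argument in list_of_arguments:
            if argument not in _arguments:
                raise KeyError('Trying to add the non existing argument "{0}"'
                               .format(argument))
            x = _arguments[argument]
            parser.add_argument(*x.flags, **x.kwargs)

    parser = argparse.ArgumentParser()
    add_common_arguments(parser, ['yes_to_all'])
    assert parser.parse_args(['-y']).yes_to_all is True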
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index c9fa687b74..2c440096d1 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -103,6 +103,64 @@ ${versions}
${install}
""")
+# Build dependencies and extensions
+dependencies_dict = {
+ 'autotools': "# depends_on('foo')",
+ 'cmake': "depends_on('cmake')",
+ 'scons': "depends_on('scons')",
+ 'python': "extends('python')",
+ 'R': "extends('R')",
+ 'octave': "extends('octave')",
+ 'unknown': "# depends_on('foo')"
+}
+
+# Default installation instructions
+install_dict = {
+ 'autotools': """\
+ # FIXME: Modify the configure line to suit your build system here.
+ configure('--prefix={0}'.format(prefix))
+
+ # FIXME: Add logic to build and install here.
+ make()
+ make('install')""",
+
+ 'cmake': """\
+ with working_dir('spack-build', create=True):
+ # FIXME: Modify the cmake line to suit your build system here.
+ cmake('..', *std_cmake_args)
+
+ # FIXME: Add logic to build and install here.
+ make()
+ make('install')""",
+
+ 'scons': """\
+ # FIXME: Add logic to build and install here.
+ scons('prefix={0}'.format(prefix))
+ scons('install')""",
+
+ 'python': """\
+ # FIXME: Add logic to build and install here.
+ python('setup.py', 'install', '--prefix={0}'.format(prefix))""",
+
+ 'R': """\
+ # FIXME: Add logic to build and install here.
+ R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+ self.stage.source_path)""",
+
+ 'octave': """\
+ # FIXME: Add logic to build and install here.
+ octave('--quiet', '--norc',
+ '--built-in-docstrings-file=/dev/null',
+ '--texi-macros-file=/dev/null',
+ '--eval', 'pkg prefix {0}; pkg install {1}'.format(
+ prefix, self.stage.archive_file))""",
+
+ 'unknown': """\
+ # FIXME: Unknown build system
+ make()
+ make('install')"""
+}
+
def make_version_calls(ver_hash_tuples):
"""Adds a version() call to the package for each version found."""
@@ -133,60 +191,17 @@ def setup_parser(subparser):
setup_parser.subparser = subparser
-class ConfigureGuesser(object):
- def __call__(self, stage):
- """Try to guess the type of build system used by the project.
- Set any necessary build dependencies or extensions.
- Set the appropriate default installation instructions."""
-
- # Build dependencies and extensions
- dependenciesDict = {
- 'autotools': "# depends_on('foo')",
- 'cmake': "depends_on('cmake', type='build')",
- 'scons': "depends_on('scons', type='build')",
- 'python': "extends('python', type=nolink)",
- 'R': "extends('R')",
- 'unknown': "# depends_on('foo')"
- }
-
- # Default installation instructions
- installDict = {
- 'autotools': """\
- # FIXME: Modify the configure line to suit your build system here.
- configure('--prefix={0}'.format(prefix))
-
- # FIXME: Add logic to build and install here.
- make()
- make('install')""",
-
- 'cmake': """\
- with working_dir('spack-build', create=True):
- # FIXME: Modify the cmake line to suit your build system here.
- cmake('..', *std_cmake_args)
+class BuildSystemGuesser(object):
+ def __call__(self, stage, url):
+ """Try to guess the type of build system used by a project based on
+ the contents of its archive or the URL it was downloaded from."""
- # FIXME: Add logic to build and install here.
- make()
- make('install')""",
-
- 'scons': """\
- # FIXME: Add logic to build and install here.
- scons('prefix={0}'.format(prefix))
- scons('install')""",
-
- 'python': """\
- # FIXME: Add logic to build and install here.
- python('setup.py', 'install', '--prefix={0}'.format(prefix))""",
-
- 'R': """\
- # FIXME: Add logic to build and install here.
- R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
- self.stage.source_path)""",
-
- 'unknown': """\
- # FIXME: Unknown build system
- make()
- make('install')"""
- }
+ # Most octave extensions are hosted on Octave-Forge:
+ # http://octave.sourceforge.net/index.html
+ # They all have the same base URL.
+ if 'downloads.sourceforge.net/octave/' in url:
+ self.build_system = 'octave'
+ return
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
@@ -224,12 +239,6 @@ class ConfigureGuesser(object):
self.build_system = build_system
- # Set any necessary build dependencies or extensions.
- self.dependencies = dependenciesDict[build_system]
-
- # Set the appropriate default installation instructions
- self.install = installDict[build_system]
-
def guess_name_and_version(url, args):
# Try to deduce name and version of the new package from the URL
@@ -334,8 +343,8 @@ def create(parser, args):
# Fetch tarballs (prompting user if necessary)
versions, urls = fetch_tarballs(url, name, version)
- # Try to guess what configure system is used.
- guesser = ConfigureGuesser()
+ # Try to guess what build system is used.
+ guesser = BuildSystemGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions, urls,
first_stage_function=guesser,
@@ -344,13 +353,13 @@ def create(parser, args):
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s" % name)
- # Prepend 'py-' to python package names, by convention.
+ # Add prefix to package name if it is an extension.
if guesser.build_system == 'python':
- name = 'py-%s' % name
-
- # Prepend 'r-' to R package names, by convention.
+ name = 'py-{0}'.format(name)
if guesser.build_system == 'R':
- name = 'r-%s' % name
+ name = 'r-{0}'.format(name)
+ if guesser.build_system == 'octave':
+ name = 'octave-{0}'.format(name)
# Create a directory for the new package.
pkg_path = repo.filename_for_package_name(name)
@@ -367,8 +376,8 @@ def create(parser, args):
class_name=mod_to_class(name),
url=url,
versions=make_version_calls(ver_hash_tuples),
- dependencies=guesser.dependencies,
- install=guesser.install))
+ dependencies=dependencies_dict[guesser.build_system],
+ install=install_dict[guesser.build_system]))
# If everything checks out, go ahead and edit.
spack.editor(pkg_path)
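ConfigureGuesser becomes BuildSystemGuesser and now also receives the download URL, because Octave-Forge extensions can be recognized from the URL alone before the archive is opened. A sketch of that shortcut (the function name is mine; the real code sets self.build_system on the guesser instance):

    def guess_from_url(url):
        # All Octave-Forge extensions share the same download base URL.
        if 'downloads.sourceforge.net/octave/' in url:
            return 'octave'
        return None  # fall through to inspecting the archive contents

    assert guess_from_url(
        'http://downloads.sourceforge.net/octave/signal-1.3.2.tar.gz') == 'octave'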
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 3ec671f93f..d3ea38c573 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -31,7 +31,7 @@ import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
-from llnl.util.lang import *
+from spack.cmd import display_specs
description = "Find installed spack packages"
@@ -104,89 +104,6 @@ def setup_parser(subparser):
help='optional specs to filter results')
-def gray_hash(spec, length):
- return colorize('@K{%s}' % spec.dag_hash(length))
-
-
-def display_specs(specs, **kwargs):
- mode = kwargs.get('mode', 'short')
- hashes = kwargs.get('long', False)
- namespace = kwargs.get('namespace', False)
- flags = kwargs.get('show_flags', False)
- variants = kwargs.get('variants', False)
-
- hlen = 7
- if kwargs.get('very_long', False):
- hashes = True
- hlen = None
-
- nfmt = '.' if namespace else '_'
- ffmt = '$%+' if flags else ''
- vfmt = '$+' if variants else ''
- format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
-
- # Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
-
- # Traverse the index and print out each package
- for i, (architecture, compiler) in enumerate(sorted(index)):
- if i > 0:
- print
-
- header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
- architecture, spack.spec.compiler_color,
- compiler)
- tty.hline(colorize(header), char='-')
-
- specs = index[(architecture, compiler)]
- specs.sort()
-
- abbreviated = [s.format(format_string, color=True) for s in specs]
- if mode == 'paths':
- # Print one spec per line along with prefix path
- width = max(len(s) for s in abbreviated)
- width += 2
- format = " %%-%ds%%s" % width
-
- for abbrv, spec in zip(abbreviated, specs):
- if hashes:
- print(gray_hash(spec, hlen), )
- print(format % (abbrv, spec.prefix))
-
- elif mode == 'deps':
- for spec in specs:
- print(spec.tree(
- format=format_string,
- color=True,
- indent=4,
- prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
-
- elif mode == 'short':
- # Print columns of output if not printing flags
- if not flags:
-
- def fmt(s):
- string = ""
- if hashes:
- string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
-
- return string
-
- colify(fmt(s) for s in specs)
- # Print one entry per line if including flags
- else:
- for spec in specs:
- # Print the hash if necessary
- hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
- print(hsh + spec.format(format_string, color=True) + '\n')
-
- else:
- raise ValueError(
- "Invalid mode for display_specs: %s. Must be one of (paths,"
- "deps, short)." % mode) # NOQA: ignore=E501
-
-
def query_arguments(args):
# Check arguments
if args.explicit and args.implicit:
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 5774034062..498518057b 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -29,9 +29,11 @@ import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
+
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
+
def pad(string):
string = str(string)
padding = max(0, length - len(string))
@@ -40,7 +42,8 @@ def padder(str_list, extra=0):
def setup_parser(subparser):
- subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
+ subparser.add_argument(
+ 'name', metavar="PACKAGE", help="Name of package to get info for.")
def print_text_info(pkg):
@@ -84,7 +87,7 @@ def print_text_info(pkg):
for deptype in ('build', 'link', 'run'):
print
print "%s Dependencies:" % deptype.capitalize()
- deps = pkg.dependencies(deptype)
+ deps = pkg.dependencies_of_type(deptype)
if deps:
colify(deps, indent=4)
else:
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 55826d133c..a10e36e077 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -23,135 +23,233 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function
+
+import collections
import os
import shutil
import sys
import llnl.util.tty as tty
import spack.cmd
-from llnl.util.filesystem import mkdirp
+import spack.cmd.common.arguments as arguments
+import llnl.util.filesystem as filesystem
from spack.modules import module_types
-from spack.util.string import *
-
-description = "Manipulate modules and dotkits."
-
-
-def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
-
- sp.add_parser('refresh', help='Regenerate all module files.')
- find_parser = sp.add_parser('find', help='Find module files for packages.')
+description = "Manipulate module files"
- find_parser.add_argument(
- 'module_type',
- help="Type of module to find file for. [" +
- '|'.join(module_types) + "]")
+# Dictionary that will be populated with the list of sub-commands
+# Each sub-command must be callable and accept 3 arguments :
+# - mtype : the type of the module file
+# - specs : the list of specs to be processed
+# - args : namespace containing the parsed command line arguments
+callbacks = {}
- find_parser.add_argument(
- '-r', '--dependencies', action='store_true',
- dest='recurse_dependencies',
- help='Recursively traverse dependencies for modules to load.')
- find_parser.add_argument(
- '-s', '--shell', action='store_true', dest='shell',
- help='Generate shell script (instead of input for module command)')
+def subcommand(subparser_name):
+ """Registers a function in the callbacks dictionary"""
+ def decorator(callback):
+ callbacks[subparser_name] = callback
+ return callback
+ return decorator
- find_parser.add_argument(
- '-p', '--prefix', dest='prefix',
- help='Prepend to module names when issuing module load commands')
- find_parser.add_argument(
- 'spec', nargs='+',
- help='spec to find a module file for.')
-
-
-def module_find(mtype, flags, spec_array):
- """Look at all installed packages and see if the spec provided
- matches any. If it does, check whether there is a module file
- of type <mtype> there, and print out the name that the user
- should type to use that package's module.
- prefix:
- Prepend this to module names when issuing "module load" commands.
- Some systems seem to need it.
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
+
+ # spack module refresh
+ refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
+ refresh_parser.add_argument(
+ '--delete-tree',
+ help='Delete the module file tree before refresh',
+ action='store_true'
+ )
+ arguments.add_common_arguments(
+ refresh_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
+
+ # spack module find
+ find_parser = sp.add_parser('find', help='Find module files for packages')
+ arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
+
+ # spack module rm
+ rm_parser = sp.add_parser('rm', help='Remove module files')
+ arguments.add_common_arguments(
+ rm_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
+
+ # spack module loads
+ loads_parser = sp.add_parser(
+ 'loads',
+ help='Prompt the list of modules associated with a constraint'
+ )
+ loads_parser.add_argument(
+ '--input-only', action='store_false', dest='shell',
+ help='Generate input for module command (instead of a shell script)'
+ )
+ loads_parser.add_argument(
+ '-p', '--prefix', dest='prefix', default='',
+ help='Prepend to module names when issuing module load commands'
+ )
+ arguments.add_common_arguments(
+ loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
+ )
+
+
+class MultipleMatches(Exception):
+ pass
+
+
+class NoMatch(Exception):
+ pass
+
+
+@subcommand('loads')
+def loads(mtype, specs, args):
+ """Prompt the list of modules associated with a list of specs"""
+ # Get a comprehensive list of specs
+ if args.recurse_dependencies:
+ specs_from_user_constraint = specs[:]
+ specs = []
+ # FIXME : during module file creation nodes seem to be visited
+ # FIXME : multiple times even if cover='nodes' is given. This
+ # FIXME : work around permits to get a unique list of spec anyhow.
+ # FIXME : (same problem as in spack/modules.py)
+ seen = set()
+ seen_add = seen.add
+ for spec in specs_from_user_constraint:
+ specs.extend(
+ [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501
+ )
+
+ module_cls = module_types[mtype]
+ modules = [(spec, module_cls(spec).use_name)
+ for spec in specs if os.path.exists(module_cls(spec).file_name)]
+
+ module_commands = {
+ 'tcl': 'module load ',
+ 'dotkit': 'dotkit use '
+ }
+
+ d = {
+ 'command': '' if not args.shell else module_commands[mtype],
+ 'prefix': args.prefix
+ }
+
+ prompt_template = '{comment}{command}{prefix}{name}'
+ for spec, mod in modules:
+ d['comment'] = '' if not args.shell else '# {0}\n'.format(
+ spec.format())
+ d['name'] = mod
+ print(prompt_template.format(**d))
+
+
+@subcommand('find')
+def find(mtype, specs, args):
"""
- if mtype not in module_types:
- tty.die("Invalid module type: '%s'. Options are %s" %
- (mtype, comma_or(module_types)))
-
- # --------------------------------------
- def _find_modules(spec, modules_list):
- """Finds all modules and sub-modules for a spec"""
- if str(spec.version) == 'system':
- # No Spack module for system-installed packages
- return
-
- if flags.recurse_dependencies:
- for dep in spec.dependencies():
- _find_modules(dep, modules_list)
-
- mod = module_types[mtype](spec)
- if not os.path.isfile(mod.file_name):
- tty.die("No %s module is installed for %s" % (mtype, spec))
- modules_list.append((spec, mod))
-
-
- # --------------------------------------
- raw_specs = spack.cmd.parse_specs(spec_array)
- modules = set() # Modules we will load
- seen = set()
- for raw_spec in raw_specs:
-
- # ----------- Make sure the spec only resolves to ONE thing
- specs = spack.installed_db.query(raw_spec)
- if len(specs) == 0:
- tty.die("No installed packages match spec %s" % raw_spec)
-
- if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % raw_spec)
- for s in specs:
- sys.stderr.write(s.tree(color=True))
- sys.exit(1)
- spec = specs[0]
-
- # ----------- Chase down modules for it and all its dependencies
- modules_dups = list()
- _find_modules(spec, modules_dups)
-
- # Remove duplicates while keeping order
- modules_unique = list()
- for spec,mod in modules_dups:
- if mod.use_name not in seen:
- modules_unique.append((spec,mod))
- seen.add(mod.use_name)
-
- # Output...
- if flags.shell:
- module_cmd = {'tcl': 'module load', 'dotkit': 'dotkit use'}[mtype]
- for spec,mod in modules_unique:
- if flags.shell:
- print('# %s' % spec.format())
- print('%s %s%s' % (module_cmd, flags.prefix, mod.use_name))
- else:
- print(mod.use_name)
-
-def module_refresh():
- """Regenerate all module files for installed packages known to
- spack (some packages may no longer exist)."""
- specs = [s for s in spack.installed_db.query(installed=True, known=True)]
-
- for name, cls in module_types.items():
- tty.msg("Regenerating %s module files." % name)
- if os.path.isdir(cls.path):
- shutil.rmtree(cls.path, ignore_errors=False)
- mkdirp(cls.path)
- for spec in specs:
- cls(spec).write()
+ Look at all installed packages and see if the spec provided
+ matches any. If it does, check whether there is a module file
+ of type <mtype> there, and print out the name that the user
+ should type to use that package's module.
+ """
+ if len(specs) == 0:
+ raise NoMatch()
+
+ if len(specs) > 1:
+ raise MultipleMatches()
+
+ spec = specs.pop()
+ mod = module_types[mtype](spec)
+ if not os.path.isfile(mod.file_name):
+ tty.die("No %s module is installed for %s" % (mtype, spec))
+ print(mod.use_name)
+
+
+@subcommand('rm')
+def rm(mtype, specs, args):
+ """Deletes module files associated with items in specs"""
+ module_cls = module_types[mtype]
+ specs_with_modules = [
+ spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
+ modules = [module_cls(spec) for spec in specs_with_modules]
+
+ if not modules:
+ tty.msg('No module file matches your query')
+ raise SystemExit(1)
+
+ # Ask for confirmation
+ if not args.yes_to_all:
+ tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501
+ spack.cmd.display_specs(specs_with_modules, long=True)
+ print('')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+ # Remove the module files
+ for s in modules:
+ s.remove()
+
+
+@subcommand('refresh')
+def refresh(mtype, specs, args):
+ """Regenerate module files for item in specs"""
+ # Prompt a message to the user about what is going to change
+ if not specs:
+ tty.msg('No package matches your query')
+ return
+
+ if not args.yes_to_all:
+ tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501
+ spack.cmd.display_specs(specs, long=True)
+ print('')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
+
+ cls = module_types[mtype]
+
+ # Detect name clashes
+ writers = [cls(spec) for spec in specs]
+ file2writer = collections.defaultdict(list)
+ for item in writers:
+ file2writer[item.file_name].append(item)
+
+ if len(file2writer) != len(writers):
+ message = 'Name clashes detected in module files:\n'
+ for filename, writer_list in file2writer.items():
+ if len(writer_list) > 1:
+ message += '\nfile : {0}\n'.format(filename)
+ for x in writer_list:
+ message += 'spec : {0}\n'.format(x.spec.format(color=True))
+ tty.error(message)
+ tty.error('Operation aborted')
+ raise SystemExit(1)
+
+ # Proceed regenerating module files
+ tty.msg('Regenerating {name} module files'.format(name=mtype))
+ if os.path.isdir(cls.path) and args.delete_tree:
+ shutil.rmtree(cls.path, ignore_errors=False)
+ filesystem.mkdirp(cls.path)
+ for x in writers:
+ x.write(overwrite=True)
def module(parser, args):
- if args.module_command == 'refresh':
- module_refresh()
-
- elif args.module_command == 'find':
- module_find(args.module_type, args, args.spec)
+ # Qualifiers to be used when querying the db for specs
+ constraint_qualifiers = {
+ 'refresh': {
+ 'installed': True,
+ 'known': True
+ },
+ }
+ arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
+
+ module_type = args.module_type
+ constraint = args.constraint
+ try:
+ callbacks[args.subparser_name](module_type, args.specs, args)
+ except MultipleMatches:
+ message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501
+ tty.error(message.format(query=constraint))
+ for s in args.specs:
+ sys.stderr.write(s.format(color=True) + '\n')
+ raise SystemExit(1)
+ except NoMatch:
+ message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501
+ tty.die(message.format(query=constraint))
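The rewritten module command dispatches its sub-commands through a small decorator-based registry instead of an if/elif chain; each function registers itself under the sub-parser name it handles. The pattern in isolation (sample callbacks return strings instead of printing):

    callbacks = {}

    def subcommand(subparser_name):
        """Register the decorated function under the given sub-command name."""
        def decorator(callback):
            callbacks[subparser_name] = callback
            return callback
        return decorator

    @subcommand('find')
    def find(mtype, specs, args):
        return 'find %s modules for %d specs' % (mtype, len(specs))

    @subcommand('rm')
    def rm(mtype, specs, args):
        return 'rm %s modules for %d specs' % (mtype, len(specs))

    # module() looks up the parsed sub-command name and calls the callback:
    assert callbacks['find']('tcl', ['zlib'], None) == 'find tcl modules for 1 specs'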
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
index bc64c77eab..a27502d30e 100644
--- a/lib/spack/spack/cmd/package-list.py
+++ b/lib/spack/spack/cmd/package-list.py
@@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
import cgi
from StringIO import StringIO
-import llnl.util.tty as tty
from llnl.util.tty.colify import *
import spack
@@ -34,21 +32,22 @@ description = "Print a list of all packages in reStructuredText."
def github_url(pkg):
"""Link to a package file on github."""
- return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
- pkg.name)
+ url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501
+ return (url % pkg.name)
def rst_table(elts):
"""Print out a RST-style table."""
cols = StringIO()
ncol, widths = colify(elts, output=cols, tty=True)
- header = " ".join("=" * (w-1) for w in widths)
+ header = " ".join("=" * (w - 1) for w in widths)
return "%s\n%s%s" % (header, cols.getvalue(), header)
def print_rst_package_list():
"""Print out information on all packages in restructured text."""
- pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
+ pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower())
+ pkg_names = [p.name for p in pkgs]
print ".. _package-list:"
print
@@ -62,7 +61,7 @@ def print_rst_package_list():
print "Spack currently has %d mainline packages:" % len(pkgs)
print
- print rst_table("`%s`_" % p.name for p in pkgs)
+ print rst_table("`%s`_" % p for p in pkg_names)
print
print "-----"
@@ -79,14 +78,15 @@ def print_rst_package_list():
print
if pkg.versions:
print "Versions:"
- print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
+ print " " + ", ".join(str(v) for v in
+ reversed(sorted(pkg.versions)))
- for deptype in ('build', 'link', 'run'):
- deps = pkg.dependencies(deptype)
+ for deptype in spack.alldeps:
+ deps = pkg.dependencies_of_type(deptype)
if deps:
print "%s Dependencies" % deptype.capitalize()
- print " " + ", ".join("`%s`_" % d if d != "mpi" else d
- for d in build_deps)
+ print " " + ", ".join("%s_" % d if d in pkg_names
+ else d for d in deps)
print
print "Description:"
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index a6f08d09ed..a17b7c685c 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -30,7 +30,6 @@ import llnl.util.tty as tty
import spack
import spack.cmd
import spack.repository
-from spack.cmd.find import display_specs
description = "Remove an installed package"
@@ -43,21 +42,10 @@ error_message = """You can either:
display_args = {
'long': True,
'show_flags': True,
- 'variants':True
+ 'variants': True
}
-def ask_for_confirmation(message):
- while True:
- tty.msg(message + '[y/n]')
- choice = raw_input().lower()
- if choice == 'y':
- break
- elif choice == 'n':
- raise SystemExit('Operation aborted')
- tty.warn('Please reply either "y" or "n"')
-
-
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
@@ -65,32 +53,37 @@ def setup_parser(subparser):
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " +
- "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
- "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+ "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501
+ "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
- help='Also uninstall any packages that depend on the ones given via command line.'
+ help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501
)
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
- help='Assume "yes" is the answer to every confirmation asked to the user.'
+ help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501
)
- subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+ subparser.add_argument(
+ 'packages',
+ nargs=argparse.REMAINDER,
+ help="specs of packages to uninstall"
+ )
def concretize_specs(specs, allow_multiple_matches=False, force=False):
- """
- Returns a list of specs matching the non necessarily concretized specs given from cli
+ """Returns a list of specs matching the non necessarily
+ concretized specs given from cli
Args:
specs: list of specs to be matched against installed packages
- allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+ allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
- specs_from_cli = [] # List of specs that match expressions given via command line
+ # List of specs that match expressions given via command line
+ specs_from_cli = []
has_errors = False
for spec in specs:
matching = spack.installed_db.query(spec)
@@ -99,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, **display_args)
+ spack.cmd.display_specs(matching, **display_args)
print()
has_errors = True
@@ -116,8 +109,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs):
- """
- Returns a dictionary that maps a spec with a list of its installed dependents
+ """Returns a dictionary that maps a spec with a list of its
+ installed dependents
Args:
specs: list of specs to be checked for dependents
@@ -147,7 +140,7 @@ def do_uninstall(specs, force):
try:
# should work if package is known to spack
packages.append(item.package)
- except spack.repository.UnknownPackageError as e:
+ except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(item).do_uninstall(force=True)
@@ -169,17 +162,20 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages)
# Gets the list of installed specs that match the ones give via cli
- uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
- dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
+ # takes care of '-a' is given in the cli
+ uninstall_list = concretize_specs(specs, args.all, args.force)
+ dependent_list = installed_dependents(
+ uninstall_list) # takes care of '-d'
# Process dependent_list and update uninstall_list
has_error = False
if dependent_list and not args.dependents and not args.force:
for spec, lst in dependent_list.items():
- tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
+ tty.error("Will not uninstall %s" %
+ spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
- display_specs(lst, **display_args)
+ spack.cmd.display_specs(lst, **display_args)
print('')
has_error = True
elif args.dependents:
@@ -188,14 +184,14 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))
if has_error:
- tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
+ tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
- display_specs(uninstall_list, **display_args)
+ spack.cmd.display_specs(uninstall_list, **display_args)
print('')
- ask_for_confirmation('Do you want to proceed ? ')
+ spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 84179e1469..8b5e96f97d 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -328,6 +328,11 @@ section_schemas = {
'anyOf': [
{
'properties': {
+ 'hash_length': {
+ 'type': 'integer',
+ 'minimum': 0,
+ 'default': 7
+ },
'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': {
@@ -492,8 +497,15 @@ class ConfigScope(object):
"""Empty cached config information."""
self.sections = {}
+"""Default configuration scope is the lowest-level scope. These are
+ versioned with Spack and can be overridden by sites or users."""
+ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults'))
-ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
+"""Site configuration is per spack instance, for sites or projects.
+ No site-level configs should be checked into spack by default."""
+ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
+
+"""User configuration can override both spack defaults and site config."""
ConfigScope('user', os.path.expanduser('~/.spack'))
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index c95abd7423..317b0d5784 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -60,7 +60,7 @@ from spack.repository import UnknownPackageError
_db_dirname = '.spack-db'
# DB version. This is stuck in the DB file to track changes in format.
-_db_version = Version('0.9.1')
+_db_version = Version('0.9.2')
# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
@@ -215,14 +215,10 @@ class Database(object):
# Add dependencies from other records in the install DB to
# form a full spec.
if 'dependencies' in spec_dict[spec.name]:
- for dep in spec_dict[spec.name]['dependencies'].values():
- if type(dep) == tuple:
- dep_hash, deptypes = dep
- else:
- dep_hash = dep
- deptypes = spack.alldeps
- child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
- spec._add_dependency(child, deptypes)
+ yaml_deps = spec_dict[spec.name]['dependencies']
+ for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+ child = self._read_spec_from_yaml(dhash, installs, hash_key)
+ spec._add_dependency(child, dtypes)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@@ -639,13 +635,14 @@ class WriteTransaction(_Transaction):
class CorruptDatabaseError(SpackError):
def __init__(self, path, msg=''):
super(CorruptDatabaseError, self).__init__(
- "Spack database is corrupt: %s. %s." + \
- "Try running `spack reindex` to fix." % (path, msg))
+ "Spack database is corrupt: %s. %s." % (path, msg),
+ "Try running `spack reindex` to fix.")
class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__(
- "Expected database version %s but found version %s." + \
- "Try running `spack reindex` to fix." %
- (expected, found))
+ "Expected database version %s but found version %s."
+ % (expected, found),
+ "`spack reindex` may fix this, or you may need a newer "
+ "Spack version.")
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index a5e76043ad..8150a6da2b 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -34,6 +34,7 @@ import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
+import spack
from spack.spec import Spec
from spack.error import SpackError
@@ -223,8 +224,14 @@ class YamlDirectoryLayout(DirectoryLayout):
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
- with open(path) as f:
- spec = Spec.from_yaml(f)
+ try:
+ with open(path) as f:
+ spec = Spec.from_yaml(f)
+ except Exception as e:
+ if spack.debug:
+ raise
+ raise SpecReadError(
+ 'Unable to read file: %s' % path, 'Cause: ' + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
@@ -456,10 +463,12 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"Install path %s already exists!")
+class SpecReadError(DirectoryLayoutError):
+ """Raised when directory layout can't read a spec."""
+
+
class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it."""
- def __init__(self, message):
- super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index a2e528d295..72656b8ae0 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -188,6 +188,8 @@ def parse_config_options(module_generator):
#####
# Automatic loading loads
+ module_file_actions['hash_length'] = module_configuration.get(
+ 'hash_length', 7)
module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites
@@ -237,6 +239,7 @@ class EnvModule(object):
formats = {}
class __metaclass__(type):
+
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in CONFIGURATION[
@@ -295,7 +298,9 @@ class EnvModule(object):
if constraint in self.spec:
suffixes.append(suffix)
# Always append the hash to make the module file unique
- suffixes.append(self.spec.dag_hash())
+ hash_length = configuration.pop('hash_length', 7)
+ if hash_length != 0:
+ suffixes.append(self.spec.dag_hash(length=hash_length))
name = '-'.join(suffixes)
return name
@@ -338,7 +343,7 @@ class EnvModule(object):
return False
- def write(self):
+ def write(self, overwrite=False):
"""
Writes out a module file for this object.
@@ -399,6 +404,15 @@ class EnvModule(object):
for line in self.module_specific_content(module_configuration):
module_file_content += line
+ # Print a warning in case I am accidentally overwriting
+ # a module file that is already there (name clash)
+ if not overwrite and os.path.exists(self.file_name):
+ message = 'Module file already exists : skipping creation\n'
+ message += 'file : {0.file_name}\n'
+ message += 'spec : {0.spec}'
+ tty.warn(message.format(self))
+ return
+
# Dump to file
with open(self.file_name, 'w') as f:
f.write(module_file_content)
@@ -454,7 +468,7 @@ class EnvModule(object):
class Dotkit(EnvModule):
name = 'dotkit'
-
+ path = join_path(spack.share_path, 'dotkit')
environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
@@ -466,7 +480,7 @@ class Dotkit(EnvModule):
@property
def file_name(self):
- return join_path(spack.share_path, "dotkit", self.spec.architecture,
+ return join_path(self.path, self.spec.architecture,
'%s.dk' % self.use_name)
@property
@@ -494,7 +508,7 @@ class Dotkit(EnvModule):
class TclModule(EnvModule):
name = 'tcl'
-
+ path = join_path(spack.share_path, "modules")
environment_modifications_formats = {
PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
@@ -514,7 +528,7 @@ class TclModule(EnvModule):
@property
def file_name(self):
- return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name)
+ return join_path(self.path, self.spec.architecture, self.use_name)
@property
def header(self):
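With the new hash_length option, module file names still end in a DAG-hash suffix by default, but the length is configurable and a value of 0 drops the hash entirely. A standalone sketch of the suffix logic (the function name and arguments are mine; the real code lives in EnvModule.use_name and reads the option from the modules configuration):

    import hashlib

    def make_use_name(base, configuration, dag_hash):
        suffixes = [base]
        hash_length = configuration.get('hash_length', 7)
        if hash_length != 0:
            suffixes.append(dag_hash[:hash_length])
        return '-'.join(suffixes)

    fake_hash = hashlib.sha1(b'zlib@1.2.8%gcc@4.9.3').hexdigest()
    print(make_use_name('zlib-1.2.8-gcc-4.9.3', {}, fake_hash))                  # 7-char hash suffix
    print(make_use_name('zlib-1.2.8-gcc-4.9.3', {'hash_length': 0}, fake_hash))  # no hash suffix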
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index c41bd0206e..68360ec532 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -64,7 +64,7 @@ from spack import directory_layout
from spack.stage import Stage, ResourceStage, StageComposite
from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment
-from spack.util.executable import ProcessError
+from spack.util.executable import ProcessError, which
from spack.version import *
"""Allowed URL schemes for spack packages."""
@@ -718,6 +718,11 @@ class PackageBase(object):
def fetcher(self, f):
self._fetcher = f
+ def dependencies_of_type(self, *deptypes):
+ """Get subset of the dependencies with certain types."""
+ return dict((name, conds) for name, conds in self.dependencies.items()
+ if any(d in self._deptypes[name] for d in deptypes))
+
@property
def extendee_spec(self):
"""
@@ -840,7 +845,7 @@ class PackageBase(object):
if self.name == spec.name:
continue
# XXX(deptype): Should build dependencies not count here?
- #for dep in spec.traverse(deptype=('run')):
+ # for dep in spec.traverse(deptype=('run')):
for dep in spec.traverse(deptype=spack.alldeps):
if self.spec == dep:
dependents.append(spec)
@@ -852,13 +857,13 @@ class PackageBase(object):
return self.spec.prefix
@property
- #TODO: Change this to architecture
+ # TODO: Change this to architecture
def compiler(self):
"""Get the spack.compiler.Compiler object used to build this package"""
if not self.spec.concrete:
raise ValueError("Can only get a compiler for a concrete package.")
return spack.compilers.compiler_for_spec(self.spec.compiler,
- self.spec.architecture)
+ self.spec.architecture)
def url_version(self, version):
"""
@@ -1063,7 +1068,8 @@ class PackageBase(object):
run_tests -- Run tests within the package's install()
"""
if not self.spec.concrete:
- raise ValueError("Can only install concrete packages: %s." % self.spec.name)
+ raise ValueError("Can only install concrete packages: %s."
+ % self.spec.name)
# No installation needed if package is external
if self.spec.external:
@@ -1713,6 +1719,13 @@ def install_dependency_symlinks(pkg, spec, prefix):
flatten_dependencies(spec, prefix)
+def use_cray_compiler_names():
+ """Compiler names for builds that rely on cray compiler names."""
+ os.environ['CC'] = 'cc'
+ os.environ['CXX'] = 'CC'
+ os.environ['FC'] = 'ftn'
+ os.environ['F77'] = 'ftn'
+
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
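For illustration, a package whose build system insists on the Cray wrapper names could call the new helper from its install(); the package body below is invented, not part of this patch:

def install(self, spec, prefix):
    use_cray_compiler_names()      # CC=cc, CXX=CC, FC=ftn, F77=ftn
    configure('--prefix={0}'.format(prefix))
    make()
    make('install')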
diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py
index e710303e23..e3c7761a94 100644
--- a/lib/spack/spack/platforms/cray_xc.py
+++ b/lib/spack/spack/platforms/cray_xc.py
@@ -2,6 +2,8 @@ import os
from spack.architecture import Platform, Target
from spack.operating_systems.linux_distro import LinuxDistro
from spack.operating_systems.cnl import Cnl
+from spack.util.executable import which
+
class CrayXc(Platform):
priority = 20
@@ -9,9 +11,8 @@ class CrayXc(Platform):
back_end = 'ivybridge'
default = 'ivybridge'
- front_os = "SuSE11"
back_os = "CNL10"
- default_os = "CNL10"
+ default_os = "CNL10"
def __init__(self):
''' Since cori doesn't have ivybridge as a front end it's better
@@ -32,15 +33,27 @@ class CrayXc(Platform):
# Could switch to use modules and fe targets for front end
# Currently using compilers by path for front end.
self.add_target('sandybridge', Target('sandybridge'))
- self.add_target('ivybridge',
+ self.add_target('ivybridge',
Target('ivybridge', 'craype-ivybridge'))
- self.add_target('haswell',
- Target('haswell','craype-haswell'))
+ self.add_target('haswell',
+ Target('haswell', 'craype-haswell'))
- self.add_operating_system('SuSE11', LinuxDistro())
+ # Front end of the cray platform is a linux distro.
+ linux_dist = LinuxDistro()
+ self.front_os = str(linux_dist)
+ self.add_operating_system(str(linux_dist), linux_dist)
self.add_operating_system('CNL10', Cnl())
@classmethod
def detect(self):
- return os.path.exists('/opt/cray/craype')
-
+ try:
+ cc_verbose = which('ftn')
+ text = cc_verbose('-craype-verbose',
+ output=str, error=str,
+ ignore_errors=True).split()
+ if '-D__CRAYXC' in text:
+ return True
+ else:
+ return False
+ except:
+ return False
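Spelled out as a standalone snippet, the new platform probe simply asks the ftn wrapper to echo its real compiler invocation and looks for the Cray XC define (mirrors the hunk above; guard added for machines without ftn):

from spack.util.executable import which

ftn = which('ftn')                      # None when the wrapper is absent
on_cray_xc = False
if ftn:
    text = ftn('-craype-verbose', output=str, error=str,
               ignore_errors=True).split()
    on_cray_xc = '-D__CRAYXC' in text   # present only on Cray XC systems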
diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py
index 4820584150..1b94f03de7 100644
--- a/lib/spack/spack/preferred_packages.py
+++ b/lib/spack/spack/preferred_packages.py
@@ -26,8 +26,10 @@
import spack
from spack.version import *
+
class PreferredPackages(object):
- _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent
+ # Arbitrary, but consistent
+ _default_order = {'compiler': ['gcc', 'intel', 'clang', 'pgi', 'xlc']}
def __init__(self):
self.preferred = spack.config.get_config('packages')
@@ -35,24 +37,25 @@ class PreferredPackages(object):
# Given a package name, sort component (e.g, version, compiler, ...), and
# a second_key (used by providers), return the list
- def _order_for_package(self, pkgname, component, second_key, test_all=True):
+ def _order_for_package(self, pkgname, component, second_key,
+ test_all=True):
pkglist = [pkgname]
if test_all:
pkglist.append('all')
for pkg in pkglist:
order = self.preferred.get(pkg, {}).get(component, {})
- if type(order) is dict:
+ if isinstance(order, dict) and second_key:
order = order.get(second_key, {})
if not order:
continue
return [str(s).strip() for s in order]
return []
-
# A generic sorting function. Given a package name and sort
# component, return less-than-0, 0, or greater-than-0 if
# a is respectively less-than, equal to, or greater than b.
- def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
+ def _component_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
if a is None:
return -1
if b is None:
@@ -84,92 +87,102 @@ class PreferredPackages(object):
else:
return 0
-
# A sorting function for specs. Similar to component_compare, but
# a and b are considered to match entries in the sorting list if they
# satisfy the list component.
- def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
- if not a or not a.concrete:
+ def _spec_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
+ if not a or (not a.concrete and not second_key):
return -1
- if not b or not b.concrete:
+ if not b or (not b.concrete and not second_key):
return 1
specs = self._spec_for_pkgname(pkgname, component, second_key)
a_index = None
b_index = None
reverse = -1 if reverse_natural_compare else 1
for i, cspec in enumerate(specs):
- if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)):
+ if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)):
a_index = i
if b_index:
break
- if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)):
+ if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)):
b_index = i
if a_index:
break
- if a_index != None and b_index == None: return -1
- elif a_index == None and b_index != None: return 1
- elif a_index != None and b_index == a_index: return -1 * cmp(a, b)
- elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index)
- else: return cmp(a, b) * reverse
-
-
+ if a_index is not None and b_index is None:
+ return -1
+ elif a_index is None and b_index is not None:
+ return 1
+ elif a_index is not None and b_index == a_index:
+ return -1 * cmp(a, b)
+ elif (a_index is not None and b_index is not None and
+ a_index != b_index):
+ return cmp(a_index, b_index)
+ else:
+ return cmp(a, b) * reverse
# Given a sort order specified by the pkgname/component/second_key, return
# a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
def _spec_for_pkgname(self, pkgname, component, second_key):
key = (pkgname, component, second_key)
- if not key in self._spec_for_pkgname_cache:
+ if key not in self._spec_for_pkgname_cache:
pkglist = self._order_for_package(pkgname, component, second_key)
if not pkglist:
if component in self._default_order:
pkglist = self._default_order[component]
if component == 'compiler':
- self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.CompilerSpec(s) for s in pkglist]
elif component == 'version':
- self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [VersionList(s) for s in pkglist]
else:
- self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.Spec(s) for s in pkglist]
return self._spec_for_pkgname_cache[key]
-
def provider_compare(self, pkgname, provider_str, a, b):
- """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or
- greater-than b. A and b are possible implementations of provider_str.
- One provider is less-than another if it is preferred over the other.
- For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if
- mvapich should be preferred over openmpi for scorep."""
- return self._spec_compare(pkgname, 'providers', a, b, False, provider_str)
-
+ """Return less-than-0, 0, or greater than 0 if a is respecively
+ less-than, equal-to, or greater-than b. A and b are possible
+ implementations of provider_str. One provider is less-than another
+ if it is preferred over the other. For example,
+ provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would
+ return -1 if mvapich should be preferred over openmpi for scorep."""
+ return self._spec_compare(pkgname, 'providers', a, b, False,
+ provider_str)
def spec_has_preferred_provider(self, pkgname, provider_str):
- """Return True iff the named package has a list of preferred provider"""
- return bool(self._order_for_package(pkgname, 'providers', provider_str, False))
-
+ """Return True iff the named package has a list of preferred
+ providers"""
+ return bool(self._order_for_package(pkgname, 'providers',
+ provider_str, False))
def version_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if version a of pkgname is
- respecively less-than, equal-to, or greater-than version b of pkgname.
- One version is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than version b of
+ pkgname. One version is less-than another if it is preferred over
+ the other."""
return self._spec_compare(pkgname, 'version', a, b, True, None)
-
def variant_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if variant a of pkgname is
- respecively less-than, equal-to, or greater-than variant b of pkgname.
- One variant is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than variant b of
+ pkgname. One variant is less-than another if it is preferred over
+ the other."""
return self._component_compare(pkgname, 'variant', a, b, False, None)
-
def architecture_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is
- respecively less-than, equal-to, or greater-than architecture b of pkgname.
- One architecture is less-than another if it is preferred over the other."""
- return self._component_compare(pkgname, 'architecture', a, b, False, None)
-
+ """Return less-than-0, 0, or greater than 0 if architecture a of pkgname
+ is respectively less-than, equal-to, or greater-than architecture b
+ of pkgname. One architecture is less-than another if it is preferred
+ over the other."""
+ return self._component_compare(pkgname, 'architecture', a, b,
+ False, None)
def compiler_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
- respecively less-than, equal-to, or greater-than compiler b of pkgname.
- One compiler is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than compiler b of
+ pkgname. One compiler is less-than another if it is preferred over
+ the other."""
return self._spec_compare(pkgname, 'compiler', a, b, False, None)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index d3a5f66e57..e694f2b2da 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -96,7 +96,6 @@ specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import sys
-import itertools
import hashlib
import base64
import imp
@@ -116,8 +115,6 @@ import spack.parse
import spack.error
import spack.compilers as compilers
-# TODO: move display_specs to some other location.
-from spack.cmd.find import display_specs
from spack.version import *
from spack.util.string import *
from spack.util.prefix import Prefix
@@ -155,6 +152,7 @@ _separators = '[%s]' % ''.join(color_formats.keys())
every time we call str()"""
_any_version = VersionList([':'])
+# Special types of dependencies.
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
@@ -296,10 +294,15 @@ class CompilerSpec(object):
@key_ordering
class DependencySpec(object):
- """
- Dependencies have conditions in which they apply.
+ """Dependencies can be one (or more) of several types:
+
+ - build: needs to be in the PATH at build time.
+ - link: is linked to and added to compiler flags.
+ - run: needs to be in the PATH for the package to run.
- This stores both what is depended on and why it is a dependency.
+ Fields:
+ - spec: the spack.spec.Spec description of a dependency.
+ - deptypes: strings representing the type of dependency this is.
"""
def __init__(self, spec, deptypes):
self.spec = spec
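To make the dependency-type vocabulary concrete, a tiny sketch (the 'libelf' spec is just an example):

dspec = DependencySpec(Spec('libelf'), ('build', 'link'))
dspec.deptypes      # ('build', 'link')
alldeps             # ('build', 'link', 'run')
nolink              # ('build', 'run')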
@@ -558,15 +561,15 @@ class Spec(object):
def _find_deps_dict(self, where, deptype):
deptype = self._deptype_norm(deptype)
- return [(dep.spec.name, dep)
- for dep in where.values()
- if deptype and any(d in deptype for d in dep.deptypes)]
+ return dict((dep.spec.name, dep)
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes))
def dependencies_dict(self, deptype=None):
- return dict(self._find_deps_dict(self._dependencies, deptype))
+ return self._find_deps_dict(self._dependencies, deptype)
def dependents_dict(self, deptype=None):
- return dict(self._find_deps_dict(self._dependents, deptype))
+ return self._find_deps_dict(self._dependents, deptype)
#
# Private routines here are called by the parser when building a spec.
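With _find_deps_dict() building the dict directly, both accessors keep returning a name-keyed mapping; a sketch of the shape, with a hypothetical dependency name:

deps = spec.dependencies_dict(deptype=('link',))
# -> {'libelf': <DependencySpec for libelf>, ...}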
@@ -644,7 +647,8 @@ class Spec(object):
mod = imp.load_source(mod_name, path)
class_name = mod_to_class(value)
if not hasattr(mod, class_name):
- tty.die('No class %s defined in %s' % (class_name, mod_name))
+ tty.die(
+ 'No class %s defined in %s' % (class_name, mod_name))
cls = getattr(mod, class_name)
if not inspect.isclass(cls):
tty.die('%s.%s is not a class' % (mod_name, class_name))
@@ -667,13 +671,15 @@ class Spec(object):
def _set_os(self, value):
"""Called by the parser to set the architecture operating system"""
- if self.architecture.platform:
- self.architecture.platform_os = self.architecture.platform.operating_system(value)
+ arch = self.architecture
+ if arch.platform:
+ arch.platform_os = arch.platform.operating_system(value)
def _set_target(self, value):
"""Called by the parser to set the architecture target"""
- if self.architecture.platform:
- self.architecture.target = self.architecture.platform.target(value)
+ arch = self.architecture
+ if arch.platform:
+ arch.target = arch.platform.target(value)
def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
@@ -688,8 +694,9 @@ class Spec(object):
#
@property
def fullname(self):
- return (('%s.%s' % (self.namespace, self.name)) if self.namespace else
- (self.name if self.name else ''))
+ return (
+ ('%s.%s' % (self.namespace, self.name)) if self.namespace else
+ (self.name if self.name else ''))
@property
def root(self):
@@ -745,15 +752,15 @@ class Spec(object):
if self._concrete:
return True
- self._concrete = bool(not self.virtual
- and self.namespace is not None
- and self.versions.concrete
- and self.variants.concrete
- and self.architecture
- and self.architecture.concrete
- and self.compiler and self.compiler.concrete
- and self.compiler_flags.concrete
- and self._dependencies.concrete)
+ self._concrete = bool(not self.virtual and
+ self.namespace is not None and
+ self.versions.concrete and
+ self.variants.concrete and
+ self.architecture and
+ self.architecture.concrete and
+ self.compiler and self.compiler.concrete and
+ self.compiler_flags.concrete and
+ self._dependencies.concrete)
return self._concrete
def traverse(self, visited=None, deptype=None, **kwargs):
@@ -864,9 +871,9 @@ class Spec(object):
for name in sorted(successors):
child = successors[name]
children = child.spec.traverse_with_deptype(
- visited, d=d + 1, deptype=deptype_query,
- deptype_query=deptype_query,
- _self_deptype=child.deptypes, **kwargs)
+ visited, d=d + 1, deptype=deptype_query,
+ deptype_query=deptype_query,
+ _self_deptype=child.deptypes, **kwargs)
for elt in children:
yield elt
@@ -914,9 +921,11 @@ class Spec(object):
d = {
'parameters': params,
'arch': self.architecture,
- 'dependencies': dict((d, (deps[d].spec.dag_hash(),
- deps[d].deptypes))
- for d in sorted(deps.keys()))
+ 'dependencies': dict(
+ (name, {
+ 'hash': dspec.spec.dag_hash(),
+ 'type': [str(s) for s in dspec.deptypes]})
+ for name, dspec in deps.items())
}
# Older concrete specs do not have a namespace. Omit for
@@ -982,14 +991,34 @@ class Spec(object):
raise SpackRecordError(
"Did not find a valid format for variants in YAML file")
- # XXX(deptypes): why are dependencies not meant to be read here?
- #for name, dep_info in node['dependencies'].items():
- # (dag_hash, deptypes) = dep_info
- # spec._dependencies[name] = DependencySpec(dag_hash, deptypes)
+ # Don't read dependencies here; from_node_dict() is used by
+ # from_yaml() to read the root *and* each dependency spec.
return spec
@staticmethod
+ def read_yaml_dep_specs(dependency_dict):
+ """Read the DependencySpec portion of a YAML-formatted Spec.
+
+ This needs to be backward-compatible with older spack spec
+ formats so that reindex will work on old specs/databases.
+ """
+ for dep_name, elt in dependency_dict.items():
+ if isinstance(elt, basestring):
+ # original format, elt is just the dependency hash.
+ dag_hash, deptypes = elt, ['build', 'link']
+ elif isinstance(elt, tuple):
+ # original deptypes format: a (hash, deptypes) tuple (not future-proof)
+ dag_hash, deptypes = elt
+ elif isinstance(elt, dict):
+ # new format: elements of dependency spec are keyed.
+ dag_hash, deptypes = elt['hash'], elt['type']
+ else:
+ raise SpecError("Couldn't parse dependency types in spec.")
+
+ yield dep_name, dag_hash, list(deptypes)
+
+ @staticmethod
def from_yaml(stream):
"""Construct a spec from YAML.
@@ -1000,27 +1029,30 @@ class Spec(object):
represent more than the DAG does.
"""
- deps = {}
- spec = None
-
try:
yfile = yaml.load(stream)
except MarkedYAMLError, e:
raise SpackYAMLError("error parsing YAML spec:", str(e))
- for node in yfile['spec']:
- name = next(iter(node))
- dep = Spec.from_node_dict(node)
- if not spec:
- spec = dep
- deps[dep.name] = dep
+ nodes = yfile['spec']
- for node in yfile['spec']:
+ # Read nodes out of list. Root spec is the first element;
+ # dependencies are the following elements.
+ dep_list = [Spec.from_node_dict(node) for node in nodes]
+ if not dep_list:
+ raise SpecError("YAML spec contains no nodes.")
+ deps = dict((spec.name, spec) for spec in dep_list)
+ spec = dep_list[0]
+
+ for node in nodes:
+ # get dependency dict from the node.
name = next(iter(node))
- for dep_name, (dep, deptypes) in \
- node[name]['dependencies'].items():
- deps[name]._dependencies[dep_name] = \
- DependencySpec(deps[dep_name], deptypes)
+ yaml_deps = node[name]['dependencies']
+ for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+ # Fill in dependencies by looking them up by name in deps dict
+ deps[name]._dependencies[dname] = DependencySpec(
+ deps[dname], set(dtypes))
+
return spec
def _concretize_helper(self, presets=None, visited=None):
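For reference, the three on-disk dependency encodings that read_yaml_dep_specs() accepts; '<dag hash>' is a placeholder, not a real value:

old_format = {'libelf': '<dag hash>'}                                # hash only
tuple_format = {'libelf': ('<dag hash>', ['build', 'link'])}         # short-lived
dict_format = {'libelf': {'hash': '<dag hash>',
                          'type': ['build', 'link']}}                # current
# all three yield ('libelf', <dag hash>, <list of deptypes>); the first
# one defaults the types to ['build', 'link'].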
@@ -1171,14 +1203,16 @@ class Spec(object):
def feq(cfield, sfield):
return (not cfield) or (cfield == sfield)
- if replacement is spec or (feq(replacement.name, spec.name) and
- feq(replacement.versions, spec.versions) and
- feq(replacement.compiler, spec.compiler) and
- feq(replacement.architecture, spec.architecture) and
- feq(replacement._dependencies, spec._dependencies) and
- feq(replacement.variants, spec.variants) and
- feq(replacement.external, spec.external) and
- feq(replacement.external_module, spec.external_module)):
+ if replacement is spec or (
+ feq(replacement.name, spec.name) and
+ feq(replacement.versions, spec.versions) and
+ feq(replacement.compiler, spec.compiler) and
+ feq(replacement.architecture, spec.architecture) and
+ feq(replacement._dependencies, spec._dependencies) and
+ feq(replacement.variants, spec.variants) and
+ feq(replacement.external, spec.external) and
+ feq(replacement.external_module,
+ spec.external_module)):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
@@ -1235,10 +1269,10 @@ class Spec(object):
if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace
-
for s in self.traverse(root=False):
if s.external_module:
- compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture)
+ compiler = spack.compilers.compiler_for_spec(
+ s.compiler, s.architecture)
for mod in compiler.modules:
load_module(mod)
@@ -1505,13 +1539,13 @@ class Spec(object):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
# Get all the dependencies into one DependencyMap
- spec_deps = self.flat_dependencies_with_deptype(copy=False,
- deptype_query=alldeps)
+ spec_deps = self.flat_dependencies_with_deptype(
+ copy=False, deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
- provider_index = ProviderIndex([s.spec for s in spec_deps.values()],
- restrict=True)
+ provider_index = ProviderIndex(
+ [s.spec for s in spec_deps.values()], restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
@@ -1584,20 +1618,17 @@ class Spec(object):
other.variants[v])
# TODO: Check out the logic here
- if self.architecture is not None and other.architecture is not None:
- if self.architecture.platform is not None and other.architecture.platform is not None:
- if self.architecture.platform != other.architecture.platform:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
- if self.architecture.platform_os is not None and other.architecture.platform_os is not None:
- if self.architecture.platform_os != other.architecture.platform_os:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
- if self.architecture.target is not None and other.architecture.target is not None:
- if self.architecture.target != other.architecture.target:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
-
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is not None and oarch is not None:
+ if sarch.platform is not None and oarch.platform is not None:
+ if sarch.platform != oarch.platform:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.platform_os is not None and oarch.platform_os is not None:
+ if sarch.platform_os != oarch.platform_os:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.target is not None and oarch.target is not None:
+ if sarch.target != oarch.target:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
changed = False
if self.compiler is not None and other.compiler is not None:
@@ -1612,15 +1643,16 @@ class Spec(object):
changed |= self.compiler_flags.constrain(other.compiler_flags)
old = str(self.architecture)
- if self.architecture is None or other.architecture is None:
- self.architecture = self.architecture or other.architecture
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is None or other.architecture is None:
+ self.architecture = sarch or oarch
else:
- if self.architecture.platform is None or other.architecture.platform is None:
- self.architecture.platform = self.architecture.platform or other.architecture.platform
- if self.architecture.platform_os is None or other.architecture.platform_os is None:
- self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os
- if self.architecture.target is None or other.architecture.target is None:
- self.architecture.target = self.architecture.target or other.architecture.target
+ if sarch.platform is None or oarch.platform is None:
+ self.architecture.platform = sarch.platform or oarch.platform
+ if sarch.platform_os is None or oarch.platform_os is None:
+ sarch.platform_os = sarch.platform_os or oarch.platform_os
+ if sarch.target is None or oarch.target is None:
+ sarch.target = sarch.target or oarch.target
changed |= (str(self.architecture) != old)
if deps:
@@ -1751,15 +1783,25 @@ class Spec(object):
# Architecture satisfaction is currently just string equality.
# If not strict, None means unconstrained.
- if self.architecture and other.architecture:
- if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or
- (self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or
- (self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)):
+ sarch, oarch = self.architecture, other.architecture
+ if sarch and oarch:
+ if ((sarch.platform and
+ oarch.platform and
+ sarch.platform != oarch.platform) or
+
+ (sarch.platform_os and
+ oarch.platform_os and
+ sarch.platform_os != oarch.platform_os) or
+
+ (sarch.target and
+ oarch.target and
+ sarch.target != oarch.target)):
return False
- elif strict and ((other.architecture and not self.architecture) or
- (other.architecture.platform and not self.architecture.platform) or
- (other.architecture.platform_os and not self.architecture.platform_os) or
- (other.architecture.target and not self.architecture.target)):
+
+ elif strict and ((oarch and not sarch) or
+ (oarch.platform and not sarch.platform) or
+ (oarch.platform_os and not sarch.platform_os) or
+ (oarch.target and not sarch.target)):
return False
if not self.compiler_flags.satisfies(
@@ -1841,11 +1883,16 @@ class Spec(object):
# We don't count dependencies as changes here
changed = True
if hasattr(self, 'name'):
- changed = (self.name != other.name and self.versions != other.versions and \
- self.architecture != other.architecture and self.compiler != other.compiler and \
- self.variants != other.variants and self._normal != other._normal and \
- self.concrete != other.concrete and self.external != other.external and \
- self.external_module != other.external_module and self.compiler_flags != other.compiler_flags)
+ changed = (self.name != other.name and
+ self.versions != other.versions and
+ self.architecture != other.architecture and
+ self.compiler != other.compiler and
+ self.variants != other.variants and
+ self._normal != other._normal and
+ self.concrete != other.concrete and
+ self.external != other.external and
+ self.external_module != other.external_module and
+ self.compiler_flags != other.compiler_flags)
# Local node attributes get copied first.
self.name = other.name
@@ -1889,7 +1936,7 @@ class Spec(object):
# here.
if depspec.spec.name not in new_spec._dependencies:
new_spec._add_dependency(
- new_nodes[depspec.spec.name], depspec.deptypes)
+ new_nodes[depspec.spec.name], depspec.deptypes)
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
@@ -2000,7 +2047,6 @@ class Spec(object):
self.compiler,
self.compiler_flags)
-
def eq_node(self, other):
"""Equality with another spec, not including dependencies."""
return self._cmp_node() == other._cmp_node()
@@ -2196,41 +2242,39 @@ class Spec(object):
def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps())
-
def __cmp__(self, other):
- #Package name sort order is not configurable, always goes alphabetical
+ # Package name sort order is not configurable, always goes alphabetical
if self.name != other.name:
return cmp(self.name, other.name)
- #Package version is second in compare order
+ # Package version is second in compare order
pkgname = self.name
if self.versions != other.versions:
- return spack.pkgsort.version_compare(pkgname,
- self.versions, other.versions)
+ return spack.pkgsort.version_compare(
+ pkgname, self.versions, other.versions)
- #Compiler is third
+ # Compiler is third
if self.compiler != other.compiler:
- return spack.pkgsort.compiler_compare(pkgname,
- self.compiler, other.compiler)
+ return spack.pkgsort.compiler_compare(
+ pkgname, self.compiler, other.compiler)
- #Variants
+ # Variants
if self.variants != other.variants:
- return spack.pkgsort.variant_compare(pkgname,
- self.variants, other.variants)
+ return spack.pkgsort.variant_compare(
+ pkgname, self.variants, other.variants)
- #Target
+ # Target
if self.architecture != other.architecture:
- return spack.pkgsort.architecture_compare(pkgname,
- self.architecture, other.architecture)
+ return spack.pkgsort.architecture_compare(
+ pkgname, self.architecture, other.architecture)
- #Dependency is not configurable
+ # Dependency is not configurable
if self._dependencies != other._dependencies:
return -1 if self._dependencies < other._dependencies else 1
- #Equal specs
+ # Equal specs
return 0
-
def __str__(self):
return self.format() + self.dep_string()
@@ -2244,12 +2288,14 @@ class Spec(object):
indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@+$+$=')
prefix = kwargs.pop('prefix', None)
+ deptypes = kwargs.pop('deptypes', ('build', 'link'))
check_kwargs(kwargs, self.tree)
out = ""
cur_id = 0
ids = {}
- for d, node in self.traverse(order='pre', cover=cover, depth=True):
+ for d, node in self.traverse(
+ order='pre', cover=cover, depth=True, deptypes=deptypes):
if prefix is not None:
out += prefix(node)
out += " " * indent
@@ -2303,8 +2349,8 @@ class SpecLexer(spack.parse.Lexer):
# Lexer is always the same for every parser.
_lexer = SpecLexer()
-class SpecParser(spack.parse.Parser):
+class SpecParser(spack.parse.Parser):
def __init__(self):
super(SpecParser, self).__init__(_lexer)
self.previous = None
@@ -2357,8 +2403,8 @@ class SpecParser(spack.parse.Parser):
except spack.parse.ParseError, e:
raise SpecParseError(e)
-
- # If the spec has an os or a target and no platform, give it the default platform
+ # If the spec has an os or a target and no platform, give it
+ # the default platform
for spec in specs:
for s in spec.traverse():
if s.architecture.os_string or s.architecture.target_string:
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index fb91f24721..a849d5f350 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -32,15 +32,17 @@ from llnl.util.tty.colify import colify
from spack.test.tally_plugin import Tally
"""Names of tests to be included in Spack's test suite"""
-test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'packages', 'stage',
- 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
- 'multimethod', 'install', 'package_sanity', 'config',
- 'directory_layout', 'pattern', 'python_version', 'git_fetch',
- 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
- 'cc', 'link_tree', 'spec_yaml', 'optional_deps',
- 'make_executable', 'configure_guess', 'lock', 'database',
- 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
- 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd']
+test_names = [
+ 'architecture', 'versions', 'url_parse', 'url_substitution', 'packages',
+ 'stage', 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
+ 'multimethod', 'install', 'package_sanity', 'config', 'directory_layout',
+ 'pattern', 'python_version', 'git_fetch', 'svn_fetch', 'hg_fetch',
+ 'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml',
+ 'optional_deps', 'make_executable', 'build_system_guess', 'lock',
+ 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
+ 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd',
+ 'cmd.module'
+]
def list_tests():
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
index ae3f08deed..09bdb021af 100644
--- a/lib/spack/spack/test/architecture.py
+++ b/lib/spack/spack/test/architecture.py
@@ -1,7 +1,31 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
""" Test checks if the architecture class is created correctly and also that
the functions are looking for the correct architecture name
"""
-import unittest
+import itertools
import os
import platform as py_platform
import spack
@@ -14,9 +38,8 @@ from spack.platforms.darwin import Darwin
from spack.test.mock_packages_test import *
-#class ArchitectureTest(unittest.TestCase):
-class ArchitectureTest(MockPackagesTest):
+class ArchitectureTest(MockPackagesTest):
def setUp(self):
super(ArchitectureTest, self).setUp()
self.platform = spack.architecture.platform()
@@ -36,24 +59,22 @@ class ArchitectureTest(MockPackagesTest):
self.assertEqual(arch, new_arch)
- self.assertTrue( isinstance(arch, spack.architecture.Arch) )
- self.assertTrue( isinstance(arch.platform, spack.architecture.Platform) )
- self.assertTrue( isinstance(arch.platform_os,
- spack.architecture.OperatingSystem) )
- self.assertTrue( isinstance(arch.target,
- spack.architecture.Target) )
- self.assertTrue( isinstance(new_arch, spack.architecture.Arch) )
- self.assertTrue( isinstance(new_arch.platform,
- spack.architecture.Platform) )
- self.assertTrue( isinstance(new_arch.platform_os,
- spack.architecture.OperatingSystem) )
- self.assertTrue( isinstance(new_arch.target,
- spack.architecture.Target) )
-
+ self.assertTrue(isinstance(arch, spack.architecture.Arch))
+ self.assertTrue(isinstance(arch.platform, spack.architecture.Platform))
+ self.assertTrue(isinstance(arch.platform_os,
+ spack.architecture.OperatingSystem))
+ self.assertTrue(isinstance(arch.target,
+ spack.architecture.Target))
+ self.assertTrue(isinstance(new_arch, spack.architecture.Arch))
+ self.assertTrue(isinstance(new_arch.platform,
+ spack.architecture.Platform))
+ self.assertTrue(isinstance(new_arch.platform_os,
+ spack.architecture.OperatingSystem))
+ self.assertTrue(isinstance(new_arch.target,
+ spack.architecture.Target))
def test_platform(self):
output_platform_class = spack.architecture.platform()
- my_arch_class = None
if os.path.exists('/opt/cray/craype'):
my_platform_class = CrayXc()
elif os.path.exists('/bgsys'):
@@ -91,7 +112,7 @@ class ArchitectureTest(MockPackagesTest):
default_os = self.platform.operating_system("default_os")
default_target = self.platform.target("default_target")
- default_spec = Spec("libelf") # default is no args
+ default_spec = Spec("libelf") # default is no args
default_spec.concretize()
self.assertEqual(default_os, default_spec.architecture.platform_os)
self.assertEqual(default_target, default_spec.architecture.target)
@@ -107,10 +128,11 @@ class ArchitectureTest(MockPackagesTest):
combinations = itertools.product(os_list, target_list)
results = []
for arch in combinations:
- o,t = arch
+ o, t = arch
spec = Spec("libelf os=%s target=%s" % (o, t))
spec.concretize()
- results.append(spec.architecture.platform_os == self.platform.operating_system(o))
+ results.append(spec.architecture.platform_os ==
+ self.platform.operating_system(o))
results.append(spec.architecture.target == self.platform.target(t))
res = all(results)
diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/build_system_guess.py
index bad3673e7a..e728a47cf4 100644
--- a/lib/spack/spack/test/configure_guess.py
+++ b/lib/spack/spack/test/build_system_guess.py
@@ -28,14 +28,14 @@ import tempfile
import unittest
from llnl.util.filesystem import *
-from spack.cmd.create import ConfigureGuesser
+from spack.cmd.create import BuildSystemGuesser
from spack.stage import Stage
from spack.test.mock_packages_test import *
from spack.util.executable import which
class InstallTest(unittest.TestCase):
- """Tests the configure guesser in spack create"""
+ """Tests the build system guesser in spack create"""
def setUp(self):
self.tar = which('tar')
@@ -44,12 +44,10 @@ class InstallTest(unittest.TestCase):
os.chdir(self.tmpdir)
self.stage = None
-
def tearDown(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
os.chdir(self.orig_dir)
-
def check_archive(self, filename, system):
mkdirp('archive')
touch(join_path('archive', filename))
@@ -60,24 +58,24 @@ class InstallTest(unittest.TestCase):
with Stage(url) as stage:
stage.fetch()
- guesser = ConfigureGuesser()
- guesser(stage)
+ guesser = BuildSystemGuesser()
+ guesser(stage, url)
self.assertEqual(system, guesser.build_system)
-
- def test_python(self):
- self.check_archive('setup.py', 'python')
-
-
def test_autotools(self):
self.check_archive('configure', 'autotools')
-
def test_cmake(self):
self.check_archive('CMakeLists.txt', 'cmake')
+ def test_scons(self):
+ self.check_archive('SConstruct', 'scons')
- def test_unknown(self):
- self.check_archive('foobar', 'unknown')
+ def test_python(self):
+ self.check_archive('setup.py', 'python')
+ def test_R(self):
+ self.check_archive('NAMESPACE', 'R')
+ def test_unknown(self):
+ self.check_archive('foobar', 'unknown')
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 371e9650e0..fa82db7733 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -27,11 +27,7 @@
import spack.cmd.find
import unittest
-
-class Bunch(object):
-
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
+from spack.util.pattern import Bunch
class FindTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py
new file mode 100644
index 0000000000..36a4a73fe6
--- /dev/null
+++ b/lib/spack/spack/test/cmd/module.py
@@ -0,0 +1,83 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import os.path
+
+import spack.cmd.module as module
+import spack.modules as modules
+import spack.test.mock_database
+
+
+class TestModule(spack.test.mock_database.MockDatabase):
+
+ def _get_module_files(self, args):
+ return [
+ modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501
+ ]
+
+ def test_module_common_operations(self):
+ parser = argparse.ArgumentParser()
+ module.setup_parser(parser)
+ # Try to remove a non existing module [tcl]
+ args = parser.parse_args(['rm', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Remove existing modules [tcl]
+ args = parser.parse_args(['rm', '-y', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+ # Add them back [tcl]
+ args = parser.parse_args(['refresh', '-y', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ # TODO : test the --delete-tree option
+ # TODO : this requires having a separate directory for test modules
+ # Try to find a module with multiple matches
+ args = parser.parse_args(['find', 'mpileaks'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Try to find a module with no matches
+ args = parser.parse_args(['find', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+ # Try to find a module
+ args = parser.parse_args(['find', 'libelf'])
+ module.module(parser, args)
+ # Remove existing modules [dotkit]
+ args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+ # Add them back [dotkit]
+ args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ # TODO : add tests for loads and find to check the prompt format
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index 6d2e3705bd..135cd028e3 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -27,7 +27,6 @@ from contextlib import contextmanager
import StringIO
import spack.modules
-import unittest
from spack.test.mock_packages_test import MockPackagesTest
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
@@ -266,7 +265,7 @@ class TclTests(MockPackagesTest):
def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist
- spec = spack.spec.Spec('mpileaks')
+ spec = spack.spec.Spec('mpileaks ^zmpi')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 6d4bcb1039..bc5e9d2ffe 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -28,42 +28,50 @@ import functools
def composite(interface=None, method_list=None, container=list):
- """
- Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given
- interface.
+ """Returns a class decorator that patches a class adding all the methods
+ it needs to be a composite for a given interface.
- :param interface: class exposing the interface to which the composite object must conform. Only non-private and
- non-special methods will be taken into account
+ :param interface: class exposing the interface to which the composite
+ object must conform. Only non-private and non-special methods will be
+ taken into account
:param method_list: names of methods that should be part of the composite
- :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract.
- The composite class will expose the container API to manage object composition
+ :param container: container for the composite object (default = list).
+ Must fulfill the MutableSequence contract. The composite class will expose
+ the container API to manage object composition
:return: class decorator
"""
- # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't
- # The patched class returned by the decorator will inherit from the container class to expose the
- # interface needed to manage objects composition
+ # Check if container fulfills the MutableSequence contract and raise an
+ # exception if it doesn't. The patched class returned by the decorator will
+ # inherit from the container class to expose the interface needed to manage
+ # object composition
if not issubclass(container, collections.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract")
- # Check if at least one of the 'interface' or the 'method_list' arguments are defined
+ # Check if at least one of the 'interface' or the 'method_list' arguments
+ # is defined
if interface is None and method_list is None:
- raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")
+ raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501
def cls_decorator(cls):
- # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden
+ # Retrieve the base class of the composite. Inspect its methods and
+ # decide which ones will be overridden
def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_')
- # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the
- # descriptor below to any method that needs to be patched.
+ # Patch the behavior of each of the methods in the previous list.
+ # This is done associating an instance of the descriptor below to
+ # any method that needs to be patched.
class IterateOver(object):
+ """Decorator used to patch methods in a composite.
+
+ It iterates over all the items in the instance containing the
+ associated attribute and, for each of them, calls the attribute
+ with the same name.
"""
- Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
- associated attribute and calls for each of them an attribute with the same name
- """
+
def __init__(self, name, func=None):
self.name = name
self.func = func
@@ -72,8 +80,9 @@ def composite(interface=None, method_list=None, container=list):
def getter(*args, **kwargs):
for item in instance:
getattr(item, self.name)(*args, **kwargs)
- # If we are using this descriptor to wrap a method from an interface, then we must conditionally
- # use the `functools.wraps` decorator to set the appropriate fields.
+ # If we are using this descriptor to wrap a method from an
+ # interface, then we must conditionally use the
+ # `functools.wraps` decorator to set the appropriate fields
if self.func is not None:
getter = functools.wraps(self.func)(getter)
return getter
@@ -81,7 +90,8 @@ def composite(interface=None, method_list=None, container=list):
dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
+ # python@2.7: method_list_dict = {name: IterateOver(name) for name
+ # in method_list}
method_list_dict = {}
for name in method_list:
method_list_dict[name] = IterateOver(name)
@@ -89,28 +99,40 @@ def composite(interface=None, method_list=None, container=list):
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
##########
- # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
+ # python@2.7: interface_methods = {name: method for name, method in
+ # inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {}
- for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
+ for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501
interface_methods[name] = method
##########
- # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
+ # python@2.7: interface_methods_dict = {name: IterateOver(name,
+ # method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {}
for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method)
##########
dictionary_for_type_call.update(interface_methods_dict)
- # Get the methods that are defined in the scope of the composite class and override any previous definition
+ # Get the methods that are defined in the scope of the composite
+ # class and override any previous definition
##########
- # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
+ # python@2.7: cls_method = {name: method for name, method in
+ # inspect.getmembers(cls, predicate=inspect.ismethod)}
cls_method = {}
- for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
+ for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501
cls_method[name] = method
##########
dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
- wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
+ wrapper_class = type(cls.__name__, (cls, container),
+ dictionary_for_type_call)
return wrapper_class
return cls_decorator
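A hedged usage sketch of the composite() decorator above; the class names are invented and the snippet assumes the Python 2 era code shown here:

class Task(object):
    def run(self):
        pass

@composite(interface=Task)
class TaskGroup(object):
    pass

group = TaskGroup()     # the generated class also inherits from list
group.append(Task())
group.run()             # run() is forwarded to every contained item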
+
+
+class Bunch(object):
+ """Carries a bunch of named attributes (from Alex Martelli bunch)"""
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
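Bunch is what the command tests above now import instead of defining their own; typical use, with hypothetical attribute names:

args = Bunch(module_type='tcl', specs=['mpileaks'])
args.module_type        # 'tcl'
args.specs              # ['mpileaks']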