summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authoralalazo <massimiliano.culpo@googlemail.com>2016-10-05 09:33:59 +0200
committeralalazo <massimiliano.culpo@googlemail.com>2016-10-05 09:33:59 +0200
commit7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25 (patch)
treee37497aff3b3b24618677138efd68fde37b1916e /lib
parent47f6a6d3cfd1285fb5d8cd95a415c8a24045b330 (diff)
parentbff1656a1a842b4de217129cc8cba732a30928a7 (diff)
downloadspack-7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25.tar.gz
spack-7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25.tar.bz2
spack-7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25.tar.xz
spack-7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25.zip
Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases_rebase
Conflicts: lib/spack/spack/cmd/setup.py lib/spack/spack/package.py var/spack/repos/builtin/packages/gmp/package.py
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/Makefile4
-rw-r--r--lib/spack/docs/command_index.in3
-rw-r--r--lib/spack/docs/conf.py25
-rw-r--r--lib/spack/docs/configuration.rst2
-rw-r--r--lib/spack/docs/packaging_guide.rst28
-rwxr-xr-xlib/spack/env/cc32
-rw-r--r--lib/spack/llnl/util/filesystem.py197
-rw-r--r--lib/spack/llnl/util/lang.py16
-rw-r--r--lib/spack/llnl/util/lock.py11
-rw-r--r--lib/spack/spack/abi.py4
-rw-r--r--lib/spack/spack/build_environment.py23
-rw-r--r--lib/spack/spack/cmd/__init__.py10
-rw-r--r--lib/spack/spack/cmd/checksum.py2
-rw-r--r--lib/spack/spack/cmd/debug.py84
-rw-r--r--lib/spack/spack/cmd/diy.py6
-rw-r--r--lib/spack/spack/cmd/graph.py46
-rw-r--r--lib/spack/spack/cmd/location.py5
-rw-r--r--lib/spack/spack/cmd/module.py2
-rw-r--r--lib/spack/spack/cmd/setup.py3
-rw-r--r--lib/spack/spack/cmd/test.py2
-rw-r--r--lib/spack/spack/database.py100
-rw-r--r--lib/spack/spack/directory_layout.py2
-rw-r--r--lib/spack/spack/fetch_strategy.py13
-rw-r--r--lib/spack/spack/graph.py133
-rw-r--r--lib/spack/spack/hooks/__init__.py14
-rw-r--r--lib/spack/spack/hooks/lmodmodule.py35
-rw-r--r--lib/spack/spack/hooks/sbang.py12
-rw-r--r--lib/spack/spack/modules.py243
-rw-r--r--lib/spack/spack/operating_systems/mac_os.py2
-rw-r--r--lib/spack/spack/package.py23
-rw-r--r--lib/spack/spack/platforms/cray.py109
-rw-r--r--lib/spack/spack/schema/modules.py15
-rw-r--r--lib/spack/spack/spec.py162
-rw-r--r--lib/spack/spack/stage.py34
-rw-r--r--lib/spack/spack/test/__init__.py1
-rw-r--r--lib/spack/spack/test/cc.py66
-rw-r--r--lib/spack/spack/test/database.py41
-rw-r--r--lib/spack/spack/test/library_list.py111
-rw-r--r--lib/spack/spack/test/lock.py29
-rw-r--r--lib/spack/spack/test/modules.py394
-rw-r--r--lib/spack/spack/test/spec_dag.py6
-rw-r--r--lib/spack/spack/test/spec_syntax.py54
42 files changed, 1590 insertions, 514 deletions
diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile
index 95d26041b7..1b56959aad 100644
--- a/lib/spack/docs/Makefile
+++ b/lib/spack/docs/Makefile
@@ -2,12 +2,12 @@
#
# You can set these variables from the command line.
-SPHINXOPTS =
+SPHINXOPTS = -E
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
-export PYTHONPATH = ../../spack
+export PYTHONPATH := ../../spack:$(PYTHONPATH)
APIDOC_FILES = spack*.rst
# Internal variables.
diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in
index 94cdf38109..8c5c97dd9c 100644
--- a/lib/spack/docs/command_index.in
+++ b/lib/spack/docs/command_index.in
@@ -1,5 +1,4 @@
-.. _command_index:
-
+=================
Command index
=================
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 43d37f1526..57469964ee 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -1,10 +1,7 @@
# flake8: noqa
##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# Created by Todd Gamblin, tgamblin@llnl.gov.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
@@ -65,14 +62,15 @@ spack_version = subprocess.Popen(
# Set an environment variable so that colify will print output like it would to
# a terminal.
-os.environ['COLIFY_SIZE'] = '25x80'
+os.environ['COLIFY_SIZE'] = '25x120'
#
# Generate package list using spack command
#
-with open('package_list.rst', 'w') as plist_file:
- subprocess.Popen(
- [spack_root + '/bin/spack', 'package-list'], stdout=plist_file)
+if not os.path.exists('package_list.rst'):
+ with open('package_list.rst', 'w') as plist_file:
+ subprocess.Popen(
+ [spack_root + '/bin/spack', 'package-list'], stdout=plist_file)
#
# Find all the `spack-*` references and add them to a command index
@@ -85,11 +83,12 @@ for filename in glob('*rst'):
if match:
command_names.append(match.group(1).strip())
-shutil.copy('command_index.in', 'command_index.rst')
-with open('command_index.rst', 'a') as index:
- index.write('\n')
- for cmd in sorted(command_names):
- index.write(' * :ref:`%s`\n' % cmd)
+if not os.path.exists('command_index.rst'):
+ shutil.copy('command_index.in', 'command_index.rst')
+ with open('command_index.rst', 'a') as index:
+ index.write('\n')
+ for cmd in sorted(command_names):
+ index.write(' * :ref:`%s`\n' % cmd)
# Run sphinx-apidoc
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index ba534d1e62..6de823c845 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -207,7 +207,7 @@ supply ``-p`` to Spack on the command line, before any subcommands.
``spack --profile`` output looks like this:
-.. command-output:: spack --profile graph dyninst
+.. command-output:: spack --profile graph --deptype=nobuild dyninst
:ellipsis: 25
The bottom of the output shows the top most time consuming functions,
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 6936b5e423..70cd58f6c1 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2090,12 +2090,11 @@ Blas and Lapack libraries
Different packages provide implementation of ``Blas`` and ``Lapack`` routines.
The names of the resulting static and/or shared libraries differ from package
-to package. In order to make the ``install()`` method indifferent to the
+to package. In order to make the ``install()`` method independent of the
choice of ``Blas`` implementation, each package which provides it
-sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to
-point to the shared and static ``Blas`` libraries, respectively. The same
-applies to packages which provide ``Lapack``. Package developers are advised to
-use these variables, for example ``spec['blas'].blas_shared_lib`` instead of
+sets up ``self.spec.blas_libs`` to point to the correct ``Blas`` libraries.
+The same applies to packages which provide ``Lapack``. Package developers are advised to
+use these variables, for example ``spec['blas'].blas_libs.joined()`` instead of
hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
^^^^^^^^^^^^^^^^^^^^^
@@ -2889,9 +2888,22 @@ dependency graph. For example:
.. command-output:: spack graph mpileaks
-At the top is the root package in the DAG, with dependency edges
-emerging from it. On a color terminal, the edges are colored by which
-dependency they lead to.
+At the top is the root package in the DAG, with dependency edges emerging
+from it. On a color terminal, the edges are colored by which dependency
+they lead to.
+
+.. command-output:: spack graph --deptype=all mpileaks
+
+The ``deptype`` argument tells Spack what types of dependencies to graph.
+By default it includes link and run dependencies but not build
+dependencies. Supplying ``--deptype=all`` will show the build
+dependencies as well. This is equivalent to
+``--deptype=build,link,run``. Options for ``deptype`` include:
+
+* Any combination of ``build``, ``link``, and ``run`` separated by
+ commas.
+* ``nobuild``, ``nolink``, ``norun`` to omit one type.
+* ``all`` or ``alldeps`` for all types of dependencies.
You can also use ``spack graph`` to generate graphs in the widely used
`Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index c6bb50d261..4b8922178a 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -266,22 +266,38 @@ for dep in "${deps[@]}"; do
# Prepend lib and RPATH directories
if [[ -d $dep/lib ]]; then
if [[ $mode == ccld ]]; then
- $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
- args=("-L$dep/lib" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib" "${args[@]}")
+ fi
elif [[ $mode == ld ]]; then
- $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
- args=("-L$dep/lib" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib" "${args[@]}")
+ fi
fi
fi
# Prepend lib64 and RPATH directories
if [[ -d $dep/lib64 ]]; then
if [[ $mode == ccld ]]; then
- $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
- args=("-L$dep/lib64" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib64" "${args[@]}")
+ fi
elif [[ $mode == ld ]]; then
- $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
- args=("-L$dep/lib64" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib64" "${args[@]}")
+ fi
fi
fi
done
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 22ca85abf9..c3ecfde4f4 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -22,18 +22,22 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
+import collections
+import errno
+import fileinput
+import getpass
import glob
+import numbers
+import os
import re
import shutil
import stat
-import errno
-import getpass
-from contextlib import contextmanager
import subprocess
-import fileinput
+import sys
+from contextlib import contextmanager
import llnl.util.tty as tty
+from llnl.util.lang import dedupe
__all__ = ['set_install_permissions', 'install', 'install_tree',
'traverse_tree',
@@ -42,8 +46,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree',
'filter_file',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
- 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
- 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
+ 'remove_dead_links', 'remove_linked_tree',
+ 'fix_darwin_install_name', 'find_libraries', 'LibraryList']
def filter_file(regex, repl, *filenames, **kwargs):
@@ -326,7 +330,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
follow_links = kwargs.get('follow_link', False)
# Yield in pre or post order?
- order = kwargs.get('order', 'pre')
+ order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
@@ -338,7 +342,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
return
source_path = os.path.join(source_root, rel_path)
- dest_path = os.path.join(dest_root, rel_path)
+ dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
@@ -346,8 +350,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
- dest_child = os.path.join(dest_path, f)
- rel_child = os.path.join(rel_path, f)
+ dest_child = os.path.join(dest_path, f)
+ rel_child = os.path.join(rel_path, f)
# Treat as a directory
if os.path.isdir(source_child) and (
@@ -440,35 +444,162 @@ def fix_darwin_install_name(path):
stdout=subprocess.PIPE).communicate()[0]
break
+# Utilities for libraries
+
-def to_lib_name(library):
- """Transforms a path to the library /path/to/lib<name>.xyz into <name>
+class LibraryList(collections.Sequence):
+ """Sequence of absolute paths to libraries
+
+ Provides a few convenience methods to manipulate library paths and get
+ commonly used compiler flags or names
"""
- # Assume libXYZ.suffix
- return os.path.basename(library)[3:].split(".")[0]
+ def __init__(self, libraries):
+ self.libraries = list(libraries)
-def to_link_flags(library):
- """Transforms a path to a <library> into linking flags -L<dir> -l<name>.
+ @property
+ def directories(self):
+ """Stable de-duplication of the directories where the libraries
+ reside
- Return:
- A string of linking flags.
- """
- dir = os.path.dirname(library)
- name = to_lib_name(library)
- res = '-L%s -l%s' % (dir, name)
- return res
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
+ >>> assert l.directories == ['/dir1', '/dir2']
+ """
+ return list(dedupe(
+ os.path.dirname(x) for x in self.libraries if os.path.dirname(x)
+ ))
+
+ @property
+ def basenames(self):
+ """Stable de-duplication of the base-names in the list
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
+ >>> assert l.basenames == ['liba.a', 'libb.a']
+ """
+ return list(dedupe(os.path.basename(x) for x in self.libraries))
+
+ @property
+ def names(self):
+ """Stable de-duplication of library names in the list
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so'])
+ >>> assert l.names == ['a', 'b']
+ """
+ return list(dedupe(x.split('.')[0][3:] for x in self.basenames))
+
+ @property
+ def search_flags(self):
+ """Search flags for the libraries
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+ >>> assert l.search_flags == '-L/dir1 -L/dir2'
+ """
+ return ' '.join(['-L' + x for x in self.directories])
+
+ @property
+ def link_flags(self):
+ """Link flags for the libraries
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+ >>> assert l.link_flags == '-la -lb'
+ """
+ return ' '.join(['-l' + name for name in self.names])
+ @property
+ def ld_flags(self):
+ """Search flags + link flags
-def find_library_path(libname, *paths):
- """Searches for a file called <libname> in each path.
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+ >>> assert l.ld_flags == '-L/dir1 -L/dir2 -la -lb'
+ """
+ return self.search_flags + ' ' + self.link_flags
- Return:
- directory where the library was found, if found. None otherwise.
+ def __getitem__(self, item):
+ cls = type(self)
+ if isinstance(item, numbers.Integral):
+ return self.libraries[item]
+ return cls(self.libraries[item])
+ def __add__(self, other):
+ return LibraryList(dedupe(self.libraries + list(other)))
+
+ def __radd__(self, other):
+ return self.__add__(other)
+
+ def __eq__(self, other):
+ return self.libraries == other.libraries
+
+ def __len__(self):
+ return len(self.libraries)
+
+ def joined(self, separator=' '):
+ return separator.join(self.libraries)
+
+ def __repr__(self):
+ return self.__class__.__name__ + '(' + repr(self.libraries) + ')'
+
+ def __str__(self):
+ return self.joined()
+
+
+def find_libraries(args, root, shared=True, recurse=False):
+ """Returns an iterable object containing a list of full paths to
+ libraries if found.
+
+ Args:
+ args: iterable object containing a list of library names to \
+ search for (e.g. 'libhdf5')
+ root: root folder where to start searching
+ shared: if True searches for shared libraries, otherwise for static
+ recurse: if False search only root folder, if True descends top-down \
+ from the root
+
+ Returns:
+ list of full paths to the libraries that have been found
"""
- for path in paths:
- library = join_path(path, libname)
- if os.path.exists(library):
- return path
- return None
+ if not isinstance(args, collections.Sequence) or isinstance(args, str):
+ message = '{0} expects a sequence of strings as first argument'
+ message += ' [got {1} instead]'
+ raise TypeError(message.format(find_libraries.__name__, type(args)))
+
+ # Construct the right suffix for the library
+ if shared is True:
+ suffix = 'dylib' if sys.platform == 'darwin' else 'so'
+ else:
+ suffix = 'a'
+ # List of libraries we are searching with suffixes
+ libraries = ['{0}.{1}'.format(lib, suffix) for lib in args]
+ # Search method
+ if recurse is False:
+ search_method = _find_libraries_non_recursive
+ else:
+ search_method = _find_libraries_recursive
+
+ return search_method(libraries, root)
+
+
+def _find_libraries_recursive(libraries, root):
+ library_dict = collections.defaultdict(list)
+ for path, _, files in os.walk(root):
+ for lib in libraries:
+ if lib in files:
+ library_dict[lib].append(
+ join_path(path, lib)
+ )
+ answer = []
+ for lib in libraries:
+ answer.extend(library_dict[lib])
+ return LibraryList(answer)
+
+
+def _find_libraries_non_recursive(libraries, root):
+
+ def lib_or_none(lib):
+ library = join_path(root, lib)
+ if not os.path.exists(library):
+ return None
+ return library
+
+ return LibraryList(
+ [lib_or_none(lib) for lib in libraries if lib_or_none(lib) is not None]
+ )
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index df32012e2d..253334c416 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -374,6 +374,22 @@ def DictWrapper(dictionary):
return wrapper()
+def dedupe(sequence):
+ """Yields a stable de-duplication of an hashable sequence
+
+ Args:
+ sequence: hashable sequence to be de-duplicated
+
+ Returns:
+ stable de-duplication of the sequence
+ """
+ seen = set()
+ for x in sequence:
+ if x not in seen:
+ yield x
+ seen.add(x)
+
+
class RequiredAttributeError(ValueError):
def __init__(self, message):
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index aa8272d5fe..f5f53101ae 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -69,8 +69,17 @@ class Lock(object):
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
+ # If this is already open read-only and we want to
+ # upgrade to an exclusive write lock, close first.
+ if self._fd is not None:
+ flags = fcntl.fcntl(self._fd, fcntl.F_GETFL)
+ if op == fcntl.LOCK_EX and flags | os.O_RDONLY:
+ os.close(self._fd)
+ self._fd = None
+
if self._fd is None:
- self._fd = os.open(self._file_path, os.O_RDWR)
+ mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
+ self._fd = os.open(self._file_path, mode)
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index 064abb9782..7a6f9c3f40 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -54,10 +54,10 @@ class ABI(object):
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
- libname = "libstdc++.so"
+ libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
- libname = "libgcc_s.so"
+ libname = "libgcc_s." + dso_suffix
else:
return None
try:
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 9f7cf0bb1b..c7cfcf9328 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -75,6 +75,8 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
#
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
+SPACK_RPATH_DEPS = 'SPACK_RPATH_DEPS'
+SPACK_LINK_DEPS = 'SPACK_LINK_DEPS'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_INSTALL = 'SPACK_INSTALL'
SPACK_DEBUG = 'SPACK_DEBUG'
@@ -252,9 +254,15 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
- dep_prefixes = [d.prefix
- for d in pkg.spec.traverse(root=False, deptype='build')]
+ dep_prefixes = [d.prefix for d in
+ pkg.spec.traverse(root=False, deptype=('build', 'link'))]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
+
+ # These variables control compiler wrapper behavior
+ env.set_path(SPACK_RPATH_DEPS, [d.prefix for d in get_rpath_deps(pkg)])
+ env.set_path(SPACK_LINK_DEPS, [
+ d.prefix for d in pkg.spec.traverse(root=False, deptype=('link'))])
+
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@@ -286,8 +294,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.remove_path('PATH', p)
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = reversed(
- filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+ bin_dirs = reversed(filter(os.path.isdir, [
+ '%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
for item in bin_dirs:
env.prepend_path('PATH', item)
@@ -372,10 +380,15 @@ def set_module_variables_for_package(pkg, module):
m.dso_suffix = dso_suffix
+def get_rpath_deps(pkg):
+ """We only need to RPATH immediate dependencies."""
+ return pkg.spec.dependencies(deptype='link')
+
+
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
- deps = pkg.spec.dependencies(deptype='link')
+ deps = get_rpath_deps(pkg)
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in deps
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index f69f434afd..6b1561b7fc 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -69,17 +69,17 @@ def get_cmd_function_name(name):
def get_module(name):
"""Imports the module for a particular command name and returns it."""
module_name = "%s.%s" % (__name__, name)
- module = __import__(
- module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
- level=0)
+ module = __import__(module_name,
+ fromlist=[name, SETUP_PARSER, DESCRIPTION],
+ level=0)
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
if not hasattr(module, fn_name):
- tty.die("Command module %s (%s) must define function '%s'."
- % (module.__name__, module.__file__, fn_name))
+ tty.die("Command module %s (%s) must define function '%s'." %
+ (module.__name__, module.__file__, fn_name))
return module
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index aedb0fd99c..2e24d0527e 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -113,6 +113,6 @@ def checksum(parser, args):
tty.die("Could not fetch any versions for %s" % pkg.name)
version_lines = [
- " version('%s', '%s')" % (v, h) for v, h in version_hashes
+ " version('%s', '%s')" % (v, h) for v, h in version_hashes
]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py
new file mode 100644
index 0000000000..958eb829b4
--- /dev/null
+++ b/lib/spack/spack/cmd/debug.py
@@ -0,0 +1,84 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from datetime import datetime
+from glob import glob
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import working_dir
+
+import spack
+from spack.util.executable import which
+
+description = "Debugging commands for troubleshooting Spack."
+
+
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command')
+ sp.add_parser('create-db-tarball',
+ help="Create a tarball of Spack's installation metadata.")
+
+
+def _debug_tarball_suffix():
+ now = datetime.now()
+ suffix = now.strftime('%Y-%m-%d-%H%M%S')
+
+ git = which('git')
+ if not git:
+ return 'nobranch-nogit-%s' % suffix
+
+ with working_dir(spack.spack_root):
+ if not os.path.isdir('.git'):
+ return 'nobranch.nogit.%s' % suffix
+
+ symbolic = git(
+ 'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
+ commit = git(
+ 'rev-parse', '--short', 'HEAD', output=str).strip()
+
+ if symbolic == commit:
+ return "nobranch.%s.%s" % (commit, suffix)
+ else:
+ return "%s.%s.%s" % (symbolic, commit, suffix)
+
+
+def create_db_tarball(args):
+ tar = which('tar')
+ tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
+ tarball_path = os.path.abspath(tarball_name)
+
+ with working_dir(spack.spack_root):
+ files = [spack.installed_db._index_path]
+ files += glob('%s/*/*/*/.spack/spec.yaml' % spack.install_path)
+ files = [os.path.relpath(f) for f in files]
+
+ tar('-czf', tarball_path, *files)
+
+ tty.msg('Created %s' % tarball_name)
+
+
+def debug(parser, args):
+ action = {'create-db-tarball': create_db_tarball}
+ action[args.debug_command](args)
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 487654d261..9833e8cdce 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -52,6 +52,9 @@ def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND version.")
+ subparser.add_argument(
+ '--dirty', action='store_true', dest='dirty',
+ help="Install a package *without* cleaning the environment.")
def diy(self, args):
@@ -100,4 +103,5 @@ def diy(self, args):
keep_prefix=args.keep_prefix,
ignore_deps=args.ignore_deps,
verbose=not args.quiet,
- keep_stage=True) # don't remove source dir for DIY.
+ keep_stage=True, # don't remove source dir for DIY.
+ dirty=args.dirty)
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index 8faabfbb7b..ed3aed1946 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -24,8 +24,11 @@
##############################################################################
import argparse
+import llnl.util.tty as tty
+
import spack
import spack.cmd
+from spack.spec import *
from spack.graph import *
description = "Generate graphs of package dependency relationships."
@@ -36,15 +39,28 @@ def setup_parser(subparser):
method = subparser.add_mutually_exclusive_group()
method.add_argument(
- '--ascii', action='store_true',
+ '-a', '--ascii', action='store_true',
help="Draw graph as ascii to stdout (default).")
method.add_argument(
- '--dot', action='store_true',
+ '-d', '--dot', action='store_true',
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
- '--concretize', action='store_true',
- help="Concretize specs before graphing.")
+ '-n', '--normalize', action='store_true',
+ help="Skip concretization; only print normalized spec.")
+
+ subparser.add_argument(
+ '-s', '--static', action='store_true',
+ help="Use static information from packages, not dynamic spec info.")
+
+ subparser.add_argument(
+ '-i', '--installed', action='store_true',
+ help="Graph all installed specs in dot format (implies --dot).")
+
+ subparser.add_argument(
+ '-t', '--deptype', action='store',
+ help="Comma-separated list of deptypes to traverse. default=%s."
+ % ','.join(alldeps))
subparser.add_argument(
'specs', nargs=argparse.REMAINDER,
@@ -52,18 +68,32 @@ def setup_parser(subparser):
def graph(parser, args):
- specs = spack.cmd.parse_specs(
- args.specs, normalize=True, concretize=args.concretize)
+ concretize = not args.normalize
+ if args.installed:
+ if args.specs:
+ tty.die("Can't specify specs with --installed")
+ args.dot = True
+ specs = spack.installed_db.query()
+
+ else:
+ specs = spack.cmd.parse_specs(
+ args.specs, normalize=True, concretize=concretize)
if not specs:
setup_parser.parser.print_help()
return 1
+ deptype = nobuild
+ if args.deptype:
+ deptype = tuple(args.deptype.split(','))
+ validate_deptype(deptype)
+ deptype = canonical_deptype(deptype)
+
if args.dot: # Dot graph only if asked for.
- graph_dot(*specs)
+ graph_dot(specs, static=args.static, deptype=deptype)
elif specs: # ascii is default: user doesn't need to provide it explicitly
- graph_ascii(specs[0], debug=spack.debug)
+ graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index b9c8b5c330..54f7185707 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -25,7 +25,6 @@
import argparse
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
import spack
import spack.cmd
@@ -77,7 +76,7 @@ def location(parser, args):
print spack.prefix
elif args.packages:
- print spack.repo.root
+ print spack.repo.first_repo().root
elif args.stages:
print spack.stage_path
@@ -99,7 +98,7 @@ def location(parser, args):
if args.package_dir:
# This one just needs the spec name.
- print join_path(spack.repo.root, spec.name)
+ print spack.repo.dirname_for_package_name(spec.name)
else:
# These versions need concretized specs.
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 2d0b83fe00..c6fa84109e 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -29,10 +29,10 @@ import os
import shutil
import sys
+import llnl.util.filesystem as filesystem
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
-import llnl.util.filesystem as filesystem
from spack.modules import module_types
description = "Manipulate module files"
diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py
index 652c08354f..f39a827a8d 100644
--- a/lib/spack/spack/cmd/setup.py
+++ b/lib/spack/spack/cmd/setup.py
@@ -48,6 +48,9 @@ def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND version.")
+ subparser.add_argument(
+ '--dirty', action='store_true', dest='dirty',
+ help="Install a package *without* cleaning the environment.")
def spack_transitive_include_path():
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index bf7342f606..52c2a06778 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -56,7 +56,7 @@ class MockCache(object):
def store(self, copyCmd, relativeDst):
pass
- def fetcher(self, targetPath, digest):
+ def fetcher(self, targetPath, digest, **kwargs):
return MockCacheFetcher()
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index e450b4d424..f73d3765c8 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -49,8 +49,9 @@ from llnl.util.filesystem import *
from llnl.util.lock import *
import spack.spec
+from spack.directory_layout import DirectoryLayoutError
from spack.version import Version
-from spack.spec import Spec
+from spack.spec import *
from spack.error import SpackError
from spack.repository import UnknownPackageError
import spack.util.spack_yaml as syaml
@@ -64,6 +65,9 @@ _db_version = Version('0.9.2')
# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
+# Types of dependencies tracked by the database
+_tracked_deps = nobuild
+
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
@@ -232,8 +236,6 @@ class Database(object):
spec.format('$_$#'), dname, dhash[:7]))
continue
- # defensive copy (not sure everything handles extra
- # parent links yet)
child = data[dhash].spec
spec._add_dependency(child, dtypes)
@@ -328,7 +330,7 @@ class Database(object):
self._data = data
def reindex(self, directory_layout):
- """Build database index from scratch based from a directory layout.
+ """Build database index from scratch based on a directory layout.
Locks the DB if it isn't locked already.
@@ -359,9 +361,6 @@ class Database(object):
# Ask the directory layout to traverse the filesystem.
for spec in directory_layout.all_specs():
- # Create a spec for each known package and add it.
- path = directory_layout.path_for_spec(spec)
-
# Try to recover explicit value from old DB, but
# default it to False if DB was corrupt.
explicit = False
@@ -370,7 +369,7 @@ class Database(object):
if old_info is not None:
explicit = old_info.explicit
- self._add(spec, path, directory_layout, explicit=explicit)
+ self._add(spec, directory_layout, explicit=explicit)
self._check_ref_counts()
@@ -389,10 +388,7 @@ class Database(object):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
- # XXX(deptype): This checks all dependencies, but build
- # dependencies might be able to be dropped in the
- # future.
- for dep in rec.spec.dependencies():
+ for dep in rec.spec.dependencies(_tracked_deps):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -450,52 +446,62 @@ class Database(object):
# reindex() takes its own write lock, so no lock here.
self.reindex(spack.install_layout)
- def _add(self, spec, path, directory_layout=None, explicit=False):
- """Add an install record for spec at path to the database.
+ def _add(self, spec, directory_layout=None, explicit=False):
+ """Add an install record for this spec to the database.
- This assumes that the spec is not already installed. It
- updates the ref counts on dependencies of the spec in the DB.
+ Assumes spec is installed in ``layout.path_for_spec(spec)``.
- This operation is in-memory, and does not lock the DB.
+ Also ensures dependencies are present and updated in the DB as
+ either intsalled or missing.
"""
- key = spec.dag_hash()
- if key in self._data:
- rec = self._data[key]
- rec.installed = True
+ if not spec.concrete:
+ raise NonConcreteSpecAddError(
+ "Specs added to DB must be concrete.")
- # TODO: this overwrites a previous install path (when path !=
- # self._data[key].path), and the old path still has a
- # dependent in the DB. We could consider re-RPATH-ing the
- # dependents. This case is probably infrequent and may not be
- # worth fixing, but this is where we can discover it.
- rec.path = path
-
- else:
- self._data[key] = InstallRecord(spec, path, True,
- explicit=explicit)
- for dep in spec.dependencies(('link', 'run')):
- self._increment_ref_count(dep, directory_layout)
+ for dep in spec.dependencies(_tracked_deps):
+ dkey = dep.dag_hash()
+ if dkey not in self._data:
+ self._add(dep, directory_layout, explicit=False)
- def _increment_ref_count(self, spec, directory_layout=None):
- """Recursively examine dependencies and update their DB entries."""
key = spec.dag_hash()
if key not in self._data:
installed = False
path = None
if directory_layout:
path = directory_layout.path_for_spec(spec)
- installed = os.path.isdir(path)
+ try:
+ directory_layout.check_installed(spec)
+ installed = True
+ except DirectoryLayoutError as e:
+ tty.warn(
+ 'Dependency missing due to corrupt install directory:',
+ path, str(e))
+
+ # Create a new install record with no deps initially.
+ new_spec = spec.copy(deps=False)
+ self._data[key] = InstallRecord(
+ new_spec, path, installed, ref_count=0, explicit=explicit)
+
+ # Connect dependencies from the DB to the new copy.
+ for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
+ dkey = dep.spec.dag_hash()
+ new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
+ self._data[dkey].ref_count += 1
+
+ # Mark concrete once everything is built, and preserve
+ # the original hash of concrete specs.
+ new_spec._mark_concrete()
+ new_spec._hash = key
- self._data[key] = InstallRecord(spec.copy(), path, installed)
-
- for dep in spec.dependencies(('link', 'run')):
- self._increment_ref_count(dep)
+ else:
+ # If it is already there, mark it as installed.
+ self._data[key].installed = True
- self._data[key].ref_count += 1
+ self._data[key].explicit = explicit
@_autospec
- def add(self, spec, path, explicit=False):
+ def add(self, spec, directory_layout, explicit=False):
"""Add spec at path to database, locking and reading DB to sync.
``add()`` will lock and read from the DB on disk.
@@ -504,7 +510,7 @@ class Database(object):
# TODO: ensure that spec is concrete?
# Entire add is transactional.
with self.write_transaction():
- self._add(spec, path, explicit=explicit)
+ self._add(spec, directory_layout, explicit=explicit)
def _get_matching_spec_key(self, spec, **kwargs):
"""Get the exact spec OR get a single spec that matches."""
@@ -534,7 +540,7 @@ class Database(object):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
- for dep in spec.dependencies('link'):
+ for dep in spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
def _remove(self, spec):
@@ -548,7 +554,7 @@ class Database(object):
return rec.spec
del self._data[key]
- for dep in rec.spec.dependencies('link'):
+ for dep in rec.spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a
@@ -657,6 +663,10 @@ class CorruptDatabaseError(SpackError):
"""Raised when errors are found while reading the database."""
+class NonConcreteSpecAddError(SpackError):
+ """Raised when attemptint to add non-concrete spec to DB."""
+
+
class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 73286483ef..8ef7d3c480 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -423,7 +423,7 @@ class RemoveFailedError(DirectoryLayoutError):
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
- % prefix, installed_spec.short_spec, error)
+ % (prefix, installed_spec.short_spec, error))
self.cause = error
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index c69a23033c..21802c4556 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -170,12 +170,11 @@ class URLFetchStrategy(FetchStrategy):
tty.msg("Already downloaded %s" % self.archive_file)
return
- possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
- if possible_files:
- save_file = self.stage.expected_archive_files[0]
- partial_file = self.stage.expected_archive_files[0] + '.part'
+ if self.stage.save_filename:
+ save_file = self.stage.save_filename
+ partial_file = self.stage.save_filename + '.part'
tty.msg("Trying to fetch from %s" % self.url)
@@ -307,7 +306,7 @@ class URLFetchStrategy(FetchStrategy):
if not self.archive_file:
raise NoArchiveFileError("Cannot call archive() before fetching.")
- shutil.copy(self.archive_file, destination)
+ shutil.copyfile(self.archive_file, destination)
@_needs_stage
def check(self):
@@ -858,9 +857,9 @@ class FsCache(object):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
- def fetcher(self, targetPath, digest):
+ def fetcher(self, targetPath, digest, **kwargs):
url = "file://" + join_path(self.root, targetPath)
- return CacheURLFetchStrategy(url, digest)
+ return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index b875e9da99..9753741c61 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -67,22 +67,20 @@ from heapq import *
from llnl.util.lang import *
from llnl.util.tty.color import *
-import spack
-from spack.spec import Spec
+from spack.spec import *
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
-def topological_sort(spec, **kwargs):
+def topological_sort(spec, reverse=False, deptype=None):
"""Topological sort for specs.
Return a list of dependency specs sorted topologically. The spec
argument is not modified in the process.
"""
- reverse = kwargs.get('reverse', False)
- # XXX(deptype): iterate over a certain kind of dependency. Maybe color
- # edges based on the type of dependency?
+ deptype = canonical_deptype(deptype)
+
if not reverse:
parents = lambda s: s.dependents()
children = lambda s: s.dependencies()
@@ -91,7 +89,7 @@ def topological_sort(spec, **kwargs):
children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
- spec = spec.copy()
+ spec = spec.copy(deps=deptype)
nodes = spec.index()
topo_order = []
@@ -129,7 +127,7 @@ def find(seq, predicate):
return -1
-# Names of different graph line states. We Record previous line
+# Names of different graph line states. We record previous line
# states so that we can easily determine what to do when connecting.
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
@@ -143,6 +141,7 @@ class AsciiGraph(object):
self.node_character = '*'
self.debug = False
self.indent = 0
+ self.deptype = alldeps
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@@ -162,6 +161,9 @@ class AsciiGraph(object):
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
+ # Ignore empty frontier entries (they're just collapsed)
+ if not self._frontier[index]:
+ return
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
@@ -386,7 +388,7 @@ class AsciiGraph(object):
self._out = ColorStream(sys.stdout, color=color)
# We'll traverse the spec in topo order as we graph it.
- topo_order = topological_sort(spec, reverse=True)
+ topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
# Work on a copy to be nondestructive
spec = spec.copy()
@@ -420,20 +422,26 @@ class AsciiGraph(object):
if back:
back.sort()
prev_ends = []
+ collapse_l1 = False
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
if i - b > 1:
- self._back_edge_line(prev_ends, b, i, False,
- 'left-1')
+ collapse_l1 = any(not e for e in self._frontier)
+ self._back_edge_line(
+ prev_ends, b, i, collapse_l1, 'left-1')
del prev_ends[:]
prev_ends.append(b)
# Check whether we did ALL the deps as back edges,
# in which case we're done.
- collapse = not self._frontier[i]
- if collapse:
+ pop = not self._frontier[i]
+ collapse_l2 = pop
+ if collapse_l1:
+ collapse_l2 = False
+ if pop:
self._frontier.pop(i)
- self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
+ self._back_edge_line(
+ prev_ends, -1, -1, collapse_l2, 'left-2')
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
@@ -476,32 +484,28 @@ class AsciiGraph(object):
# Replace node with its dependencies
self._frontier.pop(i)
- if node.dependencies():
- deps = sorted((d.name for d in node.dependencies()),
- reverse=True)
+ deps = node.dependencies(self.deptype)
+ if deps:
+ deps = sorted((d.name for d in deps), reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
-def graph_ascii(spec, **kwargs):
- node_character = kwargs.get('node', 'o')
- out = kwargs.pop('out', None)
- debug = kwargs.pop('debug', False)
- indent = kwargs.pop('indent', 0)
- color = kwargs.pop('color', None)
- check_kwargs(kwargs, graph_ascii)
-
+def graph_ascii(spec, node='o', out=None, debug=False,
+ indent=0, color=None, deptype=None):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
- graph.node_character = node_character
+ graph.node_character = node
+ if deptype:
+ graph.deptype = canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
-def graph_dot(*specs, **kwargs):
+def graph_dot(specs, deptype=None, static=False, out=None):
"""Generate a graph in dot format of all provided specs.
Print out a dot formatted graph of all the dependencies between
@@ -510,42 +514,73 @@ def graph_dot(*specs, **kwargs):
spack graph --dot qt | dot -Tpdf > spack-graph.pdf
"""
- out = kwargs.pop('out', sys.stdout)
- check_kwargs(kwargs, graph_dot)
+ if out is None:
+ out = sys.stdout
+
+ if deptype is None:
+ deptype = alldeps
out.write('digraph G {\n')
- out.write(' label = "Spack Dependencies"\n')
out.write(' labelloc = "b"\n')
- out.write(' rankdir = "LR"\n')
+ out.write(' rankdir = "TB"\n')
out.write(' ranksep = "5"\n')
+ out.write('node[\n')
+ out.write(' fontname=Monaco,\n')
+ out.write(' penwidth=2,\n')
+ out.write(' fontsize=12,\n')
+ out.write(' margin=.1,\n')
+ out.write(' shape=box,\n')
+ out.write(' fillcolor=lightblue,\n')
+ out.write(' style="rounded,filled"]\n')
+
out.write('\n')
- def quote(string):
+ def q(string):
return '"%s"' % string
if not specs:
- specs = [p.name for p in spack.repo.all_packages()]
- else:
- roots = specs
- specs = set()
- for spec in roots:
- specs.update(Spec(s.name) for s in spec.normalized().traverse())
+ raise ValueError("Must provide specs ot graph_dot")
+
+ # Static graph includes anything a package COULD depend on.
+ if static:
+ names = set.union(*[s.package.possible_dependencies() for s in specs])
+ specs = [Spec(name) for name in names]
- deps = []
+ labeled = set()
+
+ def label(key, label):
+ if key not in labeled:
+ out.write(' "%s" [label="%s"]\n' % (key, label))
+ labeled.add(key)
+
+ deps = set()
for spec in specs:
- out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name))
+ if static:
+ out.write(' "%s" [label="%s"]\n' % (spec.name, spec.name))
+
+ # Skip virtual specs (we'll find out about them from concrete ones.
+ if spec.virtual:
+ continue
- # Skip virtual specs (we'll find out about them from concrete ones.
- if spec.virtual:
- continue
+ # Add edges for each depends_on in the package.
+ for dep_name, dep in spec.package.dependencies.iteritems():
+ deps.add((spec.name, dep_name))
+
+ # If the package provides something, add an edge for that.
+ for provider in set(s.name for s in spec.package.provided):
+ deps.add((provider, spec.name))
+
+ else:
+ def key_label(s):
+ return s.dag_hash(), "%s-%s" % (s.name, s.dag_hash(7))
- # Add edges for each depends_on in the package.
- for dep_name, dep in spec.package.dependencies.iteritems():
- deps.append((spec.name, dep_name))
+ for s in spec.traverse(deptype=deptype):
+ skey, slabel = key_label(s)
+ out.write(' "%s" [label="%s"]\n' % (skey, slabel))
- # If the package provides something, add an edge for that.
- for provider in set(s.name for s in spec.package.provided):
- deps.append((provider, spec.name))
+ for d in s.dependencies(deptype=deptype):
+ dkey, _ = key_label(d)
+ deps.add((skey, dkey))
out.write('\n')
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index c7c84defa0..ff4ebc2e57 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -24,7 +24,7 @@
##############################################################################
"""This package contains modules with hooks for various stages in the
Spack install process. You can add modules here and they'll be
- executaed by package at various times during the package lifecycle.
+ executed by package at various times during the package lifecycle.
Each hook is just a function that takes a package as a parameter.
Hooks are not executed in any particular order.
@@ -41,9 +41,10 @@
features.
"""
import imp
-from llnl.util.lang import memoized, list_modules
-from llnl.util.filesystem import join_path
+
import spack
+from llnl.util.filesystem import join_path
+from llnl.util.lang import memoized, list_modules
@memoized
@@ -70,12 +71,11 @@ class HookRunner(object):
if hasattr(hook, '__call__'):
hook(pkg)
-
#
# Define some functions that can be called to fire off hooks.
#
-pre_install = HookRunner('pre_install')
-post_install = HookRunner('post_install')
+pre_install = HookRunner('pre_install')
+post_install = HookRunner('post_install')
-pre_uninstall = HookRunner('pre_uninstall')
+pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')
diff --git a/lib/spack/spack/hooks/lmodmodule.py b/lib/spack/spack/hooks/lmodmodule.py
new file mode 100644
index 0000000000..6b4318b1d0
--- /dev/null
+++ b/lib/spack/spack/hooks/lmodmodule.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.modules
+
+
+def post_install(pkg):
+ dk = spack.modules.LmodModule(pkg.spec)
+ dk.write()
+
+
+def post_uninstall(pkg):
+ dk = spack.modules.LmodModule(pkg.spec)
+ dk.remove()
diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py
index 02c1ce3816..6f9736a018 100644
--- a/lib/spack/spack/hooks/sbang.py
+++ b/lib/spack/spack/hooks/sbang.py
@@ -81,8 +81,10 @@ def filter_shebang(path):
tty.warn("Patched overlong shebang in %s" % path)
-def filter_shebangs_in_directory(directory):
- for file in os.listdir(directory):
+def filter_shebangs_in_directory(directory, filenames=None):
+ if filenames is None:
+ filenames = os.listdir(directory)
+ for file in filenames:
path = os.path.join(directory, file)
# only handle files
@@ -104,6 +106,6 @@ def post_install(pkg):
"""This hook edits scripts so that they call /bin/bash
$spack_prefix/bin/sbang instead of something longer than the
shebang limit."""
- if not os.path.isdir(pkg.prefix.bin):
- return
- filter_shebangs_in_directory(pkg.prefix.bin)
+
+ for directory, _, filenames in os.walk(pkg.prefix):
+ filter_shebangs_in_directory(directory, filenames)
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index 70c3c35d8c..aa3ad5843f 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -40,6 +40,7 @@ module file.
"""
import copy
import datetime
+import itertools
import os
import os.path
import re
@@ -48,6 +49,7 @@ import textwrap
import llnl.util.tty as tty
import spack
+import spack.compilers # Needed by LmodModules
import spack.config
from llnl.util.filesystem import join_path, mkdirp
from spack.build_environment import parent_class_modules
@@ -56,7 +58,8 @@ from spack.environment import *
__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-# Registry of all types of modules. Entries created by EnvModule's metaclass
+"""Registry of all types of modules. Entries created by EnvModule's
+ metaclass."""
module_types = {}
CONFIGURATION = spack.config.get_config('modules')
@@ -120,7 +123,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
- return spec.dependencies()
+ return spec.dependencies(deptype=('link', 'run'))
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits
@@ -133,6 +136,7 @@ def dependencies(spec, request='all'):
spec.traverse(order='post',
depth=True,
cover='nodes',
+ deptype=('link', 'run'),
root=False),
reverse=True)]
return [xx for ii, xx in l if not (xx in seen or seen_add(xx))]
@@ -388,6 +392,7 @@ class EnvModule(object):
for mod in modules:
set_module_variables_for_package(package, mod)
set_module_variables_for_package(package, package.module)
+ package.setup_environment(spack_env, env)
package.setup_dependent_package(self.pkg.module, self.spec)
package.setup_dependent_environment(spack_env, env, self.spec)
@@ -632,3 +637,237 @@ class TclModule(EnvModule):
raise SystemExit('Module generation aborted.')
line = line.format(**naming_tokens)
yield line
+
+# To construct an arbitrary hierarchy of module files:
+# 1. Parse the configuration file and check that all the items in
+# hierarchical_scheme are indeed virtual packages
+# This needs to be done only once at start-up
+# 2. Order the stack as `hierarchical_scheme + ['mpi, 'compiler']
+# 3. Check which of the services are provided by the package
+# -> may be more than one
+# 4. Check which of the services are needed by the package
+# -> this determines where to write the module file
+# 5. For each combination of services in which we have at least one provider
+# here add the appropriate conditional MODULEPATH modifications
+
+
+class LmodModule(EnvModule):
+ name = 'lmod'
+ path = join_path(spack.share_path, "lmod")
+
+ environment_modifications_formats = {
+ PrependPath: 'prepend_path("{name}", "{value}")\n',
+ AppendPath: 'append_path("{name}", "{value}")\n',
+ RemovePath: 'remove_path("{name}", "{value}")\n',
+ SetEnv: 'setenv("{name}", "{value}")\n',
+ UnsetEnv: 'unsetenv("{name}")\n'
+ }
+
+ autoload_format = ('if not isloaded("{module_file}") then\n'
+ ' LmodMessage("Autoloading {module_file}")\n'
+ ' load("{module_file}")\n'
+ 'end\n\n')
+
+ prerequisite_format = 'prereq("{module_file}")\n'
+
+ family_format = 'family("{family}")\n'
+
+ path_part_with_hash = join_path('{token.name}', '{token.version}-{token.hash}') # NOQA: ignore=E501
+ path_part_without_hash = join_path('{token.name}', '{token.version}')
+
+ # TODO : Check that extra tokens specified in configuration file
+ # TODO : are actually virtual dependencies
+ configuration = CONFIGURATION.get('lmod', {})
+ hierarchy_tokens = configuration.get('hierarchical_scheme', [])
+ hierarchy_tokens = hierarchy_tokens + ['mpi', 'compiler']
+
+ def __init__(self, spec=None):
+ super(LmodModule, self).__init__(spec)
+ # Sets the root directory for this architecture
+ self.modules_root = join_path(LmodModule.path, self.spec.architecture)
+ # Retrieve core compilers
+ self.core_compilers = self.configuration.get('core_compilers', [])
+ # Keep track of the requirements that this package has in terms
+ # of virtual packages
+ # that participate in the hierarchical structure
+ self.requires = {'compiler': self.spec.compiler}
+ # For each virtual dependency in the hierarchy
+ for x in self.hierarchy_tokens:
+ if x in self.spec and not self.spec.package.provides(
+ x): # if I depend on it
+ self.requires[x] = self.spec[x] # record the actual provider
+ # Check what are the services I need (this will determine where the
+ # module file will be written)
+ self.substitutions = {}
+ self.substitutions.update(self.requires)
+ # TODO : complete substitutions
+ # Check what service I provide to others
+ self.provides = {}
+ # If it is in the list of supported compilers family -> compiler
+ if self.spec.name in spack.compilers.supported_compilers():
+ self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+ # Special case for llvm
+ if self.spec.name == 'llvm':
+ self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+ self.provides['compiler'].name = 'clang'
+
+ for x in self.hierarchy_tokens:
+ if self.spec.package.provides(x):
+ self.provides[x] = self.spec[x]
+
+ def _hierarchy_token_combinations(self):
+ """
+ Yields all the relevant combinations that could appear in the hierarchy
+ """
+ for ii in range(len(self.hierarchy_tokens) + 1):
+ for item in itertools.combinations(self.hierarchy_tokens, ii):
+ if 'compiler' in item:
+ yield item
+
+ def _hierarchy_to_be_provided(self):
+ """
+ Filters a list of hierarchy tokens and yields only the one that we
+ need to provide
+ """
+ for item in self._hierarchy_token_combinations():
+ if any(x in self.provides for x in item):
+ yield item
+
+ def token_to_path(self, name, value):
+ # If we are dealing with a core compiler, return 'Core'
+ if name == 'compiler' and str(value) in self.core_compilers:
+ return 'Core'
+ # CompilerSpec does not have an hash
+ if name == 'compiler':
+ return self.path_part_without_hash.format(token=value)
+ # For virtual providers add a small part of the hash
+ # to distinguish among different variants in a directory hierarchy
+ value.hash = value.dag_hash(length=6)
+ return self.path_part_with_hash.format(token=value)
+
+ @property
+ def file_name(self):
+ parts = [self.token_to_path(x, self.requires[x])
+ for x in self.hierarchy_tokens if x in self.requires]
+ hierarchy_name = join_path(*parts)
+ fullname = join_path(self.modules_root, hierarchy_name,
+ self.use_name + '.lua')
+ return fullname
+
+ @property
+ def use_name(self):
+ return self.token_to_path('', self.spec)
+
+ def modulepath_modifications(self):
+ # What is available is what we require plus what we provide
+ entry = ''
+ available = {}
+ available.update(self.requires)
+ available.update(self.provides)
+ available_parts = [self.token_to_path(x, available[x])
+ for x in self.hierarchy_tokens if x in available]
+ # Missing parts
+ missing = [x for x in self.hierarchy_tokens if x not in available]
+ # Direct path we provide on top of compilers
+ modulepath = join_path(self.modules_root, *available_parts)
+ env = EnvironmentModifications()
+ env.prepend_path('MODULEPATH', modulepath)
+ for line in self.process_environment_command(env):
+ entry += line
+
+ def local_variable(x):
+ lower, upper = x.lower(), x.upper()
+ fmt = 'local {lower}_name = os.getenv("LMOD_{upper}_NAME")\n'
+ fmt += 'local {lower}_version = os.getenv("LMOD_{upper}_VERSION")\n' # NOQA: ignore=501
+ return fmt.format(lower=lower, upper=upper)
+
+ def set_variables_for_service(env, x):
+ upper = x.upper()
+ s = self.provides[x]
+ name, version = os.path.split(self.token_to_path(x, s))
+
+ env.set('LMOD_{upper}_NAME'.format(upper=upper), name)
+ env.set('LMOD_{upper}_VERSION'.format(upper=upper), version)
+
+ def conditional_modulepath_modifications(item):
+ entry = 'if '
+ needed = []
+ for x in self.hierarchy_tokens:
+ if x in missing:
+ needed.append('{x}_name '.format(x=x))
+ entry += 'and '.join(needed) + 'then\n'
+ entry += ' local t = pathJoin("{root}"'.format(
+ root=self.modules_root)
+ for x in item:
+ if x in missing:
+ entry += ', {lower}_name, {lower}_version'.format(
+ lower=x.lower())
+ else:
+ entry += ', "{x}"'.format(
+ x=self.token_to_path(x, available[x]))
+ entry += ')\n'
+ entry += ' prepend_path("MODULEPATH", t)\n'
+ entry += 'end\n\n'
+ return entry
+
+ if 'compiler' not in self.provides:
+ # Retrieve variables
+ entry += '\n'
+ for x in missing:
+ entry += local_variable(x)
+ entry += '\n'
+ # Conditional modifications
+ conditionals = [x
+ for x in self._hierarchy_to_be_provided()
+ if any(t in missing for t in x)]
+ for item in conditionals:
+ entry += conditional_modulepath_modifications(item)
+
+ # Set environment variables for the services we provide
+ env = EnvironmentModifications()
+ for x in self.provides:
+ set_variables_for_service(env, x)
+ for line in self.process_environment_command(env):
+ entry += line
+
+ return entry
+
+ @property
+ def header(self):
+ timestamp = datetime.datetime.now()
+ # Header as in
+ # https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/advanced-user-guide/more-about-writing-module-files
+ header = "-- -*- lua -*-\n"
+ header += '-- Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
+ header += '--\n'
+ header += '-- %s\n' % self.spec.short_spec
+ header += '--\n'
+
+ # Short description -> whatis()
+ if self.short_description:
+ header += "whatis([[Name : {name}]])\n".format(name=self.spec.name)
+ header += "whatis([[Version : {version}]])\n".format(
+ version=self.spec.version)
+
+ # Long description -> help()
+ if self.long_description:
+ doc = re.sub(r'"', '\"', self.long_description)
+ header += "help([[{documentation}]])\n".format(documentation=doc)
+
+ # Certain things need to be done only if we provide a service
+ if self.provides:
+ # Add family directives
+ header += '\n'
+ for x in self.provides:
+ header += self.family_format.format(family=x)
+ header += '\n'
+ header += '-- MODULEPATH modifications\n'
+ header += '\n'
+ # Modify MODULEPATH
+ header += self.modulepath_modifications()
+ # Set environment variables for services we provide
+ header += '\n'
+ header += '-- END MODULEPATH modifications\n'
+ header += '\n'
+
+ return header
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
index 3e5ab9b2e9..dafb5c1d41 100644
--- a/lib/spack/spack/operating_systems/mac_os.py
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -22,7 +22,7 @@ class MacOs(OperatingSystem):
"10.11": "elcapitan",
"10.12": "sierra"}
- mac_ver = py_platform.mac_ver()[0][:-2]
+ mac_ver = '.'.join(py_platform.mac_ver()[0].split('.')[:2])
name = mac_releases.get(mac_ver, "macos")
super(MacOs, self).__init__(name, mac_ver)
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index dcdcee293d..498f3d4a6d 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -575,6 +575,20 @@ class PackageBase(object):
self.extra_args = {}
+ def possible_dependencies(self, visited=None):
+ """Return set of possible transitive dependencies of this package."""
+ if visited is None:
+ visited = set()
+
+ visited.add(self.name)
+ for name in self.dependencies:
+ if name not in visited and not spack.spec.Spec(name).virtual:
+ pkg = spack.repo.get(name)
+ for name in pkg.possible_dependencies(visited):
+ visited.add(name)
+
+ return visited
+
@property
def package_dir(self):
"""Return the directory where the package.py file lives."""
@@ -886,7 +900,7 @@ class PackageBase(object):
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
- self.spec.format('$_$@'), checksum_msg)
+ self.spec.format('$_$@'), ck_msg)
self.stage.fetch(mirror_only)
@@ -1080,7 +1094,8 @@ class PackageBase(object):
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
- run_tests=run_tests)
+ run_tests=run_tests,
+ dirty=dirty)
# Set run_tests flag before starting build.
self.run_tests = run_tests
@@ -1171,7 +1186,9 @@ class PackageBase(object):
keep_prefix = True if self.last_phase is None else keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
- spack.installed_db.add(self.spec, self.prefix, explicit=explicit)
+ spack.installed_db.add(
+ self.spec, spack.install_layout, explicit=explicit
+ )
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)
diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py
index 0059b49ff1..9138ad7afe 100644
--- a/lib/spack/spack/platforms/cray.py
+++ b/lib/spack/spack/platforms/cray.py
@@ -8,37 +8,21 @@ from spack.operating_systems.linux_distro import LinuxDistro
from spack.operating_systems.cnl import Cnl
from llnl.util.filesystem import join_path
-# Craype- module prefixes that are not valid CPU targets.
-NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe')
-
-
-def _target_from_clean_env(name):
- '''Return the default back_end target as loaded in a clean login session.
-
- A bash subshell is launched with a wiped environment and the list of loaded
- modules is parsed for the first acceptable CrayPE target.
- '''
- # Based on the incantation:
- # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
- targets = []
- if name != 'front_end':
- env = which('env')
- env.add_default_arg('-')
- # CAUTION - $USER is generally needed to initialize the environment.
- # There may be other variables needed for general success.
- output = env('USER=%s' % os.environ['USER'],
- '/bin/bash', '--noprofile', '--norc', '-c',
- '. /etc/profile; module list -lt',
- output=str, error=str)
- default_modules = [i for i in output.splitlines()
- if len(i.split()) == 1]
- tty.debug("Found default modules:",
- *[" " + mod for mod in default_modules])
- pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS))
- for mod in default_modules:
- if 'craype-' in mod:
- targets.extend(re.findall(pattern, mod))
- return targets[0] if targets else None
+
+def _get_modules_in_modulecmd_output(output):
+ '''Return list of valid modules parsed from modulecmd output string.'''
+ return [i for i in output.splitlines()
+ if len(i.split()) == 1]
+
+
+def _fill_craype_targets_from_modules(targets, modules):
+ '''Extend CrayPE CPU targets list with those found in list of modules.'''
+ # Craype- module prefixes that are not valid CPU targets.
+ non_targets = ('hugepages', 'network', 'target', 'accel', 'xtpe')
+ pattern = r'craype-(?!{0})(\S*)'.format('|'.join(non_targets))
+ for mod in modules:
+ if 'craype-' in mod:
+ targets.extend(re.findall(pattern, mod))
class Cray(Platform):
@@ -56,7 +40,12 @@ class Cray(Platform):
'''
super(Cray, self).__init__('cray')
- # Get targets from config or make best guess from environment:
+ # Make all craype targets available.
+ for target in self._avail_targets():
+ name = target.replace('-', '_')
+ self.add_target(name, Target(name, 'craype-%s' % target))
+
+ # Get aliased targets from config or best guess from environment:
conf = spack.config.get_config('targets')
for name in ('front_end', 'back_end'):
_target = getattr(self, name, None)
@@ -64,18 +53,16 @@ class Cray(Platform):
_target = os.environ.get('SPACK_' + name.upper())
if _target is None:
_target = conf.get(name)
- if _target is None:
- _target = _target_from_clean_env(name)
- setattr(self, name, _target)
-
+ if _target is None and name == 'back_end':
+ _target = self._default_target_from_env()
if _target is not None:
- self.add_target(name, Target(_target, 'craype-' + _target))
- self.add_target(_target, Target(_target, 'craype-' + _target))
+ safe_name = _target.replace('-', '_')
+ setattr(self, name, safe_name)
+ self.add_target(name, self.targets[safe_name])
if self.back_end is not None:
self.default = self.back_end
- self.add_target(
- 'default', Target(self.default, 'craype-' + self.default))
+ self.add_target('default', self.targets[self.back_end])
else:
raise NoPlatformError()
@@ -90,7 +77,7 @@ class Cray(Platform):
self.add_operating_system(self.front_os, front_distro)
@classmethod
- def setup_platform_environment(self, pkg, env):
+ def setup_platform_environment(cls, pkg, env):
""" Change the linker to default dynamic to be more
similar to linux/standard linker behavior
"""
@@ -101,5 +88,43 @@ class Cray(Platform):
env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
@classmethod
- def detect(self):
+ def detect(cls):
return os.environ.get('CRAYPE_VERSION') is not None
+
+ def _default_target_from_env(self):
+ '''Set and return the default CrayPE target loaded in a clean login
+ session.
+
+ A bash subshell is launched with a wiped environment and the list of
+ loaded modules is parsed for the first acceptable CrayPE target.
+ '''
+ # Based on the incantation:
+ # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
+ if getattr(self, 'default', None) is None:
+ env = which('env')
+ env.add_default_arg('-')
+ # CAUTION - $USER is generally needed in the sub-environment.
+ # There may be other variables needed for general success.
+ output = env('USER=%s' % os.environ['USER'],
+ 'HOME=%s' % os.environ['HOME'],
+ '/bin/bash', '--noprofile', '--norc', '-c',
+ '. /etc/profile; module list -lt',
+ output=str, error=str)
+ self._defmods = _get_modules_in_modulecmd_output(output)
+ targets = []
+ _fill_craype_targets_from_modules(targets, self._defmods)
+ self.default = targets[0] if targets else None
+ tty.debug("Found default modules:",
+ *[" %s" % mod for mod in self._defmods])
+ return self.default
+
+ def _avail_targets(self):
+ '''Return a list of available CrayPE CPU targets.'''
+ if getattr(self, '_craype_targets', None) is None:
+ module = which('modulecmd', required=True)
+ module.add_default_arg('python')
+ output = module('avail', '-t', 'craype-', output=str, error=str)
+ craype_modules = _get_modules_in_modulecmd_output(output)
+ self._craype_targets = targets = []
+ _fill_craype_targets_from_modules(targets, craype_modules)
+ return self._craype_targets
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
index f8066919f1..bdc70c9ef1 100644
--- a/lib/spack/spack/schema/modules.py
+++ b/lib/spack/spack/schema/modules.py
@@ -139,7 +139,20 @@ schema = {
'default': [],
'items': {
'type': 'string',
- 'enum': ['tcl', 'dotkit']}},
+ 'enum': ['tcl', 'dotkit', 'lmod']}},
+ 'lmod': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {
+ 'core_compilers': {
+ '$ref': '#/definitions/array_of_strings'
+ },
+ 'hierarchical_scheme': {
+ '$ref': '#/definitions/array_of_strings'
+ }
+ } # Specific lmod extensions
+ ]},
'tcl': {
'allOf': [
# Base configuration
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 8b0486c4da..ba9cea876d 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -123,6 +123,39 @@ from spack.util.spack_yaml import syaml_dict
from spack.version import *
from spack.provider_index import ProviderIndex
+__all__ = [
+ 'Spec',
+ 'alldeps',
+ 'nolink',
+ 'nobuild',
+ 'canonical_deptype',
+ 'validate_deptype',
+ 'parse',
+ 'parse_anonymous_spec',
+ 'SpecError',
+ 'SpecParseError',
+ 'DuplicateDependencyError',
+ 'DuplicateVariantError',
+ 'DuplicateCompilerSpecError',
+ 'UnsupportedCompilerError',
+ 'UnknownVariantError',
+ 'DuplicateArchitectureError',
+ 'InconsistentSpecError',
+ 'InvalidDependencyError',
+ 'InvalidDependencyTypeError',
+ 'NoProviderError',
+ 'MultipleProviderError',
+ 'UnsatisfiableSpecError',
+ 'UnsatisfiableSpecNameError',
+ 'UnsatisfiableVersionSpecError',
+ 'UnsatisfiableCompilerSpecError',
+ 'UnsatisfiableVariantSpecError',
+ 'UnsatisfiableCompilerFlagSpecError',
+ 'UnsatisfiableArchitectureSpecError',
+ 'UnsatisfiableProviderSpecError',
+ 'UnsatisfiableDependencySpecError',
+ 'SpackYAMLError',
+ 'AmbiguousHashError']
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@@ -156,12 +189,46 @@ _any_version = VersionList([':'])
# Special types of dependencies.
alldeps = ('build', 'link', 'run')
-nolink = ('build', 'run')
+nolink = ('build', 'run')
+nobuild = ('link', 'run')
+norun = ('link', 'build')
special_types = {
'alldeps': alldeps,
+ 'all': alldeps, # allow "all" as string but not symbol.
'nolink': nolink,
+ 'nobuild': nobuild,
+ 'norun': norun,
}
+legal_deps = tuple(special_types) + alldeps
+
+
+def validate_deptype(deptype):
+ if isinstance(deptype, str):
+ if deptype not in legal_deps:
+ raise InvalidDependencyTypeError(
+ "Invalid dependency type: %s" % deptype)
+
+ elif isinstance(deptype, (list, tuple)):
+ for t in deptype:
+ validate_deptype(t)
+
+ elif deptype is None:
+ raise InvalidDependencyTypeError("deptype cannot be None!")
+
+
+def canonical_deptype(deptype):
+ if deptype is None:
+ return alldeps
+
+ elif isinstance(deptype, str):
+ return special_types.get(deptype, (deptype,))
+
+ elif isinstance(deptype, (tuple, list)):
+ return (sum((canonical_deptype(d) for d in deptype), ()))
+
+ return deptype
+
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
@@ -527,6 +594,14 @@ class Spec(object):
# XXX(deptype): default deptypes
self._add_dependency(spec, ('build', 'link'))
+ def __getattr__(self, item):
+ """Delegate to self.package if the attribute is not in the spec"""
+ # This line is to avoid infinite recursion in case package is
+ # not present among self attributes
+ if item.endswith('libs'):
+ return getattr(self.package, item)
+ raise AttributeError()
+
def get_dependency(self, name):
dep = self._dependencies.get(name)
if dep is not None:
@@ -534,17 +609,8 @@ class Spec(object):
raise InvalidDependencyException(
self.name + " does not depend on " + comma_or(name))
- def _deptype_norm(self, deptype):
- if deptype is None:
- return alldeps
- # Force deptype to be a set object so that we can do set intersections.
- if isinstance(deptype, str):
- # Support special deptypes.
- return special_types.get(deptype, (deptype,))
- return deptype
-
def _find_deps(self, where, deptype):
- deptype = self._deptype_norm(deptype)
+ deptype = canonical_deptype(deptype)
return [dep.spec
for dep in where.values()
@@ -557,7 +623,7 @@ class Spec(object):
return self._find_deps(self._dependents, deptype)
def _find_deps_dict(self, where, deptype):
- deptype = self._deptype_norm(deptype)
+ deptype = canonical_deptype(deptype)
return dict((dep.spec.name, dep)
for dep in where.values()
@@ -1353,12 +1419,11 @@ class Spec(object):
# parser doesn't allow it. Spack must be broken!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
- def index(self):
+ def index(self, deptype=None):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
- # XXX(deptype): use a deptype kwarg.
- for spec in self.traverse():
+ for spec in self.traverse(deptype=deptype):
dm[spec.name] = spec
return dm
@@ -1561,7 +1626,7 @@ class Spec(object):
# actually deps of this package. Raise an error.
extra = set(spec_deps.keys()).difference(visited)
if extra:
- raise InvalidDependencyException(
+ raise InvalidDependencyError(
self.name + " does not depend on " + comma_or(extra))
# Mark the spec as normal once done.
@@ -2659,17 +2724,11 @@ def parse_anonymous_spec(spec_like, pkg_name):
class SpecError(spack.error.SpackError):
-
"""Superclass for all errors that occur while constructing specs."""
- def __init__(self, message):
- super(SpecError, self).__init__(message)
-
class SpecParseError(SpecError):
-
"""Wrapper for ParseError for when we're parsing specs."""
-
def __init__(self, parse_error):
super(SpecParseError, self).__init__(parse_error.message)
self.string = parse_error.string
@@ -2677,79 +2736,53 @@ class SpecParseError(SpecError):
class DuplicateDependencyError(SpecError):
-
"""Raised when the same dependency occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateDependencyError, self).__init__(message)
-
class DuplicateVariantError(SpecError):
-
"""Raised when the same variant occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateVariantError, self).__init__(message)
-
class DuplicateCompilerSpecError(SpecError):
-
"""Raised when the same compiler occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateCompilerSpecError, self).__init__(message)
-
class UnsupportedCompilerError(SpecError):
-
"""Raised when the user asks for a compiler spack doesn't know about."""
-
def __init__(self, compiler_name):
super(UnsupportedCompilerError, self).__init__(
"The '%s' compiler is not yet supported." % compiler_name)
class UnknownVariantError(SpecError):
-
"""Raised when the same variant occurs in a spec twice."""
-
def __init__(self, pkg, variant):
super(UnknownVariantError, self).__init__(
"Package %s has no variant %s!" % (pkg, variant))
class DuplicateArchitectureError(SpecError):
-
"""Raised when the same architecture occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateArchitectureError, self).__init__(message)
-
class InconsistentSpecError(SpecError):
-
"""Raised when two nodes in the same spec DAG have inconsistent
constraints."""
- def __init__(self, message):
- super(InconsistentSpecError, self).__init__(message)
-
-
-class InvalidDependencyException(SpecError):
+class InvalidDependencyError(SpecError):
"""Raised when a dependency in a spec is not actually a dependency
of the package."""
- def __init__(self, message):
- super(InvalidDependencyException, self).__init__(message)
+class InvalidDependencyTypeError(SpecError):
+ """Raised when a dependency type is not a legal Spack dep type."""
-class NoProviderError(SpecError):
+class NoProviderError(SpecError):
"""Raised when there is no package that provides a particular
virtual dependency.
"""
-
def __init__(self, vpkg):
super(NoProviderError, self).__init__(
"No providers found for virtual package: '%s'" % vpkg)
@@ -2757,11 +2790,9 @@ class NoProviderError(SpecError):
class MultipleProviderError(SpecError):
-
"""Raised when there is no package that provides a particular
virtual dependency.
"""
-
def __init__(self, vpkg, providers):
"""Takes the name of the vpkg"""
super(MultipleProviderError, self).__init__(
@@ -2772,10 +2803,8 @@ class MultipleProviderError(SpecError):
class UnsatisfiableSpecError(SpecError):
-
"""Raised when a spec conflicts with package constraints.
Provide the requirement that was violated when raising."""
-
def __init__(self, provided, required, constraint_type):
super(UnsatisfiableSpecError, self).__init__(
"%s does not satisfy %s" % (provided, required))
@@ -2785,89 +2814,70 @@ class UnsatisfiableSpecError(SpecError):
class UnsatisfiableSpecNameError(UnsatisfiableSpecError):
-
"""Raised when two specs aren't even for the same package."""
-
def __init__(self, provided, required):
super(UnsatisfiableSpecNameError, self).__init__(
provided, required, "name")
class UnsatisfiableVersionSpecError(UnsatisfiableSpecError):
-
"""Raised when a spec version conflicts with package constraints."""
-
def __init__(self, provided, required):
super(UnsatisfiableVersionSpecError, self).__init__(
provided, required, "version")
class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError):
-
"""Raised when a spec comiler conflicts with package constraints."""
-
def __init__(self, provided, required):
super(UnsatisfiableCompilerSpecError, self).__init__(
provided, required, "compiler")
class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
-
"""Raised when a spec variant conflicts with package constraints."""
-
def __init__(self, provided, required):
super(UnsatisfiableVariantSpecError, self).__init__(
provided, required, "variant")
class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
-
"""Raised when a spec variant conflicts with package constraints."""
-
def __init__(self, provided, required):
super(UnsatisfiableCompilerFlagSpecError, self).__init__(
provided, required, "compiler_flags")
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
-
"""Raised when a spec architecture conflicts with package constraints."""
-
def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(
provided, required, "architecture")
class UnsatisfiableProviderSpecError(UnsatisfiableSpecError):
-
"""Raised when a provider is supplied but constraints don't match
a vpkg requirement"""
-
def __init__(self, provided, required):
super(UnsatisfiableProviderSpecError, self).__init__(
provided, required, "provider")
+
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
-
-
class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
-
"""Raised when some dependency of constrained specs are incompatible"""
-
def __init__(self, provided, required):
super(UnsatisfiableDependencySpecError, self).__init__(
provided, required, "dependency")
class SpackYAMLError(spack.error.SpackError):
-
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
class AmbiguousHashError(SpecError):
-
def __init__(self, msg, *specs):
super(AmbiguousHashError, self).__init__(msg)
for spec in specs:
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index c0705a89c8..b659cfb2fb 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -216,9 +216,9 @@ class Stage(object):
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
- if isinstance(self.fetcher, fs.URLFetchStrategy):
+ if isinstance(self.default_fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
- self.path, os.path.basename(self.fetcher.url)))
+ self.path, os.path.basename(self.default_fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(
@@ -227,18 +227,18 @@ class Stage(object):
return paths
@property
+ def save_filename(self):
+ possible_filenames = self.expected_archive_files
+ if possible_filenames:
+ # This prefers using the URL associated with the default fetcher if
+ # available, so that the fetched resource name matches the remote
+ # name
+ return possible_filenames[0]
+
+ @property
def archive_file(self):
"""Path to the source archive within this stage directory."""
- paths = []
- if isinstance(self.fetcher, fs.URLFetchStrategy):
- paths.append(os.path.join(
- self.path, os.path.basename(self.fetcher.url)))
-
- if self.mirror_path:
- paths.append(os.path.join(
- self.path, os.path.basename(self.mirror_path)))
-
- for path in paths:
+ for path in self.expected_archive_files:
if os.path.exists(path):
return path
else:
@@ -301,8 +301,10 @@ class Stage(object):
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
+ expand = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
+ expand = self.default_fetcher.expand_archive
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
@@ -310,9 +312,11 @@ class Stage(object):
# Add URL strategies for all the mirrors with the digest
for url in urls:
- fetchers.insert(0, fs.URLFetchStrategy(url, digest))
- fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path,
- digest))
+ fetchers.insert(
+ 0, fs.URLFetchStrategy(url, digest, expand=expand))
+ fetchers.insert(
+ 0, spack.fetch_cache.fetcher(
+ self.mirror_path, digest, expand=expand))
# Look for the archive in list_url
package_name = os.path.dirname(self.mirror_path)
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index db683917b5..0a946ff2ff 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -53,6 +53,7 @@ test_names = [
'git_fetch',
'hg_fetch',
'install',
+ 'library_list',
'link_tree',
'lock',
'make_executable',
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index f3e4bb31d2..73c711724c 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -223,6 +223,8 @@ class CompilerTest(unittest.TestCase):
def test_dep_include(self):
"""Ensure a single dependency include directory is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep4
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@@ -233,6 +235,8 @@ class CompilerTest(unittest.TestCase):
def test_dep_lib(self):
"""Ensure a single dependency RPATH is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@@ -241,10 +245,34 @@ class CompilerTest(unittest.TestCase):
'-Wl,-rpath,' + self.dep2 + '/lib64 ' +
' '.join(test_command))
+ def test_dep_lib_no_rpath(self):
+ """Ensure a single dependency link flag is added with no dep RPATH."""
+ os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-L' + self.dep2 + '/lib64 ' +
+ ' '.join(test_command))
+
+ def test_dep_lib_no_lib(self):
+ """Ensure a single dependency RPATH is added with no -L."""
+ os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
+ ' '.join(test_command))
+
def test_all_deps(self):
"""Ensure includes and RPATHs for all deps are added. """
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
# This is probably more constrained than it needs to be; it
# checks order within prepended args and doesn't strictly have
@@ -273,6 +301,8 @@ class CompilerTest(unittest.TestCase):
"""Ensure no (extra) -I args or -Wl, are passed in ld mode."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_ld('dump-args', test_command,
'ld ' +
@@ -290,10 +320,46 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command))
+ def test_ld_deps_no_rpath(self):
+ """Ensure SPACK_RPATH_DEPS controls RPATHs for ld."""
+ os.environ['SPACK_DEPENDENCIES'] = ':'.join([
+ self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+
+ self.check_ld('dump-args', test_command,
+ 'ld ' +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+
+ '-L' + self.dep3 + '/lib64 ' +
+ '-L' + self.dep2 + '/lib64 ' +
+ '-L' + self.dep1 + '/lib ' +
+
+ ' '.join(test_command))
+
+ def test_ld_deps_no_link(self):
+ """Ensure SPACK_LINK_DEPS controls -L for ld."""
+ os.environ['SPACK_DEPENDENCIES'] = ':'.join([
+ self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+
+ self.check_ld('dump-args', test_command,
+ 'ld ' +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+
+ '-rpath ' + self.dep3 + '/lib64 ' +
+ '-rpath ' + self.dep2 + '/lib64 ' +
+ '-rpath ' + self.dep1 + '/lib ' +
+
+ ' '.join(test_command))
+
def test_ld_deps_reentrant(self):
"""Make sure ld -r is handled correctly on OS's where it doesn't
support rpaths."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
reentrant_test_command = ['-r'] + test_command
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 22b1f17890..4395f17f97 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -26,8 +26,8 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
-import os.path
import multiprocessing
+import os.path
import spack
from llnl.util.filesystem import join_path
@@ -88,16 +88,16 @@ class DatabaseTest(MockDatabase):
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
- dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
+ dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
- libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+ libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
@@ -124,6 +124,19 @@ class DatabaseTest(MockDatabase):
self.assertEqual(new_rec.path, rec.path)
self.assertEqual(new_rec.installed, rec.installed)
+ def _check_merkleiness(self):
+ """Ensure the spack database is a valid merkle graph."""
+ all_specs = spack.installed_db.query(installed=any)
+
+ seen = {}
+ for spec in all_specs:
+ for dep in spec.dependencies():
+ hash_key = dep.dag_hash()
+ if hash_key not in seen:
+ seen[hash_key] = id(dep)
+ else:
+ self.assertEqual(seen[hash_key], id(dep))
+
def _check_db_sanity(self):
"""Utiilty function to check db against install layout."""
expected = sorted(spack.install_layout.all_specs())
@@ -133,10 +146,17 @@ class DatabaseTest(MockDatabase):
for e, a in zip(expected, actual):
self.assertEqual(e, a)
+ self._check_merkleiness()
+
def test_020_db_sanity(self):
"""Make sure query() returns what's actually in the db."""
self._check_db_sanity()
+ def test_025_reindex(self):
+ """Make sure reindex works and ref counts are valid."""
+ spack.installed_db.reindex(spack.install_layout)
+ self._check_db_sanity()
+
def test_030_db_sanity_from_another_process(self):
def read_and_modify():
self._check_db_sanity() # check that other process can read DB
@@ -163,16 +183,16 @@ class DatabaseTest(MockDatabase):
# query specs with multiple configurations
mpileaks_specs = self.installed_db.query('mpileaks')
callpath_specs = self.installed_db.query('callpath')
- mpi_specs = self.installed_db.query('mpi')
+ mpi_specs = self.installed_db.query('mpi')
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
- dyninst_specs = self.installed_db.query('dyninst')
+ dyninst_specs = self.installed_db.query('dyninst')
libdwarf_specs = self.installed_db.query('libdwarf')
- libelf_specs = self.installed_db.query('libelf')
+ libelf_specs = self.installed_db.query('libelf')
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
@@ -203,9 +223,10 @@ class DatabaseTest(MockDatabase):
self.assertTrue(concrete_spec not in remaining)
# add it back and make sure everything is ok.
- self.installed_db.add(concrete_spec, "")
+ self.installed_db.add(concrete_spec, spack.install_layout)
installed = self.installed_db.query()
- self.assertEqual(len(installed), len(original))
+ self.assertTrue(concrete_spec in installed)
+ self.assertEqual(installed, original)
# sanity check against direcory layout and check ref counts.
self._check_db_sanity()
@@ -233,7 +254,7 @@ class DatabaseTest(MockDatabase):
self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1)
# Put the spec back
- self.installed_db.add(rec.spec, rec.path)
+ self.installed_db.add(rec.spec, spack.install_layout)
# record is present again
self.assertEqual(
diff --git a/lib/spack/spack/test/library_list.py b/lib/spack/spack/test/library_list.py
new file mode 100644
index 0000000000..7fc2fd222f
--- /dev/null
+++ b/lib/spack/spack/test/library_list.py
@@ -0,0 +1,111 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import unittest
+
+from llnl.util.filesystem import LibraryList
+
+
+class LibraryListTest(unittest.TestCase):
+ def setUp(self):
+ l = [
+ '/dir1/liblapack.a',
+ '/dir2/libfoo.dylib',
+ '/dir1/libblas.a',
+ '/dir3/libbar.so',
+ 'libbaz.so'
+ ]
+ self.liblist = LibraryList(l)
+
+ def test_repr(self):
+ x = eval(repr(self.liblist))
+ self.assertEqual(self.liblist, x)
+
+ def test_joined_and_str(self):
+ s1 = self.liblist.joined()
+ self.assertEqual(
+ s1,
+ '/dir1/liblapack.a /dir2/libfoo.dylib /dir1/libblas.a /dir3/libbar.so libbaz.so' # NOQA: ignore=E501
+ )
+ s2 = str(self.liblist)
+ self.assertEqual(s1, s2)
+ s3 = self.liblist.joined(';')
+ self.assertEqual(
+ s3,
+ '/dir1/liblapack.a;/dir2/libfoo.dylib;/dir1/libblas.a;/dir3/libbar.so;libbaz.so' # NOQA: ignore=E501
+ )
+
+ def test_flags(self):
+ search_flags = self.liblist.search_flags
+ self.assertTrue('-L/dir1' in search_flags)
+ self.assertTrue('-L/dir2' in search_flags)
+ self.assertTrue('-L/dir3' in search_flags)
+ self.assertTrue(isinstance(search_flags, str))
+
+ link_flags = self.liblist.link_flags
+ self.assertEqual(
+ link_flags,
+ '-llapack -lfoo -lblas -lbar -lbaz'
+ )
+
+ ld_flags = self.liblist.ld_flags
+ self.assertEqual(ld_flags, search_flags + ' ' + link_flags)
+
+ def test_paths_manipulation(self):
+ names = self.liblist.names
+ self.assertEqual(names, ['lapack', 'foo', 'blas', 'bar', 'baz'])
+
+ directories = self.liblist.directories
+ self.assertEqual(directories, ['/dir1', '/dir2', '/dir3'])
+
+ def test_get_item(self):
+ a = self.liblist[0]
+ self.assertEqual(a, '/dir1/liblapack.a')
+
+ b = self.liblist[:]
+ self.assertEqual(type(b), type(self.liblist))
+ self.assertEqual(self.liblist, b)
+ self.assertTrue(self.liblist is not b)
+
+ def test_add(self):
+ pylist = [
+ '/dir1/liblapack.a', # removed from the final list
+ '/dir2/libbaz.so',
+ '/dir4/libnew.a'
+ ]
+ another = LibraryList(pylist)
+ l = self.liblist + another
+ self.assertEqual(len(l), 7)
+ # Invariant : l == l + l
+ self.assertEqual(l, l + l)
+ # Always produce an instance of LibraryList
+ self.assertEqual(
+ type(self.liblist),
+ type(self.liblist + pylist)
+ )
+ self.assertEqual(
+ type(pylist + self.liblist),
+ type(self.liblist)
+ )
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
index fb96539897..32cbe13ce1 100644
--- a/lib/spack/spack/test/lock.py
+++ b/lib/spack/spack/test/lock.py
@@ -158,6 +158,35 @@ class LockTest(unittest.TestCase):
self.timeout_write, self.timeout_write)
#
+ # Test that read can be upgraded to write.
+ #
+ def test_upgrade_read_to_write(self):
+ # ensure lock file exists the first time, so we open it read-only
+ # to begin wtih.
+ touch(self.lock_path)
+
+ lock = Lock(self.lock_path)
+ self.assertTrue(lock._reads == 0)
+ self.assertTrue(lock._writes == 0)
+
+ lock.acquire_read()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 0)
+
+ lock.acquire_write()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 1)
+
+ lock.release_write()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 0)
+
+ lock.release_read()
+ self.assertTrue(lock._reads == 0)
+ self.assertTrue(lock._writes == 0)
+ self.assertTrue(lock._fd is None)
+
+ #
# Longer test case that ensures locks are reusable. Ordering is
# enforced by barriers throughout -- steps are shown with numbers.
#
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index 5e280d8e43..c4cf2865bb 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -49,105 +49,10 @@ def mock_open(filename, mode):
handle.close()
-configuration_autoload_direct = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'autoload': 'direct'
- }
- }
-}
-
-configuration_autoload_all = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'autoload': 'all'
- }
- }
-}
-
-configuration_prerequisites_direct = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'prerequisites': 'direct'
- }
- }
-}
-
-configuration_prerequisites_all = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'prerequisites': 'all'
- }
- }
-}
-
-configuration_alter_environment = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
- 'environment': {
- 'set': {'{name}_ROOT': '{prefix}'}
- }
- },
- 'platform=test target=x86_64': {
- 'environment': {
- 'set': {'FOO': 'foo'},
- 'unset': ['BAR']
- }
- },
- 'platform=test target=x86_32': {
- 'load': ['foo/bar']
- }
- }
-}
-
-configuration_blacklist = {
- 'enable': ['tcl'],
- 'tcl': {
- 'whitelist': ['zmpi'],
- 'blacklist': ['callpath', 'mpi'],
- 'all': {
- 'autoload': 'direct'
- }
- }
-}
-
-configuration_conflicts = {
- 'enable': ['tcl'],
- 'tcl': {
- 'naming_scheme': '{name}/{version}-{compiler.name}',
- 'all': {
- 'conflict': ['{name}', 'intel/14.0.1']
- }
- }
-}
-
-configuration_wrong_conflicts = {
- 'enable': ['tcl'],
- 'tcl': {
- 'naming_scheme': '{name}/{version}-{compiler.name}',
- 'all': {
- 'conflict': ['{name}/{compiler.name}']
- }
- }
-}
-
-configuration_suffix = {
- 'enable': ['tcl'],
- 'tcl': {
- 'mpileaks': {
- 'suffixes': {
- '+debug': 'foo',
- '~debug': 'bar'
- }
- }
- }
-}
+# Spec strings that will be used throughout the tests
+mpich_spec_string = 'mpich@3.0.4'
+mpileaks_spec_string = 'mpileaks'
+libdwarf_spec_string = 'libdwarf arch=x64-linux'
class HelperFunctionsTests(MockPackagesTest):
@@ -187,61 +92,195 @@ class HelperFunctionsTests(MockPackagesTest):
self.assertTrue('CPATH' in names)
-class TclTests(MockPackagesTest):
+class ModuleFileGeneratorTests(MockPackagesTest):
+ """
+ Base class to test module file generators. Relies on child having defined
+ a 'factory' attribute to create an instance of the generator to be tested.
+ """
def setUp(self):
- super(TclTests, self).setUp()
- self.configuration_obj = spack.modules.CONFIGURATION
+ super(ModuleFileGeneratorTests, self).setUp()
+ self.configuration_instance = spack.modules.CONFIGURATION
+ self.module_types_instance = spack.modules.module_types
spack.modules.open = mock_open
# Make sure that a non-mocked configuration will trigger an error
spack.modules.CONFIGURATION = None
+ spack.modules.module_types = {self.factory.name: self.factory}
def tearDown(self):
del spack.modules.open
- spack.modules.CONFIGURATION = self.configuration_obj
- super(TclTests, self).tearDown()
+ spack.modules.module_types = self.module_types_instance
+ spack.modules.CONFIGURATION = self.configuration_instance
+ super(ModuleFileGeneratorTests, self).tearDown()
def get_modulefile_content(self, spec):
spec.concretize()
- generator = spack.modules.TclModule(spec)
+ generator = self.factory(spec)
generator.write()
content = FILE_REGISTRY[generator.file_name].split('\n')
return content
+
+class TclTests(ModuleFileGeneratorTests):
+
+ factory = spack.modules.TclModule
+
+ configuration_autoload_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ configuration_autoload_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'all'
+ }
+ }
+ }
+
+ configuration_prerequisites_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
+ }
+ }
+
+ configuration_prerequisites_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'all'
+ }
+ }
+ }
+
+ configuration_alter_environment = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
+ 'environment': {
+ 'set': {'{name}_ROOT': '{prefix}'}
+ }
+ },
+ 'platform=test target=x86_64': {
+ 'environment': {
+ 'set': {'FOO': 'foo'},
+ 'unset': ['BAR']
+ }
+ },
+ 'platform=test target=x86_32': {
+ 'load': ['foo/bar']
+ }
+ }
+ }
+
+ configuration_blacklist = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'whitelist': ['zmpi'],
+ 'blacklist': ['callpath', 'mpi'],
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ configuration_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '{name}/{version}-{compiler.name}',
+ 'all': {
+ 'conflict': ['{name}', 'intel/14.0.1']
+ }
+ }
+ }
+
+ configuration_wrong_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '{name}/{version}-{compiler.name}',
+ 'all': {
+ 'conflict': ['{name}/{compiler.name}']
+ }
+ }
+ }
+
+ configuration_suffix = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'mpileaks': {
+ 'suffixes': {
+ '+debug': 'foo',
+ '~debug': 'bar'
+ }
+ }
+ }
+ }
+
def test_simple_case(self):
- spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpich@3.0.4')
+ spack.modules.CONFIGURATION = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpich_spec_string)
content = self.get_modulefile_content(spec)
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
self.assertRaises(TypeError, spack.modules.dependencies,
spec, 'non-existing-tag')
def test_autoload(self):
- spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpileaks')
+ spack.modules.CONFIGURATION = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpileaks_spec_string)
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
- spack.modules.CONFIGURATION = configuration_autoload_all
- spec = spack.spec.Spec('mpileaks')
+ spack.modules.CONFIGURATION = self.configuration_autoload_all
+ spec = spack.spec.Spec(mpileaks_spec_string)
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
+ # dtbuild1 has
+ # - 1 ('run',) dependency
+ # - 1 ('build','link') dependency
+ # - 1 ('build',) dependency
+ # Just make sure the 'build' dependency is not there
+ spack.modules.CONFIGURATION = self.configuration_autoload_direct
+ spec = spack.spec.Spec('dtbuild1')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
+
+ # dtbuild1 has
+ # - 1 ('run',) dependency
+ # - 1 ('build','link') dependency
+ # - 1 ('build',) dependency
+ # Just make sure the 'build' dependency is not there
+ spack.modules.CONFIGURATION = self.configuration_autoload_all
+ spec = spack.spec.Spec('dtbuild1')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
+
def test_prerequisites(self):
- spack.modules.CONFIGURATION = configuration_prerequisites_direct
+ spack.modules.CONFIGURATION = self.configuration_prerequisites_direct
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'prereq' in x]), 2)
- spack.modules.CONFIGURATION = configuration_prerequisites_all
+ spack.modules.CONFIGURATION = self.configuration_prerequisites_all
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'prereq' in x]), 5)
def test_alter_environment(self):
- spack.modules.CONFIGURATION = configuration_alter_environment
+ spack.modules.CONFIGURATION = self.configuration_alter_environment
spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
content = self.get_modulefile_content(spec)
self.assertEqual(
@@ -271,7 +310,7 @@ class TclTests(MockPackagesTest):
len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1)
def test_blacklist(self):
- spack.modules.CONFIGURATION = configuration_blacklist
+ spack.modules.CONFIGURATION = self.configuration_blacklist
spec = spack.spec.Spec('mpileaks ^zmpi')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
@@ -285,7 +324,7 @@ class TclTests(MockPackagesTest):
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
def test_conflicts(self):
- spack.modules.CONFIGURATION = configuration_conflicts
+ spack.modules.CONFIGURATION = self.configuration_conflicts
spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(
@@ -295,11 +334,11 @@ class TclTests(MockPackagesTest):
self.assertEqual(
len([x for x in content if x == 'conflict intel/14.0.1']), 1)
- spack.modules.CONFIGURATION = configuration_wrong_conflicts
+ spack.modules.CONFIGURATION = self.configuration_wrong_conflicts
self.assertRaises(SystemExit, self.get_modulefile_content, spec)
def test_suffixes(self):
- spack.modules.CONFIGURATION = configuration_suffix
+ spack.modules.CONFIGURATION = self.configuration_suffix
spec = spack.spec.Spec('mpileaks+debug arch=x86-linux')
spec.concretize()
generator = spack.modules.TclModule(spec)
@@ -311,18 +350,123 @@ class TclTests(MockPackagesTest):
self.assertTrue('bar' in generator.use_name)
-configuration_dotkit = {
- 'enable': ['dotkit'],
- 'dotkit': {
- 'all': {
- 'prerequisites': 'direct'
+class LmodTests(ModuleFileGeneratorTests):
+ factory = spack.modules.LmodModule
+
+ configuration_autoload_direct = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ configuration_autoload_all = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'autoload': 'all'
+ }
+ }
+ }
+
+ configuration_alter_environment = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
+ },
+ 'platform=test target=x86_64': {
+ 'environment': {
+ 'set': {'FOO': 'foo'},
+ 'unset': ['BAR']
+ }
+ },
+ 'platform=test target=x86_32': {
+ 'load': ['foo/bar']
+ }
+ }
+ }
+
+ configuration_blacklist = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'blacklist': ['callpath'],
+ 'all': {
+ 'autoload': 'direct'
+ }
}
}
-}
+
+ def test_simple_case(self):
+ spack.modules.CONFIGURATION = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpich_spec_string)
+ content = self.get_modulefile_content(spec)
+ self.assertTrue('-- -*- lua -*-' in content)
+ self.assertTrue('whatis([[Name : mpich]])' in content)
+ self.assertTrue('whatis([[Version : 3.0.4]])' in content)
+
+ def test_autoload(self):
+ spack.modules.CONFIGURATION = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x for x in content if 'if not isloaded(' in x]), 2)
+ self.assertEqual(len([x for x in content if 'load(' in x]), 2)
+
+ spack.modules.CONFIGURATION = self.configuration_autoload_all
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x for x in content if 'if not isloaded(' in x]), 5)
+ self.assertEqual(len([x for x in content if 'load(' in x]), 5)
+
+ def test_alter_environment(self):
+ spack.modules.CONFIGURATION = self.configuration_alter_environment
+ spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x
+ for x in content
+ if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]), 0)
+ self.assertEqual(
+ len([x for x in content if 'setenv("FOO", "foo")' in x]), 1)
+ self.assertEqual(
+ len([x for x in content if 'unsetenv("BAR")' in x]), 1)
+
+ spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
+ content = self.get_modulefile_content(spec)
+ print('\n'.join(content))
+ self.assertEqual(
+ len([x
+ for x in content
+ if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]), 0)
+ self.assertEqual(
+ len([x for x in content if 'setenv("FOO", "foo")' in x]), 0)
+ self.assertEqual(
+ len([x for x in content if 'unsetenv("BAR")' in x]), 0)
+
+ def test_blacklist(self):
+ spack.modules.CONFIGURATION = self.configuration_blacklist
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x for x in content if 'if not isloaded(' in x]), 1)
+ self.assertEqual(len([x for x in content if 'load(' in x]), 1)
class DotkitTests(MockPackagesTest):
+ configuration_dotkit = {
+ 'enable': ['dotkit'],
+ 'dotkit': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
+ }
+ }
+
def setUp(self):
super(DotkitTests, self).setUp()
self.configuration_obj = spack.modules.CONFIGURATION
@@ -343,7 +487,7 @@ class DotkitTests(MockPackagesTest):
return content
def test_dotkit(self):
- spack.modules.CONFIGURATION = configuration_dotkit
+ spack.modules.CONFIGURATION = self.configuration_dotkit
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertTrue('#c spack' in content)
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 5c2731041c..40cdb02966 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -241,15 +241,15 @@ class SpecDagTest(MockPackagesTest):
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
- self.assertRaises(spack.spec.InvalidDependencyException,
+ self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
spec = Spec('libelf ^libdwarf')
- self.assertRaises(spack.spec.InvalidDependencyException,
+ self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
spec = Spec('mpich ^dyninst ^libelf')
- self.assertRaises(spack.spec.InvalidDependencyException,
+ self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
def test_equal(self):
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 3079288c77..d4eb9e057f 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -24,34 +24,34 @@
##############################################################################
import unittest
-import spack.spec
+import spack.spec as sp
from spack.parse import Token
from spack.spec import *
# Sample output for a complex lexing.
-complex_lex = [Token(ID, 'mvapich_foo'),
- Token(DEP),
- Token(ID, '_openmpi'),
- Token(AT),
- Token(ID, '1.2'),
- Token(COLON),
- Token(ID, '1.4'),
- Token(COMMA),
- Token(ID, '1.6'),
- Token(PCT),
- Token(ID, 'intel'),
- Token(AT),
- Token(ID, '12.1'),
- Token(COLON),
- Token(ID, '12.6'),
- Token(ON),
- Token(ID, 'debug'),
- Token(OFF),
- Token(ID, 'qt_4'),
- Token(DEP),
- Token(ID, 'stackwalker'),
- Token(AT),
- Token(ID, '8.1_1e')]
+complex_lex = [Token(sp.ID, 'mvapich_foo'),
+ Token(sp.DEP),
+ Token(sp.ID, '_openmpi'),
+ Token(sp.AT),
+ Token(sp.ID, '1.2'),
+ Token(sp.COLON),
+ Token(sp.ID, '1.4'),
+ Token(sp.COMMA),
+ Token(sp.ID, '1.6'),
+ Token(sp.PCT),
+ Token(sp.ID, 'intel'),
+ Token(sp.AT),
+ Token(sp.ID, '12.1'),
+ Token(sp.COLON),
+ Token(sp.ID, '12.6'),
+ Token(sp.ON),
+ Token(sp.ID, 'debug'),
+ Token(sp.OFF),
+ Token(sp.ID, 'qt_4'),
+ Token(sp.DEP),
+ Token(sp.ID, 'stackwalker'),
+ Token(sp.AT),
+ Token(sp.ID, '8.1_1e')]
class SpecSyntaxTest(unittest.TestCase):
@@ -74,16 +74,16 @@ class SpecSyntaxTest(unittest.TestCase):
"""
if spec is None:
spec = expected
- output = spack.spec.parse(spec)
+ output = sp.parse(spec)
parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed)
def check_lex(self, tokens, spec):
"""Check that the provided spec parses to the provided token list."""
- lex_output = SpecLexer().lex(spec)
+ lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output):
- if tok.type == ID:
+ if tok.type == sp.ID:
self.assertEqual(tok, spec_tok)
else:
# Only check the type for non-identifiers.