Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/basic_usage.rst               92
-rwxr-xr-x  lib/spack/env/cc                             94
-rw-r--r--  lib/spack/llnl/util/filesystem.py            18
-rw-r--r--  lib/spack/spack/__init__.py                   4
-rw-r--r--  lib/spack/spack/build_environment.py         50
-rw-r--r--  lib/spack/spack/cmd/compiler.py              13
-rw-r--r--  lib/spack/spack/cmd/module.py                10
-rw-r--r--  lib/spack/spack/cmd/pkg.py                    3
-rw-r--r--  lib/spack/spack/cmd/stage.py                  5
-rw-r--r--  lib/spack/spack/cmd/test-install.py         300
-rw-r--r--  lib/spack/spack/cmd/uninstall.py            191
-rw-r--r--  lib/spack/spack/fetch_strategy.py            24
-rw-r--r--  lib/spack/spack/modules.py                   17
-rw-r--r--  lib/spack/spack/package.py                   15
-rw-r--r--  lib/spack/spack/stage.py                     19
-rw-r--r--  lib/spack/spack/test/__init__.py              5
-rw-r--r--  lib/spack/spack/test/cc.py                   24
-rw-r--r--  lib/spack/spack/test/cmd/__init__.py          0
-rw-r--r--  lib/spack/spack/test/cmd/test_install.py    190
-rw-r--r--  lib/spack/spack/test/cmd/uninstall.py        37
-rw-r--r--  lib/spack/spack/test/database.py             82
-rw-r--r--  lib/spack/spack/test/install.py              29
-rw-r--r--  lib/spack/spack/test/mock_database.py        80
-rw-r--r--  lib/spack/spack/test/unit_install.py        126
24 files changed, 899 insertions, 529 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index accf09cc2a..29791d98c4 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -149,26 +149,46 @@ customize an installation in :ref:`sec-specs`.
``spack uninstall``
~~~~~~~~~~~~~~~~~~~~~
-To uninstall a package, type ``spack uninstall <package>``. This will
-completely remove the directory in which the package was installed.
+To uninstall a package, type ``spack uninstall <package>``. This will ask the user for
+confirmation and, if confirmed, completely remove the directory in which the package was installed.
.. code-block:: sh
spack uninstall mpich
If there are still installed packages that depend on the package to be
-uninstalled, spack will refuse to uninstall it. You can override this
-behavior with ``spack uninstall -f <package>``, but you risk breaking
-other installed packages. In general, it is safer to remove dependent
-packages *before* removing their dependencies.
+uninstalled, spack will refuse to uninstall it.
-A line like ``spack uninstall mpich`` may be ambiguous, if multiple
-``mpich`` configurations are installed. For example, if both
+To uninstall a package and every package that depends on it, you may give the
+``--dependents`` option.
+
+.. code-block:: sh
+
+ spack uninstall --dependents mpich
+
+will display a list of all the packages that depend on ``mpich`` and, upon confirmation,
+will uninstall them in the right order.
+
+A line like
+
+.. code-block:: sh
+
+ spack uninstall mpich
+
+may be ambiguous if multiple ``mpich`` configurations are installed. For example, if both
``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer
to either one. Because it cannot determine which one to uninstall,
-Spack will ask you to provide a version number to remove the
-ambiguity. As an example, ``spack uninstall mpich@3.1`` is
-unambiguous in this scenario.
+Spack will ask you either to provide a version number to remove the
+ambiguity, or to use the ``--all`` option to uninstall all of the matching packages.
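+
+For example, either of the following is unambiguous in this scenario:
+
+.. code-block:: sh
+
+   spack uninstall mpich@3.1
+   spack uninstall --all mpich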
+
+You can force the uninstallation of a package with the ``--force`` option
+
+.. code-block:: sh
+
+ spack uninstall --force mpich
+
+but you risk breaking other installed packages. In general, it is safer to remove dependent
+packages *before* removing their dependencies, or to use the ``--dependents`` option.
Seeing installed packages
@@ -352,25 +372,32 @@ how this is done is in :ref:`sec-specs`.
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~
+An alias for ``spack compiler find``.
+
+.. _spack-compiler-find:
+
+``spack compiler find``
+~~~~~~~~~~~~~~~~~~~~~~~
+
If you do not see a compiler in this list, but you want to use it with
-Spack, you can simply run ``spack compiler add`` with the path to
+Spack, you can simply run ``spack compiler find`` with the path to
where the compiler is installed. For example::
- $ spack compiler add /usr/local/tools/ic-13.0.079
+ $ spack compiler find /usr/local/tools/ic-13.0.079
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
intel@13.0.079
-Or you can run ``spack compiler add`` with no arguments to force
+Or you can run ``spack compiler find`` with no arguments to force
auto-detection. This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``. For example, using dotkit, you might do this::
$ module load gcc-4.9.0
- $ spack compiler add
+ $ spack compiler find
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
gcc@4.9.0
-This loads the environment module for gcc-4.9.0 to get it into the
+This loads the environment module for gcc-4.9.0 to add it to
``PATH``, and then it adds the compiler to Spack.
.. _spack-compiler-info:
@@ -774,6 +801,39 @@ Environment modules
Spack provides some limited integration with environment module
systems to make it easier to use the packages it provides.
+
+Installing Environment Modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In order to use Spack's generated environment modules, you must have
+installed the *Environment Modules* package. On many Linux
+distributions, this can be installed from the vendor's repository.
+For example: ``yum install environment-modules``
+(Fedora/RHEL/CentOS). If your Linux distribution does not have
+Environment Modules, you can get it with Spack:
+
+1. Install with:
+
+.. code-block:: sh
+
+ spack install environment-modules
+
+2. Activate with:
+
+Add the following two lines to your ``.bashrc`` profile (or similar):
+
+.. code-block:: sh
+
+ MODULES_HOME=`spack location -i environment-modules`
+ source ${MODULES_HOME}/Modules/init/bash
+
+If you use a Unix shell other than bash, substitute ``bash`` with
+the appropriate file in ``${MODULES_HOME}/Modules/init/``.
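+
+For example, ``zsh`` users would instead source the corresponding ``zsh`` init
+script (assuming your Environment Modules installation provides one):
+
+.. code-block:: sh
+
+   MODULES_HOME=`spack location -i environment-modules`
+   source ${MODULES_HOME}/Modules/init/zsh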
+
+
+Spack and Environment Modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
You can enable shell support by sourcing some files in the
``/share/spack`` directory.
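
For example, ``bash`` users would typically source Spack's setup script
(assuming the standard ``setup-env.sh`` name; adjust the path to wherever
Spack is installed):

.. code-block:: sh

   . /path/to/spack/share/spack/setup-env.sh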
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 2eb6f46afe..cb07a2ffea 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -39,7 +39,7 @@
#
# This is the list of environment variables that need to be set before
-# the script runs. They are set by routines in spack.build_environment
+# the script runs. They are set by routines in spack.build_environment
# as part of spack.package.Package.do_install().
parameters="
SPACK_PREFIX
@@ -50,7 +50,7 @@ SPACK_SHORT_SPEC"
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
-# Debug flag is optional; set to true for debug logging:
+# Debug flag is optional; set to "TRUE" for debug logging:
# SPACK_DEBUG
# Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND
@@ -66,11 +66,10 @@ function die {
for param in $parameters; do
if [[ -z ${!param} ]]; then
- die "Spack compiler must be run from spack! Input $param was missing!"
+ die "Spack compiler must be run from Spack! Input '$param' is missing."
fi
done
-#
# Figure out the type of compiler, the language, and the mode so that
# the compiler script knows what to do.
#
@@ -78,19 +77,18 @@ done
# 'command' is set based on the input command to $SPACK_[CC|CXX|F77|F90]
#
# 'mode' is set to one of:
+# vcheck version check
# cpp preprocess
# cc compile
# as assemble
# ld link
# ccld compile & link
-# vcheck version check
-#
-# Depending on the mode, we may or may not add extra rpaths.
-# This variable controls whether they are added.
-add_rpaths=true
command=$(basename "$0")
case "$command" in
+ cpp)
+ mode=cpp
+ ;;
cc|c89|c99|gcc|clang|icc|pgcc|xlc)
command="$SPACK_CC"
language="C"
@@ -107,34 +105,20 @@ case "$command" in
command="$SPACK_F77"
language="Fortran 77"
;;
- cpp)
- mode=cpp
- ;;
ld)
mode=ld
-
- # Darwin's linker has a -r argument that merges object files
- # together. It doesn't work with -rpath.
- if [[ $OSTYPE = darwin* ]]; then
- for arg in "$@"; do
- if [ "$arg" = -r ]; then
- add_rpaths=false
- break
- fi
- done
- fi
;;
*)
die "Unkown compiler: $command"
;;
esac
-# If any of the arguments below is present then the mode is vcheck. In
-# vcheck mode nothing is added in terms of extra search paths or
-# libraries
-if [ -z "$mode" ]; then
+# If any of the arguments below are present, then the mode is vcheck.
+# In vcheck mode, nothing is added in terms of extra search paths or
+# libraries.
+if [[ -z $mode ]]; then
for arg in "$@"; do
- if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then
+ if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
mode=vcheck
break
fi
@@ -142,16 +126,16 @@ if [ -z "$mode" ]; then
fi
# Finish setting up the mode.
-if [ -z "$mode" ]; then
+if [[ -z $mode ]]; then
mode=ccld
for arg in "$@"; do
- if [ "$arg" = -E ]; then
+ if [[ $arg == -E ]]; then
mode=cpp
break
- elif [ "$arg" = -S ]; then
+ elif [[ $arg == -S ]]; then
mode=as
break
- elif [ "$arg" = -c ]; then
+ elif [[ $arg == -c ]]; then
mode=cc
break
fi
@@ -159,7 +143,7 @@ if [ -z "$mode" ]; then
fi
# Dump the version and exit if we're in testing mode.
-if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
+if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then
echo "$mode"
exit
fi
@@ -170,10 +154,23 @@ if [[ -z $command ]]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
-if [ "$mode" == vcheck ] ; then
+if [[ $mode == vcheck ]]; then
exec ${command} "$@"
fi
+# Darwin's linker has a -r argument that merges object files together.
+# It doesn't work with -rpath.
+# This variable controls whether they are added.
+add_rpaths=true
+if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
+ for arg in "$@"; do
+ if [[ $arg == -r ]]; then
+ add_rpaths=false
+ break
+ fi
+ done
+fi
+
# Save original command for debug logging
input_command="$@"
args=("$@")
@@ -183,17 +180,17 @@ IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
for dep in "${deps[@]}"; do
# Prepend include directories
if [[ -d $dep/include ]]; then
- if [[ $mode = cpp || $mode = cc || $mode = as || $mode = ccld ]]; then
+ if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then
args=("-I$dep/include" "${args[@]}")
fi
fi
# Prepend lib and RPATH directories
if [[ -d $dep/lib ]]; then
- if [[ $mode = ccld ]]; then
+ if [[ $mode == ccld ]]; then
$add_rpaths && args=("-Wl,-rpath,$dep/lib" "${args[@]}")
args=("-L$dep/lib" "${args[@]}")
- elif [[ $mode = ld ]]; then
+ elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
args=("-L$dep/lib" "${args[@]}")
fi
@@ -201,10 +198,10 @@ for dep in "${deps[@]}"; do
# Prepend lib64 and RPATH directories
if [[ -d $dep/lib64 ]]; then
- if [[ $mode = ccld ]]; then
+ if [[ $mode == ccld ]]; then
$add_rpaths && args=("-Wl,-rpath,$dep/lib64" "${args[@]}")
args=("-L$dep/lib64" "${args[@]}")
- elif [[ $mode = ld ]]; then
+ elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
args=("-L$dep/lib64" "${args[@]}")
fi
@@ -212,9 +209,9 @@ for dep in "${deps[@]}"; do
done
# Include all -L's and prefix/whatever dirs in rpath
-if [[ $mode = ccld ]]; then
+if [[ $mode == ccld ]]; then
$add_rpaths && args=("-Wl,-rpath,$SPACK_PREFIX/lib" "-Wl,-rpath,$SPACK_PREFIX/lib64" "${args[@]}")
-elif [[ $mode = ld ]]; then
+elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "-rpath" "$SPACK_PREFIX/lib64" "${args[@]}")
fi
@@ -234,11 +231,14 @@ IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=("" ".")
PATH=""
for dir in "${env_path[@]}"; do
- remove=""
- for rm_dir in "${spack_env_dirs[@]}"; do
- if [[ $dir = $rm_dir ]]; then remove=True; fi
+ addpath=true
+ for env_dir in "${spack_env_dirs[@]}"; do
+ if [[ $dir == $env_dir ]]; then
+ addpath=false
+ break
+ fi
done
- if [[ -z $remove ]]; then
+ if $addpath; then
PATH="${PATH:+$PATH:}$dir"
fi
done
@@ -247,7 +247,7 @@ export PATH
full_command=("$command" "${args[@]}")
# In test command mode, write out full command for Spack tests.
-if [[ $SPACK_TEST_COMMAND = dump-args ]]; then
+if [[ $SPACK_TEST_COMMAND == dump-args ]]; then
echo "${full_command[@]}"
exit
elif [[ -n $SPACK_TEST_COMMAND ]]; then
@@ -257,7 +257,7 @@ fi
#
# Write the input and output commands to debug logs if it's asked for.
#
-if [[ $SPACK_DEBUG = TRUE ]]; then
+if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
echo "[$mode] $command $input_command" >> $input_log
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 46ca03bec4..70d46a7f77 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -27,7 +27,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree'
'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
- 'remove_dead_links', 'remove_linked_tree', 'fix_darwin_install_name']
+ 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
+ 'fix_darwin_install_name']
import os
import glob
@@ -395,6 +396,7 @@ def remove_linked_tree(path):
else:
shutil.rmtree(path, True)
+
def fix_darwin_install_name(path):
"""
Fix install name of dynamic libraries on Darwin to have full path.
@@ -420,3 +422,17 @@ def fix_darwin_install_name(path):
if dep == os.path.basename(loc):
subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0]
break
+
+
+def find_library_path(libname, *paths):
+ """Searches for a file called <libname> in each path.
+
+ Return:
+ directory where the library was found, if found. None otherwise.
+
+ """
+ for path in paths:
+ library = join_path(path, libname)
+ if os.path.exists(library):
+ return path
+ return None
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index aee11f061f..9108e1d0e3 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -136,9 +136,7 @@ for path in _tmp_candidates:
# don't add a second username if it's already unique by user.
if not _tmp_user in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
-
-for path in _tmp_candidates:
- if not path in tmp_dirs:
+ else:
tmp_dirs.append(join_path(path, 'spack-stage'))
# Whether spack should allow installation of unsafe versions of
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 119a255a34..eb72f2a6b4 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -59,6 +59,11 @@ SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
+# Platform-specific library suffix.
+dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
+
+
+
class MakeExecutable(Executable):
"""Special callable executable object for make so the user can
specify parallel or not on a per-invocation basis. Using
@@ -208,7 +213,7 @@ def set_module_variables_for_package(pkg, module):
# TODO: of build dependencies, as opposed to link dependencies.
# TODO: Currently, everything is a link dependency, but tools like
# TODO: this shouldn't be.
- m.cmake = which("cmake")
+ m.cmake = Executable('cmake')
# standard CMake arguments
m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
@@ -246,6 +251,9 @@ def set_module_variables_for_package(pkg, module):
# a Prefix object.
m.prefix = pkg.prefix
+ # Platform-specific library suffix.
+ m.dso_suffix = dso_suffix
+
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
@@ -270,21 +278,6 @@ def parent_class_modules(cls):
return result
-def setup_module_variables_for_dag(pkg):
- """Set module-scope variables for all packages in the DAG."""
- for spec in pkg.spec.traverse(order='post'):
- # If a user makes their own package repo, e.g.
- # spack.repos.mystuff.libelf.Libelf, and they inherit from
- # an existing class like spack.repos.original.libelf.Libelf,
- # then set the module variables for both classes so the
- # parent class can still use them if it gets called.
- spkg = spec.package
- modules = parent_class_modules(spkg.__class__)
- for mod in modules:
- set_module_variables_for_package(spkg, mod)
- set_module_variables_for_package(spkg, spkg.module)
-
-
def setup_package(pkg):
"""Execute all environment setup routines."""
spack_env = EnvironmentModifications()
@@ -308,20 +301,27 @@ def setup_package(pkg):
set_compiler_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env)
- setup_module_variables_for_dag(pkg)
- # Allow dependencies to modify the module
+ # traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
- for dependency_spec in spec.traverse(root=False):
- dpkg = dependency_spec.package
- dpkg.setup_dependent_package(pkg.module, spec)
+ for dspec in pkg.spec.traverse(order='post', root=False):
+ # If a user makes their own package repo, e.g.
+ # spack.repos.mystuff.libelf.Libelf, and they inherit from
+ # an existing class like spack.repos.original.libelf.Libelf,
+ # then set the module variables for both classes so the
+ # parent class can still use them if it gets called.
+ spkg = dspec.package
+ modules = parent_class_modules(spkg.__class__)
+ for mod in modules:
+ set_module_variables_for_package(spkg, mod)
+ set_module_variables_for_package(spkg, spkg.module)
- # Allow dependencies to set up environment as well
- for dependency_spec in spec.traverse(root=False):
- dpkg = dependency_spec.package
+ # Allow dependencies to modify the module
+ dpkg = dspec.package
+ dpkg.setup_dependent_package(pkg.module, spec)
dpkg.setup_dependent_environment(spack_env, run_env, spec)
- # Allow the package to apply some settings.
+ set_module_variables_for_package(pkg, pkg.module)
pkg.setup_environment(spack_env, run_env)
# Make sure nothing's strange about the Spack environment.
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 3e58e82184..a8e9e2a7a5 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -44,10 +44,10 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
- # Add
- add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.')
- add_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
- add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ # Find
+ find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
+ find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
+ find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
@@ -70,7 +70,7 @@ def setup_parser(subparser):
help="Configuration scope to read from.")
-def compiler_add(args):
+def compiler_find(args):
"""Search either $PATH or a list of paths for compilers and add them
to Spack's configuration."""
paths = args.add_paths
@@ -136,7 +136,8 @@ def compiler_list(args):
def compiler(parser, args):
- action = { 'add' : compiler_add,
+ action = { 'add' : compiler_find,
+ 'find' : compiler_find,
'remove' : compiler_remove,
'rm' : compiler_remove,
'info' : compiler_info,
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 315d9fc926..a67f5c0c13 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -22,21 +22,16 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
import os
import shutil
-import argparse
+import sys
import llnl.util.tty as tty
-from llnl.util.lang import partition_list
-from llnl.util.filesystem import mkdirp
-
import spack.cmd
+from llnl.util.filesystem import mkdirp
from spack.modules import module_types
from spack.util.string import *
-from spack.spec import Spec
-
description ="Manipulate modules and dotkits."
@@ -98,7 +93,6 @@ def module_refresh():
cls(spec).write()
-
def module(parser, args):
if args.module_command == 'refresh':
module_refresh()
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index cf478d3763..20a3fc5fc2 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -77,7 +77,8 @@ def get_git():
def list_packages(rev):
git = get_git()
- relpath = spack.packages_path[len(spack.prefix + os.path.sep):] + os.path.sep
+ pkgpath = os.path.join(spack.packages_path, 'packages')
+ relpath = pkgpath[len(spack.prefix + os.path.sep):] + os.path.sep
output = git('ls-tree', '--full-tree', '--name-only', rev, relpath,
output=str)
return sorted(line[len(relpath):] for line in output.split('\n') if line)
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index 5786780efb..975bb54ef7 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -35,6 +35,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
+ subparser.add_argument(
+ '-p', '--path', dest='path',
+ help="Path to stage package, does not add to spack tree")
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to stage")
@@ -50,4 +53,6 @@ def stage(parser, args):
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
package = spack.repo.get(spec)
+ if args.path:
+ package.path = args.path
package.do_stage()
diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py
index 656873a2f0..3277e15548 100644
--- a/lib/spack/spack/cmd/test-install.py
+++ b/lib/spack/spack/cmd/test-install.py
@@ -23,87 +23,106 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
-import xml.etree.ElementTree as ET
-import itertools
-import re
-import os
import codecs
+import os
+import time
+import xml.dom.minidom
+import xml.etree.ElementTree as ET
import llnl.util.tty as tty
-from llnl.util.filesystem import *
-
import spack
+import spack.cmd
+from llnl.util.filesystem import *
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError
-import spack.cmd
description = "Run package installation as a unit test, output formatted results."
-def setup_parser(subparser):
- subparser.add_argument(
- '-j', '--jobs', action='store', type=int,
- help="Explicitly set number of make jobs. Default is #cpus.")
-
- subparser.add_argument(
- '-n', '--no-checksum', action='store_true', dest='no_checksum',
- help="Do not check packages against checksum")
- subparser.add_argument(
- '-o', '--output', action='store', help="test output goes in this file")
+def setup_parser(subparser):
+ subparser.add_argument('-j',
+ '--jobs',
+ action='store',
+ type=int,
+ help="Explicitly set number of make jobs. Default is #cpus.")
- subparser.add_argument(
- 'package', nargs=argparse.REMAINDER, help="spec of package to install")
+ subparser.add_argument('-n',
+ '--no-checksum',
+ action='store_true',
+ dest='no_checksum',
+ help="Do not check packages against checksum")
+ subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
-class JunitResultFormat(object):
- def __init__(self):
- self.root = ET.Element('testsuite')
- self.tests = []
-
- def add_test(self, buildId, testResult, buildInfo=None):
- self.tests.append((buildId, testResult, buildInfo))
-
- def write_to(self, stream):
- self.root.set('tests', '{0}'.format(len(self.tests)))
- for buildId, testResult, buildInfo in self.tests:
- testcase = ET.SubElement(self.root, 'testcase')
- testcase.set('classname', buildId.name)
- testcase.set('name', buildId.stringId())
- if testResult == TestResult.FAILED:
- failure = ET.SubElement(testcase, 'failure')
- failure.set('type', "Build Error")
- failure.text = buildInfo
- elif testResult == TestResult.SKIPPED:
- skipped = ET.SubElement(testcase, 'skipped')
- skipped.set('type', "Skipped Build")
- skipped.text = buildInfo
- ET.ElementTree(self.root).write(stream)
+ subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
class TestResult(object):
PASSED = 0
FAILED = 1
SKIPPED = 2
+ ERRORED = 3
-class BuildId(object):
- def __init__(self, spec):
- self.name = spec.name
- self.version = spec.version
- self.hashId = spec.dag_hash()
-
- def stringId(self):
- return "-".join(str(x) for x in (self.name, self.version, self.hashId))
-
- def __hash__(self):
- return hash((self.name, self.version, self.hashId))
-
- def __eq__(self, other):
- if not isinstance(other, BuildId):
- return False
+class TestSuite(object):
+ def __init__(self, filename):
+ self.filename = filename
+ self.root = ET.Element('testsuite')
+ self.tests = []
- return ((self.name, self.version, self.hashId) ==
- (other.name, other.version, other.hashId))
+ def __enter__(self):
+ return self
+
+ def append(self, item):
+ if not isinstance(item, TestCase):
+ raise TypeError('only TestCase instances may be appended to a TestSuite instance')
+ self.tests.append(item) # Append the item to the list of tests
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Prepare the header for the entire test suite
+ number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
+ self.root.set('errors', str(number_of_errors))
+ number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
+ self.root.set('failures', str(number_of_failures))
+ self.root.set('tests', str(len(self.tests)))
+
+ for item in self.tests:
+ self.root.append(item.element)
+
+ with open(self.filename, 'wb') as file:
+ xml_string = ET.tostring(self.root)
+ xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml()
+ file.write(xml_string)
+
+
+class TestCase(object):
+
+ results = {
+ TestResult.PASSED: None,
+ TestResult.SKIPPED: 'skipped',
+ TestResult.FAILED: 'failure',
+ TestResult.ERRORED: 'error',
+ }
+
+ def __init__(self, classname, name, time=None):
+ self.element = ET.Element('testcase')
+ self.element.set('classname', str(classname))
+ self.element.set('name', str(name))
+ if time is not None:
+ self.element.set('time', str(time))
+ self.result_type = None
+
+ def set_result(self, result_type, message=None, error_type=None, text=None):
+ self.result_type = result_type
+ result = TestCase.results[self.result_type]
+ if result is not None and result is not TestResult.PASSED:
+ subelement = ET.SubElement(self.element, result)
+ if error_type is not None:
+ subelement.set('type', error_type)
+ if message is not None:
+ subelement.set('message', str(message))
+ if text is not None:
+ subelement.text = text
def fetch_log(path):
@@ -114,46 +133,76 @@ def fetch_log(path):
def failed_dependencies(spec):
- return set(childSpec for childSpec in spec.dependencies.itervalues() if not
- spack.repo.get(childSpec).installed)
-
-
-def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
- # Post-order traversal is not strictly required but it makes sense to output
- # tests for dependencies first.
- for spec in topSpec.traverse(order='post'):
- if spec not in newInstalls:
- continue
-
- failedDeps = failed_dependencies(spec)
- package = spack.repo.get(spec)
- if failedDeps:
- result = TestResult.SKIPPED
- dep = iter(failedDeps).next()
- depBID = BuildId(dep)
- errOutput = "Skipped due to failed dependency: {0}".format(
- depBID.stringId())
- elif (not package.installed) and (not package.stage.source_path):
- result = TestResult.FAILED
- errOutput = "Failure to fetch package resources."
- elif not package.installed:
- result = TestResult.FAILED
- lines = getLogFunc(package.build_log_path)
- errMessages = list(line for line in lines if
- re.search('error:', line, re.IGNORECASE))
- errOutput = errMessages if errMessages else lines[-10:]
- errOutput = '\n'.join(itertools.chain(
- [spec.to_yaml(), "Errors:"], errOutput,
- ["Build Log:", package.build_log_path]))
- else:
- result = TestResult.PASSED
- errOutput = None
-
- bId = BuildId(spec)
- output.add_test(bId, result, errOutput)
+ return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
+
+
+def get_top_spec_or_die(args):
+ specs = spack.cmd.parse_specs(args.package, concretize=True)
+ if len(specs) > 1:
+ tty.die("Only 1 top-level package can be specified")
+ top_spec = iter(specs).next()
+ return top_spec
+
+
+def install_single_spec(spec, number_of_jobs):
+ package = spack.repo.get(spec)
+
+ # If it is already installed, skip the test
+ if spack.repo.get(spec).installed:
+ testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
+ testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
+ return testcase
+
+ # If it relies on dependencies that did not install, skip
+ if failed_dependencies(spec):
+ testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
+ testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
+ return testcase
+
+ # Otherwise try to install the spec
+ try:
+ start_time = time.time()
+ package.do_install(keep_prefix=False,
+ keep_stage=True,
+ ignore_deps=False,
+ make_jobs=number_of_jobs,
+ verbose=True,
+ fake=False)
+ duration = time.time() - start_time
+ testcase = TestCase(package.name, package.spec.short_spec, duration)
+ testcase.set_result(TestResult.PASSED)
+ except InstallError:
+ # An InstallError is considered a failure (the recipe didn't work correctly)
+ duration = time.time() - start_time
+ # Try to get the log
+ lines = fetch_log(package.build_log_path)
+ text = '\n'.join(lines)
+ testcase = TestCase(package.name, package.spec.short_spec, duration)
+ testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
+
+ except FetchError:
+ # A FetchError is considered an error (we didn't even start building)
+ duration = time.time() - start_time
+ testcase = TestCase(package.name, package.spec.short_spec, duration)
+ testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
+
+ return testcase
+
+
+def get_filename(args, top_spec):
+ if not args.output:
+ fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
+ output_directory = join_path(os.getcwd(), 'test-output')
+ if not os.path.exists(output_directory):
+ os.mkdir(output_directory)
+ output_filename = join_path(output_directory, fname)
+ else:
+ output_filename = args.output
+ return output_filename
def test_install(parser, args):
+ # Check the input
if not args.package:
tty.die("install requires a package argument")
@@ -162,50 +211,15 @@ def test_install(parser, args):
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
- spack.do_checksum = False # TODO: remove this global.
-
- specs = spack.cmd.parse_specs(args.package, concretize=True)
- if len(specs) > 1:
- tty.die("Only 1 top-level package can be specified")
- topSpec = iter(specs).next()
-
- newInstalls = set()
- for spec in topSpec.traverse():
- package = spack.repo.get(spec)
- if not package.installed:
- newInstalls.add(spec)
-
- if not args.output:
- bId = BuildId(topSpec)
- outputDir = join_path(os.getcwd(), "test-output")
- if not os.path.exists(outputDir):
- os.mkdir(outputDir)
- outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId()))
- else:
- outputFpath = args.output
-
- for spec in topSpec.traverse(order='post'):
- # Calling do_install for the top-level package would be sufficient but
- # this attempts to keep going if any package fails (other packages which
- # are not dependents may succeed)
- package = spack.repo.get(spec)
- if (not failed_dependencies(spec)) and (not package.installed):
- try:
- package.do_install(
- keep_prefix=False,
- keep_stage=True,
- ignore_deps=False,
- make_jobs=args.jobs,
- verbose=True,
- fake=False)
- except InstallError:
- pass
- except FetchError:
- pass
-
- jrf = JunitResultFormat()
- handled = {}
- create_test_output(topSpec, newInstalls, jrf)
-
- with open(outputFpath, 'wb') as F:
- jrf.write_to(F)
+ spack.do_checksum = False # TODO: remove this global.
+
+ # Get the one and only top spec
+ top_spec = get_top_spec_or_die(args)
+ # Get the filename of the test
+ output_filename = get_filename(args, top_spec)
+ # TEST SUITE
+ with TestSuite(output_filename) as test_suite:
+ # Traverse in post order : each spec is a test case
+ for spec in top_spec.traverse(order='post'):
+ test_case = install_single_spec(spec, args.jobs)
+ test_suite.append(test_case)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 350ef372cb..1ff3d8db5f 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -23,19 +23,33 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function
-import sys
+
import argparse
import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-
import spack
import spack.cmd
import spack.repository
from spack.cmd.find import display_specs
-from spack.package import PackageStillNeededError
-description="Remove an installed package"
+description = "Remove an installed package"
+
+error_message = """You can either:
+ a) Use a more specific spec, or
+ b) use spack uninstall -a to uninstall ALL matching specs.
+"""
+
+
+def ask_for_confirmation(message):
+ while True:
+ tty.msg(message + '[y/n]')
+ choice = raw_input().lower()
+ if choice == 'y':
+ break
+ elif choice == 'n':
+ raise SystemExit('Operation aborted')
+ tty.warn('Please reply either "y" or "n"')
+
def setup_parser(subparser):
subparser.add_argument(
@@ -44,10 +58,101 @@ def setup_parser(subparser):
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " +
- "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
- "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+ "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
+ "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+ '-d', '--dependents', action='store_true', dest='dependents',
+ help='Also uninstall any packages that depend on the ones given via command line.'
+ )
+ subparser.add_argument(
+ '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
+ help='Assume "yes" is the answer to every confirmation asked to the user.'
+
+ )
+ subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+
+
+def concretize_specs(specs, allow_multiple_matches=False, force=False):
+ """
+ Returns a list of specs matching the not necessarily concretized specs given on the command line
+
+ Args:
+ specs: list of specs to be matched against installed packages
+ allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+
+ Return:
+ list of specs
+ """
+ specs_from_cli = [] # List of specs that match expressions given via command line
+ has_errors = False
+ for spec in specs:
+ matching = spack.installed_db.query(spec)
+ # For each spec provided, make sure it refers to only one package.
+ # Fail and ask user to be unambiguous if it doesn't
+ if not allow_multiple_matches and len(matching) > 1:
+ tty.error("%s matches multiple packages:" % spec)
+ print()
+ display_specs(matching, long=True)
+ print()
+ has_errors = True
+
+ # No installed package matches the query
+ if len(matching) == 0 and not force:
+ tty.error("%s does not match any installed packages." % spec)
+ has_errors = True
+
+ specs_from_cli.extend(matching)
+ if has_errors:
+ tty.die(error_message)
+
+ return specs_from_cli
+
+
+def installed_dependents(specs):
+ """
+ Returns a dictionary that maps each spec to a list of its installed dependents
+
+ Args:
+ specs: list of specs to be checked for dependents
+
+ Returns:
+ dictionary of installed dependents
+ """
+ dependents = {}
+ for item in specs:
+ lst = [x for x in item.package.installed_dependents if x not in specs]
+ if lst:
+ lst = list(set(lst))
+ dependents[item] = lst
+ return dependents
+
+
+def do_uninstall(specs, force):
+ """
+ Uninstalls all the specs in a list.
+
+ Args:
+ specs: list of specs to be uninstalled
+ force: force uninstallation (boolean)
+ """
+ packages = []
+ for item in specs:
+ try:
+ # should work if package is known to spack
+ packages.append(item.package)
+ except spack.repository.UnknownPackageError as e:
+ # The package.py file has gone away -- but still
+ # want to uninstall.
+ spack.Package(item).do_uninstall(force=True)
+
+ # Sort packages to be uninstalled by the number of installed dependents
+ # This ensures we do things in the right order
+ def num_installed_deps(pkg):
+ return len(pkg.installed_dependents)
+
+ packages.sort(key=num_installed_deps)
+ for item in packages:
+ item.do_uninstall(force=force)
def uninstall(parser, args):
@@ -56,50 +161,34 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages)
+ # Get the list of installed specs that match the ones given via cli
+ uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of the case in which '-a' is given on the cli
+ dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
- # For each spec provided, make sure it refers to only one package.
- # Fail and ask user to be unambiguous if it doesn't
- pkgs = []
- for spec in specs:
- matching_specs = spack.installed_db.query(spec)
- if not args.all and len(matching_specs) > 1:
- tty.error("%s matches multiple packages:" % spec)
- print()
- display_specs(matching_specs, long=True)
- print()
- print("You can either:")
- print(" a) Use a more specific spec, or")
- print(" b) use spack uninstall -a to uninstall ALL matching specs.")
- sys.exit(1)
-
- if len(matching_specs) == 0:
- if args.force: continue
- tty.die("%s does not match any installed packages." % spec)
-
- for s in matching_specs:
- try:
- # should work if package is known to spack
- pkgs.append(s.package)
- except spack.repository.UnknownPackageError as e:
- # The package.py file has gone away -- but still
- # want to uninstall.
- spack.Package(s).do_uninstall(force=True)
-
- # Sort packages to be uninstalled by the number of installed dependents
- # This ensures we do things in the right order
- def num_installed_deps(pkg):
- return len(pkg.installed_dependents)
- pkgs.sort(key=num_installed_deps)
-
- # Uninstall packages in order now.
- for pkg in pkgs:
- try:
- pkg.do_uninstall(force=args.force)
- except PackageStillNeededError as e:
- tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True))
+ # Process dependent_list and update uninstall_list
+ has_error = False
+ if dependent_list and not args.dependents and not args.force:
+ for spec, lst in dependent_list.items():
+ tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
- display_specs(e.dependents, long=True)
+ display_specs(lst, long=True)
print('')
- print("You can use spack uninstall -f to force this action.")
- sys.exit(1)
+ has_error = True
+ elif args.dependents:
+ for key, lst in dependent_list.items():
+ uninstall_list.extend(lst)
+ uninstall_list = list(set(uninstall_list))
+
+ if has_error:
+ tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
+
+ if not args.yes_to_all:
+ tty.msg("The following packages will be uninstalled : ")
+ print('')
+ display_specs(uninstall_list, long=True)
+ print('')
+ ask_for_confirmation('Do you want to proceed ? ')
+
+ # Uninstall everything on the list
+ do_uninstall(uninstall_list, args.force)
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 4ea87bea7e..ce2c4e30c7 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -157,12 +157,26 @@ class URLFetchStrategy(FetchStrategy):
tty.msg("Already downloaded %s" % self.archive_file)
return
+ possible_files = self.stage.expected_archive_files
+ save_file = None
+ partial_file = None
+ if possible_files:
+ save_file = self.stage.expected_archive_files[0]
+ partial_file = self.stage.expected_archive_files[0] + '.part'
+
tty.msg("Trying to fetch from %s" % self.url)
- curl_args = ['-O', # save file to disk
+ if partial_file:
+ save_args = ['-C', '-', # continue partial downloads
+ '-o', partial_file] # use a .part file
+ else:
+ save_args = ['-O']
+
+ curl_args = save_args + [
'-f', # fail on >400 errors
'-D', '-', # print out HTML headers
- '-L', self.url, ]
+ '-L', # resolve 3xx redirects
+ self.url, ]
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
@@ -178,6 +192,9 @@ class URLFetchStrategy(FetchStrategy):
if self.archive_file:
os.remove(self.archive_file)
+ if partial_file and os.path.exists(partial_file):
+ os.remove(partial_file)
+
if spack.curl.returncode == 22:
# This is a 404. Curl will print the error.
raise FailedDownloadError(
@@ -209,6 +226,9 @@ class URLFetchStrategy(FetchStrategy):
"'spack clean <package>' to remove the bad archive, then fix",
"your internet gateway issue and install again.")
+ if save_file:
+ os.rename(partial_file, save_file)
+
if not self.archive_file:
raise FailedDownloadError(self.url)
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index f6a11c92e3..61624fbd70 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -163,9 +163,14 @@ class EnvModule(object):
# package-specific modifications
spack_env = EnvironmentModifications()
for item in self.pkg.extendees:
- package = self.spec[item].package
- package.setup_dependent_package(self.pkg.module, self.spec)
- package.setup_dependent_environment(spack_env, env, self.spec)
+ try:
+ package = self.spec[item].package
+ package.setup_dependent_package(self.pkg.module, self.spec)
+ package.setup_dependent_environment(spack_env, env, self.spec)
+ except:
+ # The extends was conditional, so it doesn't count here
+ # eg: extends('python', when='+python')
+ pass
# Package-specific environment modifications
self.spec.package.setup_environment(spack_env, env)
@@ -206,7 +211,11 @@ class EnvModule(object):
def remove(self):
mod_file = self.file_name
if os.path.exists(mod_file):
- shutil.rmtree(mod_file, ignore_errors=True)
+ try:
+ os.remove(mod_file) # Remove the module file
+ os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up
+ except OSError:
+ pass # removedirs throws OSError on first non-empty directory found
class Dotkit(EnvModule):
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 9af3221837..4065553131 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -335,6 +335,9 @@ class Package(object):
if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:]
+ # Allow custom staging paths for packages
+ self.path=None
+
# Sanity check attributes required by Spack directives.
spack.directives.ensure_dicts(type(self))
@@ -445,7 +448,8 @@ class Package(object):
resource_stage_folder = self._resource_stage(resource)
resource_mirror = join_path(self.name, os.path.basename(fetcher.url))
stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource,
- name=resource_stage_folder, mirror_path=resource_mirror)
+ name=resource_stage_folder, mirror_path=resource_mirror,
+ path=self.path)
return stage
def _make_root_stage(self, fetcher):
@@ -455,7 +459,7 @@ class Package(object):
s = self.spec
stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash())
# Build the composite stage
- stage = Stage(fetcher, mirror_path=mp, name=stage_name)
+ stage = Stage(fetcher, mirror_path=mp, name=stage_name, path=self.path)
return stage
def _make_stage(self):
@@ -709,7 +713,6 @@ class Package(object):
if spack.do_checksum and self.version in self.versions:
self.stage.check()
-
def do_stage(self, mirror_only=False):
"""Unpacks the fetched tarball, then changes into the expanded tarball
directory."""
@@ -926,6 +929,9 @@ class Package(object):
install(env_path, env_install_path)
dump_packages(self.spec, packages_dir)
+ # Run post install hooks before build stage is removed.
+ spack.hooks.post_install(self)
+
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
@@ -954,9 +960,6 @@ class Package(object):
# the database, so that we don't need to re-read from file.
spack.installed_db.add(self.spec, self.prefix)
- # Once everything else is done, run post install hooks
- spack.hooks.post_install(self)
-
def sanity_check_prefix(self):
"""This function checks whether install succeeded."""
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index f88f82fc2d..84c47ee660 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -89,7 +89,7 @@ class Stage(object):
"""
def __init__(self, url_or_fetch_strategy,
- name=None, mirror_path=None, keep=False):
+ name=None, mirror_path=None, keep=False, path=None):
"""Create a stage object.
Parameters:
url_or_fetch_strategy
@@ -135,7 +135,10 @@ class Stage(object):
# Try to construct here a temporary name for the stage directory
# If this is a named stage, then construct a named path.
- self.path = join_path(spack.stage_path, self.name)
+ if path is not None:
+ self.path = path
+ else:
+ self.path = join_path(spack.stage_path, self.name)
# Flag to decide whether to delete the stage folder on exit or not
self.keep = keep
@@ -208,6 +211,18 @@ class Stage(object):
return False
@property
+ def expected_archive_files(self):
+ """Possible archive file paths."""
+ paths = []
+ if isinstance(self.fetcher, fs.URLFetchStrategy):
+ paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+
+ if self.mirror_path:
+ paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+
+ return paths
+
+ @property
def archive_file(self):
"""Path to the source archive within this stage directory."""
paths = []
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index cd842561e6..3c5edde66b 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -61,13 +61,14 @@ test_names = ['versions',
'optional_deps',
'make_executable',
'configure_guess',
- 'unit_install',
'lock',
'database',
'namespace_trie',
'yaml',
'sbang',
- 'environment']
+ 'environment',
+ 'cmd.uninstall',
+ 'cmd.test_install']
def list_tests():
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index 0b1aeb2a8f..594cd6efe9 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -219,3 +219,27 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command))
+ def test_ld_deps_reentrant(self):
+ """Make sure ld -r is handled correctly on OS's where it doesn't
+ support rpaths."""
+ os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
+
+ os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
+ reentrant_test_command = ['-r'] + test_command
+ self.check_ld('dump-args', reentrant_test_command,
+ 'ld ' +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+
+ '-L' + self.dep1 + '/lib ' +
+ '-rpath ' + self.dep1 + '/lib ' +
+
+ '-r ' +
+ ' '.join(test_command))
+
+ os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
+ self.check_ld('dump-args', reentrant_test_command,
+ 'ld ' +
+ '-L' + self.dep1 + '/lib ' +
+ '-r ' +
+ ' '.join(test_command))
diff --git a/lib/spack/spack/test/cmd/__init__.py b/lib/spack/spack/test/cmd/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/test/cmd/__init__.py
diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py
new file mode 100644
index 0000000000..2206c7bea1
--- /dev/null
+++ b/lib/spack/spack/test/cmd/test_install.py
@@ -0,0 +1,190 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import collections
+from contextlib import contextmanager
+
+import StringIO
+
+FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
+# Monkey-patch open to write module files to a StringIO instance
+@contextmanager
+def mock_open(filename, mode):
+ if not mode == 'wb':
+ raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open')
+
+ FILE_REGISTRY[filename] = StringIO.StringIO()
+
+ try:
+ yield FILE_REGISTRY[filename]
+ finally:
+ handle = FILE_REGISTRY[filename]
+ FILE_REGISTRY[filename] = handle.getvalue()
+ handle.close()
+
+import os
+import itertools
+import unittest
+
+import spack
+import spack.cmd
+
+
+# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python)
+test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
+
+
+class MockSpec(object):
+ def __init__(self, name, version, hashStr=None):
+ self.dependencies = {}
+ self.name = name
+ self.version = version
+ self.hash = hashStr if hashStr else hash((name, version))
+
+ def traverse(self, order=None):
+ for _, spec in self.dependencies.items():
+ yield spec
+ yield self
+ #allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
+ #return set(itertools.chain([self], allDeps))
+
+ def dag_hash(self):
+ return self.hash
+
+ @property
+ def short_spec(self):
+ return '-'.join([self.name, str(self.version), str(self.hash)])
+
+
+class MockPackage(object):
+ def __init__(self, spec, buildLogPath):
+ self.name = spec.name
+ self.spec = spec
+ self.installed = False
+ self.build_log_path = buildLogPath
+
+ def do_install(self, *args, **kwargs):
+ self.installed = True
+
+
+class MockPackageDb(object):
+ def __init__(self, init=None):
+ self.specToPkg = {}
+ if init:
+ self.specToPkg.update(init)
+
+ def get(self, spec):
+ return self.specToPkg[spec]
+
+
+def mock_fetch_log(path):
+ return []
+
+specX = MockSpec('X', "1.2.0")
+specY = MockSpec('Y', "2.3.8")
+specX.dependencies['Y'] = specY
+pkgX = MockPackage(specX, 'logX')
+pkgY = MockPackage(specY, 'logY')
+
+
+class MockArgs(object):
+ def __init__(self, package):
+ self.package = package
+ self.jobs = None
+ self.no_checksum = False
+ self.output = None
+
+
+# TODO: add test(s) where Y fails to install
+class TestInstallTest(unittest.TestCase):
+ """
+ Tests test-install where X->Y
+ """
+
+ def setUp(self):
+ super(TestInstallTest, self).setUp()
+
+ # Monkey patch parse specs
+ def monkey_parse_specs(x, concretize):
+ if x == 'X':
+ return [specX]
+ elif x == 'Y':
+ return [specY]
+ return []
+
+ self.parse_specs = spack.cmd.parse_specs
+ spack.cmd.parse_specs = monkey_parse_specs
+
+ # Monkey patch os.mkdirp
+ self.os_mkdir = os.mkdir
+ os.mkdir = lambda x: True
+
+ # Monkey patch open
+ test_install.open = mock_open
+
+ # Clean FILE_REGISTRY
+ FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
+ pkgX.installed = False
+ pkgY.installed = False
+
+ # Monkey patch pkgDb
+ self.saved_db = spack.repo
+ pkgDb = MockPackageDb({specX: pkgX, specY: pkgY})
+ spack.repo = pkgDb
+
+ def tearDown(self):
+ # Remove the monkey patched test_install.open
+ test_install.open = open
+
+ # Remove the monkey patched os.mkdir
+ os.mkdir = self.os_mkdir
+ del self.os_mkdir
+
+ # Remove the monkey patched parse_specs
+ spack.cmd.parse_specs = self.parse_specs
+ del self.parse_specs
+ super(TestInstallTest, self).tearDown()
+
+ spack.repo = self.saved_db
+
+ def test_installing_both(self):
+ test_install.test_install(None, MockArgs('X') )
+ self.assertEqual(len(FILE_REGISTRY), 1)
+ for _, content in FILE_REGISTRY.items():
+ self.assertTrue('tests="2"' in content)
+ self.assertTrue('failures="0"' in content)
+ self.assertTrue('errors="0"' in content)
+
+ def test_dependency_already_installed(self):
+ pkgX.installed = True
+ pkgY.installed = True
+ test_install.test_install(None, MockArgs('X'))
+ self.assertEqual(len(FILE_REGISTRY), 1)
+ for _, content in FILE_REGISTRY.items():
+ self.assertTrue('tests="2"' in content)
+ self.assertTrue('failures="0"' in content)
+ self.assertTrue('errors="0"' in content)
+ self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2)
diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py
new file mode 100644
index 0000000000..80efe06d36
--- /dev/null
+++ b/lib/spack/spack/test/cmd/uninstall.py
@@ -0,0 +1,37 @@
+import spack.test.mock_database
+
+from spack.cmd.uninstall import uninstall
+
+
+class MockArgs(object):
+ def __init__(self, packages, all=False, force=False, dependents=False):
+ self.packages = packages
+ self.all = all
+ self.force = force
+ self.dependents = dependents
+ self.yes_to_all = True
+
+
+class TestUninstall(spack.test.mock_database.MockDatabase):
+ def test_uninstall(self):
+ parser = None
+ # Multiple matches
+ args = MockArgs(['mpileaks'])
+ self.assertRaises(SystemExit, uninstall, parser, args)
+ # Installed dependents
+ args = MockArgs(['libelf'])
+ self.assertRaises(SystemExit, uninstall, parser, args)
+ # Recursive uninstall
+ args = MockArgs(['callpath'], all=True, dependents=True)
+ uninstall(parser, args)
+
+ all_specs = spack.install_layout.all_specs()
+ self.assertEqual(len(all_specs), 7)
+ # query specs with multiple configurations
+ mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
+ callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+
+ self.assertEqual(len(mpileaks_specs), 0)
+ self.assertEqual(len(callpath_specs), 0)
+ self.assertEqual(len(mpi_specs), 3)
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index ce6e8a0552..465263d057 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -28,16 +28,12 @@ both in memory and in its file
"""
import os.path
import multiprocessing
-import shutil
-import tempfile
import spack
from llnl.util.filesystem import join_path
from llnl.util.lock import *
from llnl.util.tty.colify import colify
-from spack.database import Database
-from spack.directory_layout import YamlDirectoryLayout
-from spack.test.mock_packages_test import *
+from spack.test.mock_database import MockDatabase
def _print_ref_counts():
@@ -75,80 +71,7 @@ def _print_ref_counts():
colify(recs, cols=3)
-class DatabaseTest(MockPackagesTest):
-
- def _mock_install(self, spec):
- s = Spec(spec)
- s.concretize()
- pkg = spack.repo.get(s)
- pkg.do_install(fake=True)
-
-
- def _mock_remove(self, spec):
- specs = spack.installed_db.query(spec)
- assert(len(specs) == 1)
- spec = specs[0]
- spec.package.do_uninstall(spec)
-
-
- def setUp(self):
- super(DatabaseTest, self).setUp()
- #
- # TODO: make the mockup below easier.
- #
-
- # Make a fake install directory
- self.install_path = tempfile.mkdtemp()
- self.spack_install_path = spack.install_path
- spack.install_path = self.install_path
-
- self.install_layout = YamlDirectoryLayout(self.install_path)
- self.spack_install_layout = spack.install_layout
- spack.install_layout = self.install_layout
-
- # Make fake database and fake install directory.
- self.installed_db = Database(self.install_path)
- self.spack_installed_db = spack.installed_db
- spack.installed_db = self.installed_db
-
- # make a mock database with some packages installed note that
- # the ref count for dyninst here will be 3, as it's recycled
- # across each install.
- #
- # Here is what the mock DB looks like:
- #
- # o mpileaks o mpileaks' o mpileaks''
- # |\ |\ |\
- # | o callpath | o callpath' | o callpath''
- # |/| |/| |/|
- # o | mpich o | mpich2 o | zmpi
- # | | o | fake
- # | | |
- # | |______________/
- # | .____________/
- # |/
- # o dyninst
- # |\
- # | o libdwarf
- # |/
- # o libelf
- #
-
- # Transaction used to avoid repeated writes.
- with spack.installed_db.write_transaction():
- self._mock_install('mpileaks ^mpich')
- self._mock_install('mpileaks ^mpich2')
- self._mock_install('mpileaks ^zmpi')
-
-
- def tearDown(self):
- super(DatabaseTest, self).tearDown()
- shutil.rmtree(self.install_path)
- spack.install_path = self.spack_install_path
- spack.install_layout = self.spack_install_layout
- spack.installed_db = self.spack_installed_db
-
-
+class DatabaseTest(MockDatabase):
def test_005_db_exists(self):
"""Make sure db cache file exists after creating."""
index_file = join_path(self.install_path, '.spack-db', 'index.yaml')
@@ -157,7 +80,6 @@ class DatabaseTest(MockPackagesTest):
self.assertTrue(os.path.exists(index_file))
self.assertTrue(os.path.exists(lock_file))
-
def test_010_all_install_sanity(self):
"""Ensure that the install layout reflects what we think it does."""
all_specs = spack.install_layout.all_specs()
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 8297893f01..fc5b7e67df 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -64,7 +64,14 @@ class InstallTest(MockPackagesTest):
shutil.rmtree(self.tmpdir, ignore_errors=True)
- def test_install_and_uninstall(self):
+ def fake_fetchify(self, pkg):
+ """Fake the URL for a package so it downloads from a file."""
+ fetcher = FetchStrategyComposite()
+ fetcher.append(URLFetchStrategy(self.repo.url))
+ pkg.fetcher = fetcher
+
+
+ def ztest_install_and_uninstall(self):
# Get a basic concrete spec for the trivial install package.
spec = Spec('trivial_install_test_package')
spec.concretize()
@@ -73,11 +80,7 @@ class InstallTest(MockPackagesTest):
# Get the package
pkg = spack.repo.get(spec)
- # Fake the URL for the package so it downloads from a file.
-
- fetcher = FetchStrategyComposite()
- fetcher.append(URLFetchStrategy(self.repo.url))
- pkg.fetcher = fetcher
+ self.fake_fetchify(pkg)
try:
pkg.do_install()
@@ -85,3 +88,17 @@ class InstallTest(MockPackagesTest):
except Exception, e:
pkg.remove_prefix()
raise
+
+
+ def test_install_environment(self):
+ spec = Spec('cmake-client').concretized()
+
+ for s in spec.traverse():
+ self.fake_fetchify(s.package)
+
+ pkg = spec.package
+ try:
+ pkg.do_install()
+ except Exception, e:
+ pkg.remove_prefix()
+ raise
diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py
new file mode 100644
index 0000000000..82ba59fc48
--- /dev/null
+++ b/lib/spack/spack/test/mock_database.py
@@ -0,0 +1,80 @@
+import shutil
+import tempfile
+
+import spack
+from spack.spec import Spec
+from spack.database import Database
+from spack.directory_layout import YamlDirectoryLayout
+from spack.test.mock_packages_test import MockPackagesTest
+
+
+class MockDatabase(MockPackagesTest):
+ def _mock_install(self, spec):
+ s = Spec(spec)
+ s.concretize()
+ pkg = spack.repo.get(s)
+ pkg.do_install(fake=True)
+
+ def _mock_remove(self, spec):
+ specs = spack.installed_db.query(spec)
+ assert len(specs) == 1
+ spec = specs[0]
+ spec.package.do_uninstall(spec)
+
+ def setUp(self):
+ super(MockDatabase, self).setUp()
+ #
+ # TODO: make the mockup below easier.
+ #
+
+ # Make a fake install directory
+ self.install_path = tempfile.mkdtemp()
+ self.spack_install_path = spack.install_path
+ spack.install_path = self.install_path
+
+ self.install_layout = YamlDirectoryLayout(self.install_path)
+ self.spack_install_layout = spack.install_layout
+ spack.install_layout = self.install_layout
+
+ # Make fake database and fake install directory.
+ self.installed_db = Database(self.install_path)
+ self.spack_installed_db = spack.installed_db
+ spack.installed_db = self.installed_db
+
+ # Make a mock database with some packages installed. Note that
+ # the ref count for dyninst here will be 3, as it's recycled
+ # across each install.
+ #
+ # Here is what the mock DB looks like:
+ #
+ # o mpileaks o mpileaks' o mpileaks''
+ # |\ |\ |\
+ # | o callpath | o callpath' | o callpath''
+ # |/| |/| |/|
+ # o | mpich o | mpich2 o | zmpi
+ # | | o | fake
+ # | | |
+ # | |______________/
+ # | .____________/
+ # |/
+ # o dyninst
+ # |\
+ # | o libdwarf
+ # |/
+ # o libelf
+ #
+
+ # Transaction used to avoid repeated writes.
+ with spack.installed_db.write_transaction():
+ self._mock_install('mpileaks ^mpich')
+ self._mock_install('mpileaks ^mpich2')
+ self._mock_install('mpileaks ^zmpi')
+
+ def tearDown(self):
+ for spec in spack.installed_db.query():
+ spec.package.do_uninstall(spec)
+ super(MockDatabase, self).tearDown()
+ shutil.rmtree(self.install_path)
+ spack.install_path = self.spack_install_path
+ spack.install_layout = self.spack_install_layout
+ spack.installed_db = self.spack_installed_db
diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py
deleted file mode 100644
index 18615b7efe..0000000000
--- a/lib/spack/spack/test/unit_install.py
+++ /dev/null
@@ -1,126 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import itertools
-import unittest
-
-import spack
-
-test_install = __import__("spack.cmd.test-install",
- fromlist=["BuildId", "create_test_output", "TestResult"])
-
-class MockOutput(object):
- def __init__(self):
- self.results = {}
-
- def add_test(self, buildId, passed=True, buildInfo=None):
- self.results[buildId] = passed
-
- def write_to(self, stream):
- pass
-
-class MockSpec(object):
- def __init__(self, name, version, hashStr=None):
- self.dependencies = {}
- self.name = name
- self.version = version
- self.hash = hashStr if hashStr else hash((name, version))
-
- def traverse(self, order=None):
- allDeps = itertools.chain.from_iterable(i.traverse() for i in
- self.dependencies.itervalues())
- return set(itertools.chain([self], allDeps))
-
- def dag_hash(self):
- return self.hash
-
- def to_yaml(self):
- return "<<<MOCK YAML {0}>>>".format(test_install.BuildId(self).stringId())
-
-class MockPackage(object):
- def __init__(self, buildLogPath):
- self.installed = False
- self.build_log_path = buildLogPath
-
-specX = MockSpec("X", "1.2.0")
-specY = MockSpec("Y", "2.3.8")
-specX.dependencies['Y'] = specY
-pkgX = MockPackage('logX')
-pkgY = MockPackage('logY')
-bIdX = test_install.BuildId(specX)
-bIdY = test_install.BuildId(specY)
-
-class UnitInstallTest(unittest.TestCase):
- """Tests test-install where X->Y"""
-
- def setUp(self):
- super(UnitInstallTest, self).setUp()
-
- pkgX.installed = False
- pkgY.installed = False
-
- self.saved_db = spack.repo
- pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
- spack.repo = pkgDb
-
-
- def tearDown(self):
- super(UnitInstallTest, self).tearDown()
-
- spack.repo = self.saved_db
-
- def test_installing_both(self):
- mo = MockOutput()
-
- pkgX.installed = True
- pkgY.installed = True
- test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=mock_fetch_log)
-
- self.assertEqual(mo.results,
- {bIdX:test_install.TestResult.PASSED,
- bIdY:test_install.TestResult.PASSED})
-
-
- def test_dependency_already_installed(self):
- mo = MockOutput()
-
- pkgX.installed = True
- pkgY.installed = True
- test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log)
- self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED})
-
- #TODO: add test(s) where Y fails to install
-
-
-class MockPackageDb(object):
- def __init__(self, init=None):
- self.specToPkg = {}
- if init:
- self.specToPkg.update(init)
-
- def get(self, spec):
- return self.specToPkg[spec]
-
-def mock_fetch_log(path):
- return []