-rw-r--r--  lib/spack/docs/packaging_guide.rst                            |  14
-rw-r--r--  lib/spack/spack/__init__.py                                   |   7
-rw-r--r--  lib/spack/spack/build_environment.py                          | 196
-rw-r--r--  lib/spack/spack/cmd/module.py                                 |   2
-rw-r--r--  lib/spack/spack/cmd/uninstall.py                              |  23
-rw-r--r--  lib/spack/spack/compilers/gcc.py                              |   3
-rw-r--r--  lib/spack/spack/concretize.py                                 |   4
-rw-r--r--  lib/spack/spack/directory_layout.py                           |   2
-rw-r--r--  lib/spack/spack/environment.py                                | 252
-rw-r--r--  lib/spack/spack/modules.py                                    | 234
-rw-r--r--  lib/spack/spack/package.py                                    | 192
-rw-r--r--  lib/spack/spack/test/__init__.py                              |   3
-rw-r--r--  lib/spack/spack/test/concretize.py                            |   7
-rw-r--r--  lib/spack/spack/test/database.py                              |   1
-rw-r--r--  lib/spack/spack/test/environment.py                           |  73
-rw-r--r--  lib/spack/spack/url.py                                        |   2
-rw-r--r--  lib/spack/spack/util/compression.py                           |   7
-rw-r--r--  lib/spack/spack/util/environment.py                           |   8
-rwxr-xr-x  share/spack/setup-env.sh                                      |   2
-rw-r--r--  var/spack/repos/builtin/packages/arpack-ng/package.py         |   4
-rw-r--r--  var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch    |  15
-rw-r--r--  var/spack/repos/builtin/packages/eigen/package.py             |   2
-rw-r--r--  var/spack/repos/builtin/packages/llvm/package.py              |  17
-rw-r--r--  var/spack/repos/builtin/packages/mpich/package.py             |  17
-rw-r--r--  var/spack/repos/builtin/packages/mvapich2/package.py          |   2
-rw-r--r--  var/spack/repos/builtin/packages/netlib-lapack/package.py     |   7
-rw-r--r--  var/spack/repos/builtin/packages/netlib-scalapack/package.py  |  31
-rw-r--r--  var/spack/repos/builtin/packages/octave/package.py            |   2
-rw-r--r--  var/spack/repos/builtin/packages/openblas/package.py          |   6
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/package.py           |  13
-rw-r--r--  var/spack/repos/builtin/packages/paraview/package.py          |   8
-rw-r--r--  var/spack/repos/builtin/packages/petsc/package.py             |  20
-rw-r--r--  var/spack/repos/builtin/packages/py-nose/package.py           |   3
-rw-r--r--  var/spack/repos/builtin/packages/python/package.py            |  40
-rw-r--r--  var/spack/repos/builtin/packages/qt/package.py                |   9
-rw-r--r--  var/spack/repos/builtin/packages/ruby/package.py              |  30
-rw-r--r--  var/spack/repos/builtin/packages/suite-sparse/package.py (renamed from var/spack/repos/builtin/packages/SuiteSparse/package.py) |   3
-rw-r--r--  var/spack/repos/builtin/packages/superlu-dist/package.py      |  63
38 files changed, 988 insertions, 336 deletions
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index c1077e4497..519c0da232 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1844,6 +1844,20 @@ dedicated process.
.. _prefix-objects:
+
+Failing the build
+----------------------
+
+Sometimes you don't want a package to successfully install unless some
+condition is true. You can explicitly cause the build to fail from
+``install()`` by raising an ``InstallError``, for example:
+
+.. code-block:: python
+
+ if spec.architecture.startswith('darwin'):
+ raise InstallError('This package does not build on Mac OS X!')
+
+
Prefix objects
----------------------
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 3051d3f742..aee11f061f 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -188,3 +188,10 @@ __all__ += spack.directives.__all__
import spack.util.executable
from spack.util.executable import *
__all__ += spack.util.executable.__all__
+
+from spack.package import \
+ install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
+ InstallError, ExternalPackageError
+__all__ += [
+ 'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
+ 'InstallError', 'ExternalPackageError']
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 87fc310b5a..119a255a34 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -3,7 +3,7 @@ This module contains all routines related to setting up the package
build environment. All of this is set up by package.py just before
install() is called.
-There are two parts to the bulid environment:
+There are two parts to the build environment:
1. Python build environment (i.e. install() method)
@@ -13,7 +13,7 @@ There are two parts to the bulid environment:
the package's module scope. Ths allows package writers to call
them all directly in Package.install() without writing 'self.'
everywhere. No, this isn't Pythonic. Yes, it makes the code more
- readable and more like the shell script from whcih someone is
+ readable and more like the shell script from which someone is
likely porting.
2. Build execution environment
@@ -27,17 +27,18 @@ There are two parts to the bulid environment:
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
-import os
-import sys
-import shutil
import multiprocessing
+import os
import platform
-from llnl.util.filesystem import *
+import shutil
+import sys
import spack
-import spack.compilers as compilers
-from spack.util.executable import Executable, which
+import llnl.util.tty as tty
+from llnl.util.filesystem import *
+from spack.environment import EnvironmentModifications, validate
from spack.util.environment import *
+from spack.util.executable import Executable, which
#
# This can be set by the user to globally disable parallel builds.
@@ -83,85 +84,88 @@ class MakeExecutable(Executable):
return super(MakeExecutable, self).__call__(*args, **kwargs)
-def set_compiler_environment_variables(pkg):
- assert(pkg.spec.concrete)
- compiler = pkg.compiler
-
+def set_compiler_environment_variables(pkg, env):
+ assert pkg.spec.concrete
# Set compiler variables used by CMake and autotools
- assert all(key in pkg.compiler.link_paths
- for key in ('cc', 'cxx', 'f77', 'fc'))
+ assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+ # Populate an object with the list of environment modifications
+ # and return it
+ # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
link_dir = spack.build_env_path
- os.environ['CC'] = join_path(link_dir, pkg.compiler.link_paths['cc'])
- os.environ['CXX'] = join_path(link_dir, pkg.compiler.link_paths['cxx'])
- os.environ['F77'] = join_path(link_dir, pkg.compiler.link_paths['f77'])
- os.environ['FC'] = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
+ env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
+ env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
+ env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
+ compiler = pkg.compiler
if compiler.cc:
- os.environ['SPACK_CC'] = compiler.cc
+ env.set('SPACK_CC', compiler.cc)
if compiler.cxx:
- os.environ['SPACK_CXX'] = compiler.cxx
+ env.set('SPACK_CXX', compiler.cxx)
if compiler.f77:
- os.environ['SPACK_F77'] = compiler.f77
+ env.set('SPACK_F77', compiler.f77)
if compiler.fc:
- os.environ['SPACK_FC'] = compiler.fc
+ env.set('SPACK_FC', compiler.fc)
- os.environ['SPACK_COMPILER_SPEC'] = str(pkg.spec.compiler)
+ env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+ return env
-def set_build_environment_variables(pkg):
- """This ensures a clean install environment when we build packages.
+def set_build_environment_variables(pkg, env):
+ """
+ This ensures a clean install environment when we build packages
"""
# Add spack build environment path with compiler wrappers first in
# the path. We add both spack.env_path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name
- # when we're building wtih the wrappers.
+ # when we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
- def add_env_path(path):
- env_paths.append(path)
- ci = join_path(path, 'case-insensitive')
- if os.path.isdir(ci): env_paths.append(ci)
- add_env_path(spack.build_env_path)
- add_env_path(join_path(spack.build_env_path, pkg.compiler.name))
-
- path_put_first("PATH", env_paths)
- path_set(SPACK_ENV_PATH, env_paths)
-
- # Prefixes of all of the package's dependencies go in
- # SPACK_DEPENDENCIES
+ for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
+ env_paths.append(item)
+ ci = join_path(item, 'case-insensitive')
+ if os.path.isdir(ci):
+ env_paths.append(ci)
+
+ for item in reversed(env_paths):
+ env.prepend_path('PATH', item)
+ env.set_path(SPACK_ENV_PATH, env_paths)
+
+ # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
- path_set(SPACK_DEPENDENCIES, dep_prefixes)
+ env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
+ env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
# Install prefix
- os.environ[SPACK_PREFIX] = pkg.prefix
+ env.set(SPACK_PREFIX, pkg.prefix)
# Install root prefix
- os.environ[SPACK_INSTALL] = spack.install_path
+ env.set(SPACK_INSTALL, spack.install_path)
# Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
- pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
+ env.unset('LD_LIBRARY_PATH')
+ env.unset('LD_RUN_PATH')
+ env.unset('DYLD_LIBRARY_PATH')
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = ['%s/bin' % prefix for prefix in dep_prefixes]
- path_put_first('PATH', [bin for bin in bin_dirs if os.path.isdir(bin)])
+ bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+ for item in bin_dirs:
+ env.prepend_path('PATH', item)
# Working directory for the spack command itself, for debug logs.
if spack.debug:
- os.environ[SPACK_DEBUG] = "TRUE"
- os.environ[SPACK_SHORT_SPEC] = pkg.spec.short_spec
- os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
-
- # Add dependencies to CMAKE_PREFIX_PATH
- path_set("CMAKE_PREFIX_PATH", dep_prefixes)
+ env.set(SPACK_DEBUG, 'TRUE')
+ env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
+ env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
pkg_config_dirs = []
@@ -170,10 +174,12 @@ def set_build_environment_variables(pkg):
pcdir = join_path(p, libdir, 'pkgconfig')
if os.path.isdir(pcdir):
pkg_config_dirs.append(pcdir)
- path_set("PKG_CONFIG_PATH", pkg_config_dirs)
+ env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+
+ return env
-def set_module_variables_for_package(pkg, m):
+def set_module_variables_for_package(pkg, module):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
@@ -183,6 +189,8 @@ def set_module_variables_for_package(pkg, m):
jobs = 1
elif pkg.make_jobs:
jobs = pkg.make_jobs
+
+ m = module
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
@@ -217,7 +225,7 @@ def set_module_variables_for_package(pkg, m):
m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
- m.spack_f90 = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Emulate some shell commands for convenience
m.pwd = os.getcwd
@@ -262,24 +270,63 @@ def parent_class_modules(cls):
return result
+def setup_module_variables_for_dag(pkg):
+ """Set module-scope variables for all packages in the DAG."""
+ for spec in pkg.spec.traverse(order='post'):
+ # If a user makes their own package repo, e.g.
+ # spack.repos.mystuff.libelf.Libelf, and they inherit from
+ # an existing class like spack.repos.original.libelf.Libelf,
+ # then set the module variables for both classes so the
+ # parent class can still use them if it gets called.
+ spkg = spec.package
+ modules = parent_class_modules(spkg.__class__)
+ for mod in modules:
+ set_module_variables_for_package(spkg, mod)
+ set_module_variables_for_package(spkg, spkg.module)
+
+
def setup_package(pkg):
"""Execute all environment setup routines."""
- set_compiler_environment_variables(pkg)
- set_build_environment_variables(pkg)
+ spack_env = EnvironmentModifications()
+ run_env = EnvironmentModifications()
+
+ # Before proceeding, ensure that specs and packages are consistent
+ #
+ # This is a confusing behavior due to how packages are
+ # constructed. `setup_dependent_package` may set attributes on
+ # specs in the DAG for use by other packages' install
+ # method. However, spec.package will look up a package via
+ # spack.repo, which defensively copies specs into packages. This
+ # code ensures that all packages in the DAG have pieces of the
+ # same spec object at build time.
+ #
+ # This is safe for the build process, b/c the build process is a
+ # throwaway environment, but it is kind of dirty.
+ #
+ # TODO: Think about how to avoid this fix and do something cleaner.
+ for s in pkg.spec.traverse(): s.package.spec = s
+
+ set_compiler_environment_variables(pkg, spack_env)
+ set_build_environment_variables(pkg, spack_env)
+ setup_module_variables_for_dag(pkg)
+
+ # Allow dependencies to modify the module
+ spec = pkg.spec
+ for dependency_spec in spec.traverse(root=False):
+ dpkg = dependency_spec.package
+ dpkg.setup_dependent_package(pkg.module, spec)
+
+ # Allow dependencies to set up environment as well
+ for dependency_spec in spec.traverse(root=False):
+ dpkg = dependency_spec.package
+ dpkg.setup_dependent_environment(spack_env, run_env, spec)
- # If a user makes their own package repo, e.g.
- # spack.repos.mystuff.libelf.Libelf, and they inherit from
- # an existing class like spack.repos.original.libelf.Libelf,
- # then set the module variables for both classes so the
- # parent class can still use them if it gets called.
- modules = parent_class_modules(pkg.__class__)
- for mod in modules:
- set_module_variables_for_package(pkg, mod)
+ # Allow the package to apply some settings.
+ pkg.setup_environment(spack_env, run_env)
- # Allow dependencies to set up environment as well.
- for dep_spec in pkg.spec.traverse(root=False):
- dep_spec.package.setup_dependent_environment(
- pkg.module, dep_spec, pkg.spec)
+ # Make sure nothing's strange about the Spack environment.
+ validate(spack_env, tty.warn)
+ spack_env.apply_modifications()
def fork(pkg, function):
@@ -296,23 +343,23 @@ def fork(pkg, function):
# do stuff
build_env.fork(pkg, child_fun)
- Forked processes are run with the build environemnt set up by
+ Forked processes are run with the build environment set up by
spack.build_environment. This allows package authors to have
- full control over the environment, etc. without offecting
+ full control over the environment, etc. without affecting
other builds that might be executed in the same spack call.
- If something goes wrong, the child process is expected toprint
+ If something goes wrong, the child process is expected to print
the error and the parent process will exit with error as
well. If things go well, the child exits and the parent
carries on.
"""
try:
pid = os.fork()
- except OSError, e:
+ except OSError as e:
raise InstallError("Unable to fork build process: %s" % e)
if pid == 0:
- # Give the child process the package's build environemnt.
+ # Give the child process the package's build environment.
setup_package(pkg)
try:
@@ -323,7 +370,7 @@ def fork(pkg, function):
# which interferes with unit tests.
os._exit(0)
- except spack.error.SpackError, e:
+ except spack.error.SpackError as e:
e.die()
except:
@@ -338,8 +385,7 @@ def fork(pkg, function):
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
- raise InstallError("Installation process had nonzero exit code."
- .format(str(returncode)))
+ raise InstallError("Installation process had nonzero exit code: {0}.".format(returncode))
class InstallError(spack.error.SpackError):
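
Taken together, the rewritten setup_package() above follows a collect-validate-apply pattern: requests accumulate in an EnvironmentModifications object, packages and dependencies append their own, and only at the end is anything written to os.environ. Below is a minimal sketch of that flow, assuming Spack's lib directory is importable; the wrapper and dependency paths are illustrative, and a plain stderr callback stands in for tty.warn.

import sys

from spack.environment import EnvironmentModifications, validate


def stage_build_environment(wrapper_dirs, dep_prefixes):
    # Collect requests the way set_build_environment_variables() does;
    # nothing touches os.environ until apply_modifications() runs.
    env = EnvironmentModifications()

    # Compiler wrapper directories go first on PATH (reversed so the first
    # entry ends up in front, mirroring the loop in the code above).
    for directory in reversed(wrapper_dirs):
        env.prepend_path('PATH', directory)

    # Dependency prefixes are exported as single path lists.
    env.set_path('SPACK_DEPENDENCIES', dep_prefixes)
    env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)

    # Variables that could leak external libraries into the build.
    for variable in ('LD_LIBRARY_PATH', 'LD_RUN_PATH', 'DYLD_LIBRARY_PATH'):
        env.unset(variable)

    # Warn about suspicious patterns, then replay everything at once.
    validate(env, lambda msg: sys.stderr.write(msg + '\n'))
    env.apply_modifications()


stage_build_environment(['/opt/spack/env', '/opt/spack/env/gcc'],
                        ['/opt/deps/zlib', '/opt/deps/hdf5'])

Staging the requests this way is what lets validate() report, with file and line information, a set or unset that would clobber an earlier modification of the same variable.
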
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 1d6867c1d9..315d9fc926 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -80,7 +80,7 @@ def module_find(mtype, spec_array):
if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec))
- print mod.use_name
+ print(mod.use_name)
def module_refresh():
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index d01aa2136b..350ef372cb 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -22,6 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
import sys
import argparse
@@ -63,12 +64,12 @@ def uninstall(parser, args):
matching_specs = spack.installed_db.query(spec)
if not args.all and len(matching_specs) > 1:
tty.error("%s matches multiple packages:" % spec)
- print
+ print()
display_specs(matching_specs, long=True)
- print
- print "You can either:"
- print " a) Use a more specific spec, or"
- print " b) use spack uninstall -a to uninstall ALL matching specs."
+ print()
+ print("You can either:")
+ print(" a) Use a more specific spec, or")
+ print(" b) use spack uninstall -a to uninstall ALL matching specs.")
sys.exit(1)
if len(matching_specs) == 0:
@@ -79,7 +80,7 @@ def uninstall(parser, args):
try:
# should work if package is known to spack
pkgs.append(s.package)
- except spack.repository.UnknownPackageError, e:
+ except spack.repository.UnknownPackageError as e:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(s).do_uninstall(force=True)
@@ -94,11 +95,11 @@ def uninstall(parser, args):
for pkg in pkgs:
try:
pkg.do_uninstall(force=args.force)
- except PackageStillNeededError, e:
+ except PackageStillNeededError as e:
tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True))
- print
- print "The following packages depend on it:"
+ print('')
+ print("The following packages depend on it:")
display_specs(e.dependents, long=True)
- print
- print "You can use spack uninstall -f to force this action."
+ print('')
+ print("You can use spack uninstall -f to force this action.")
sys.exit(1)
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 495b638a3a..64214db32d 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -40,7 +40,8 @@ class Gcc(Compiler):
fc_names = ['gfortran']
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
- suffixes = [r'-mp-\d\.\d']
+ # Homebrew and Linuxes may build gcc with -X, -X.Y suffixes
+ suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
# Named wrapper links within spack.build_env_path
link_paths = {'cc' : 'gcc/gcc',
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 8083f91982..2e576743ec 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -241,7 +241,7 @@ class DefaultConcretizer(object):
return False
#Find the another spec that has a compiler, or the root if none do
- other_spec = find_spec(spec, lambda(x) : x.compiler)
+ other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -288,7 +288,7 @@ def find_spec(spec, condition):
if condition(spec):
return spec
- return None # Nohting matched the condition.
+ return None # Nothing matched the condition.
def cmp_specs(lhs, rhs):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 39ee4e203d..da8f1aa1bc 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -150,7 +150,7 @@ class DirectoryLayout(object):
if os.path.exists(path):
try:
shutil.rmtree(path)
- except exceptions.OSError, e:
+ except exceptions.OSError as e:
raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
new file mode 100644
index 0000000000..72aafa4e2d
--- /dev/null
+++ b/lib/spack/spack/environment.py
@@ -0,0 +1,252 @@
+import os
+import os.path
+import collections
+import inspect
+
+
+class NameModifier(object):
+ def __init__(self, name, **kwargs):
+ self.name = name
+ self.args = {'name': name}
+ self.args.update(kwargs)
+
+
+class NameValueModifier(object):
+ def __init__(self, name, value, **kwargs):
+ self.name = name
+ self.value = value
+ self.args = {'name': name, 'value': value}
+ self.args.update(kwargs)
+
+
+class SetEnv(NameValueModifier):
+ def execute(self):
+ os.environ[self.name] = str(self.value)
+
+
+class UnsetEnv(NameModifier):
+ def execute(self):
+ os.environ.pop(self.name, None) # Avoid throwing if the variable was not set
+
+
+class SetPath(NameValueModifier):
+ def execute(self):
+ string_path = concatenate_paths(self.value)
+ os.environ[self.name] = string_path
+
+
+class AppendPath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories.append(os.path.normpath(self.value))
+ os.environ[self.name] = ':'.join(directories)
+
+
+class PrependPath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories = [os.path.normpath(self.value)] + directories
+ os.environ[self.name] = ':'.join(directories)
+
+
+class RemovePath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)]
+ os.environ[self.name] = ':'.join(directories)
+
+
+class EnvironmentModifications(object):
+ """
+ Keeps track of requests to modify the current environment.
+
+ Each call to a method that modifies the environment stores extra information about the caller in the request:
+ - 'filename' : filename of the module where the caller is defined
+ - 'lineno': line number where the request occurred
+ - 'context' : line of code that issued the request
+ """
+
+ def __init__(self, other=None):
+ """
+ Initializes a new instance, copying commands from other if it is not None
+
+ Args:
+ other: another instance of EnvironmentModifications from which modifications will be copied (optional)
+ """
+ self.env_modifications = []
+ if other is not None:
+ self.extend(other)
+
+ def __iter__(self):
+ return iter(self.env_modifications)
+
+ def __len__(self):
+ return len(self.env_modifications)
+
+ def extend(self, other):
+ self._check_other(other)
+ self.env_modifications.extend(other.env_modifications)
+
+ @staticmethod
+ def _check_other(other):
+ if not isinstance(other, EnvironmentModifications):
+ raise TypeError('other must be an instance of EnvironmentModifications')
+
+ def _get_outside_caller_attributes(self):
+ stack = inspect.stack()
+ try:
+ _, filename, lineno, _, context, index = stack[2]
+ context = context[index].strip()
+ except Exception:
+ filename, lineno, context = 'unknown file', 'unknown line', 'unknown context'
+ args = {
+ 'filename': filename,
+ 'lineno': lineno,
+ 'context': context
+ }
+ return args
+
+ def set(self, name, value, **kwargs):
+ """
+ Stores in the current object a request to set an environment variable
+
+ Args:
+ name: name of the environment variable to be set
+ value: value of the environment variable
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = SetEnv(name, value, **kwargs)
+ self.env_modifications.append(item)
+
+ def unset(self, name, **kwargs):
+ """
+ Stores in the current object a request to unset an environment variable
+
+ Args:
+ name: name of the environment variable to be set
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = UnsetEnv(name, **kwargs)
+ self.env_modifications.append(item)
+
+ def set_path(self, name, elts, **kwargs):
+ """
+ Stores a request to set a path generated from a list.
+
+ Args:
+ name: name of the environment variable to be set.
+ elts: elements of the path to set.
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = SetPath(name, elts, **kwargs)
+ self.env_modifications.append(item)
+
+ def append_path(self, name, path, **kwargs):
+ """
+ Stores in the current object a request to append a path to a path list
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be appended
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = AppendPath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def prepend_path(self, name, path, **kwargs):
+ """
+ Same as `append_path`, but the path is pre-pended
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be pre-pended
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = PrependPath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def remove_path(self, name, path, **kwargs):
+ """
+ Stores in the current object a request to remove a path from a path list
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be removed
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = RemovePath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def group_by_name(self):
+ """
+ Returns a dict of the modifications grouped by variable name
+
+ Returns:
+ dict mapping the environment variable name to the modifications to be done on it
+ """
+ modifications = collections.defaultdict(list)
+ for item in self:
+ modifications[item.name].append(item)
+ return modifications
+
+ def clear(self):
+ """
+ Clears the current list of modifications
+ """
+ del self.env_modifications[:]  # list.clear() is not available under Python 2
+
+ def apply_modifications(self):
+ """
+ Applies the modifications and clears the list
+ """
+ modifications = self.group_by_name()
+ # Apply the modifications to the environment variables one variable at a time
+ for name, actions in sorted(modifications.items()):
+ for x in actions:
+ x.execute()
+
+
+def concatenate_paths(paths):
+ """
+ Concatenates an iterable of paths into a string of colon-separated paths
+
+ Args:
+ paths: iterable of paths
+
+ Returns:
+ string
+ """
+ return ':'.join(str(item) for item in paths)
+
+
+def set_or_unset_not_first(variable, changes, errstream):
+ """
+ Check if we are going to set or unset something after other modifications have already been requested
+ """
+ indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
+ if indexes:
+ good = '\t \t{context} at {filename}:{lineno}'
+ nogood = '\t--->\t{context} at {filename}:{lineno}'
+ errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable))
+ for ii, item in enumerate(changes):
+ print_format = nogood if ii in indexes else good
+ errstream(print_format.format(**item.args))
+
+
+def validate(env, errstream):
+ """
+ Validates the environment modifications to check for the presence of suspicious patterns. Emits a warning for
+ everything that was found.
+
+ Current checks:
+ - set or unset variables after other changes on the same variable
+
+ Args:
+ env: instance of EnvironmentModifications to be validated
+ errstream: callable used to report the suspicious modifications that were found
+ """
+ modifications = env.group_by_name()
+ for variable, list_of_changes in sorted(modifications.items()):
+ set_or_unset_not_first(variable, list_of_changes, errstream)
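
The class is deliberately lazy: every call records a request object (SetEnv, PrependPath, and so on) plus the caller's filename, line number and context, and nothing changes until apply_modifications() is invoked. A minimal usage sketch, assuming Spack's lib directory is importable; the variable names and paths are made up for illustration.

import os

from spack.environment import EnvironmentModifications

env = EnvironmentModifications()
env.set('EXAMPLE_HOME', '/opt/example')          # becomes a SetEnv request
env.prepend_path('PATH', '/opt/example/bin')     # becomes a PrependPath request
env.append_path('MANPATH', '/opt/example/man')   # becomes an AppendPath request
env.unset('LD_PRELOAD')                          # becomes an UnsetEnv request

# Each request remembers where it came from, which validate() uses for
# its diagnostics.
for request in env:
    print('%s requested at %s:%s' % (request.name,
                                     request.args['filename'],
                                     request.args['lineno']))

# Only now is os.environ actually modified.
env.apply_modifications()
assert os.environ['EXAMPLE_HOME'] == '/opt/example'
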
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index c27043db8c..05c93cd3e6 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -22,14 +22,12 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""This module contains code for creating environment modules, which
-can include dotkits, tcl modules, lmod, and others.
+"""
+This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others.
-The various types of modules are installed by post-install hooks and
-removed after an uninstall by post-uninstall hooks. This class
-consolidates the logic for creating an abstract description of the
-information that module systems need. Currently that includes a
-number of directories to be appended to paths in the user's environment:
+The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks.
+This class consolidates the logic for creating an abstract description of the information that module systems need.
+Currently that includes a number of directories to be appended to paths in the user's environment:
* /bin directories to be appended to PATH
* /lib* directories for LD_LIBRARY_PATH
@@ -37,28 +35,25 @@ number of directories to be appended to paths in the user's environment:
* /man* and /share/man* directories for MANPATH
* the package prefix for CMAKE_PREFIX_PATH
-This module also includes logic for coming up with unique names for
-the module files so that they can be found by the various
-shell-support files in $SPACK/share/spack/setup-env.*.
+This module also includes logic for coming up with unique names for the module files so that they can be found by the
+various shell-support files in $SPACK/share/spack/setup-env.*.
-Each hook in hooks/ implements the logic for writing its specific type
-of module file.
+Each hook in hooks/ implements the logic for writing its specific type of module file.
"""
-__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-
import os
+import os.path
import re
-import textwrap
import shutil
-from glob import glob
+import textwrap
import llnl.util.tty as tty
+import spack
from llnl.util.filesystem import join_path, mkdirp
+from spack.environment import *
-import spack
+__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-"""Registry of all types of modules. Entries created by EnvModule's
- metaclass."""
+# Registry of all types of modules. Entries created by EnvModule's metaclass
module_types = {}
@@ -79,8 +74,43 @@ def print_help():
"")
+def inspect_path(prefix):
+ """
+ Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment
+ accordingly when an item is found.
+
+ Args:
+ prefix: prefix of the installation
+
+ Returns:
+ instance of EnvironmentModifications containing the requested modifications
+ """
+ env = EnvironmentModifications()
+ # Inspect the prefix to check for the existence of common directories
+ prefix_inspections = {
+ 'bin': ('PATH',),
+ 'man': ('MANPATH',),
+ 'lib': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
+ 'lib64': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
+ 'include': ('CPATH',)
+ }
+ for attribute, variables in prefix_inspections.items():
+ expected = getattr(prefix, attribute)
+ if os.path.isdir(expected):
+ for variable in variables:
+ env.prepend_path(variable, expected)
+ # PKGCONFIG
+ for expected in (join_path(prefix.lib, 'pkgconfig'), join_path(prefix.lib64, 'pkgconfig')):
+ if os.path.isdir(expected):
+ env.prepend_path('PKG_CONFIG_PATH', expected)
+ # CMake related variables
+ env.prepend_path('CMAKE_PREFIX_PATH', prefix)
+ return env
+
+
class EnvModule(object):
name = 'env_module'
+ formats = {}
class __metaclass__(type):
def __init__(cls, name, bases, dict):
@@ -88,66 +118,32 @@ class EnvModule(object):
if cls.name != 'env_module':
module_types[cls.name] = cls
-
def __init__(self, spec=None):
- # category in the modules system
- # TODO: come up with smarter category names.
- self.category = "spack"
+ self.spec = spec
+ self.pkg = spec.package # Just stored for convenience
- # Descriptions for the module system's UI
- self.short_description = ""
- self.long_description = ""
+ # short description default is just the package + version
+ # packages can provide this optional attribute
+ self.short_description = spec.format("$_ $@")
+ if hasattr(self.pkg, 'short_description'):
+ self.short_description = self.pkg.short_description
- # dict pathname -> list of directories to be prepended to in
- # the module file.
- self._paths = None
- self.spec = spec
+ # long description is the docstring with reduced whitespace.
+ self.long_description = None
+ if self.spec.package.__doc__:
+ self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
@property
- def paths(self):
- if self._paths is None:
- self._paths = {}
-
- def add_path(path_name, directory):
- path = self._paths.setdefault(path_name, [])
- path.append(directory)
-
- # Add paths if they exist.
- for var, directory in [
- ('PATH', self.spec.prefix.bin),
- ('MANPATH', self.spec.prefix.man),
- ('MANPATH', self.spec.prefix.share_man),
- ('LIBRARY_PATH', self.spec.prefix.lib),
- ('LIBRARY_PATH', self.spec.prefix.lib64),
- ('LD_LIBRARY_PATH', self.spec.prefix.lib),
- ('LD_LIBRARY_PATH', self.spec.prefix.lib64),
- ('CPATH', self.spec.prefix.include),
- ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib, 'pkgconfig')),
- ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib64, 'pkgconfig'))]:
-
- if os.path.isdir(directory):
- add_path(var, directory)
-
- # Add python path unless it's an actual python installation
- # TODO: is there a better way to do this?
- if self.spec.name != 'python':
- site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
- if site_packages:
- add_path('PYTHONPATH', site_packages[0])
-
- if self.spec.package.extends(spack.spec.Spec('ruby')):
- add_path('GEM_PATH', self.spec.prefix)
-
- # short description is just the package + version
- # TODO: maybe packages can optionally provide it.
- self.short_description = self.spec.format("$_ $@")
-
- # long description is the docstring with reduced whitespace.
- if self.spec.package.__doc__:
- self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
-
- return self._paths
+ def category(self):
+ # Anything defined at the package level takes precedence
+ if hasattr(self.pkg, 'category'):
+ return self.pkg.category
+ # Extensions
+ for extendee in self.pkg.extendees:
+ return '{extendee} extension'.format(extendee=extendee)
+ # Not very descriptive fallback
+ return 'spack installed package'
def write(self):
@@ -156,18 +152,41 @@ class EnvModule(object):
if not os.path.exists(module_dir):
mkdirp(module_dir)
- # If there are no paths, no need for a dotkit.
- if not self.paths:
+ # Environment modifications guessed by inspecting the
+ # installation prefix
+ env = inspect_path(self.spec.prefix)
+
+ # Let the extendee modify their extensions before asking for
+ # package-specific modifications
+ for extendee in self.pkg.extendees:
+ extendee_spec = self.spec[extendee]
+ extendee_spec.package.modify_module(
+ self.pkg.module, extendee_spec, self.spec)
+
+ # Package-specific environment modifications
+ spack_env = EnvironmentModifications()
+ self.spec.package.setup_environment(spack_env, env)
+
+ # TODO : implement site-specific modifications and filters
+ if not env:
return
with open(self.file_name, 'w') as f:
- self._write(f)
-
+ self.write_header(f)
+ for line in self.process_environment_command(env):
+ f.write(line)
- def _write(self, stream):
- """To be implemented by subclasses."""
+ def write_header(self, stream):
raise NotImplementedError()
+ def process_environment_command(self, env):
+ for command in env:
+ try:
+ yield self.formats[type(command)].format(**command.args)
+ except KeyError:
+ tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command)))
+ tty.warn('{context} at {filename}:{lineno}'.format(**command.args))
+
@property
def file_name(self):
@@ -175,14 +194,12 @@ class EnvModule(object):
where this module lives."""
raise NotImplementedError()
-
@property
def use_name(self):
"""Subclasses should implement this to return the name the
module command uses to refer to the package."""
raise NotImplementedError()
-
def remove(self):
mod_file = self.file_name
if os.path.exists(mod_file):
@@ -193,19 +210,23 @@ class Dotkit(EnvModule):
name = 'dotkit'
path = join_path(spack.share_path, "dotkit")
+ formats = {
+ PrependPath: 'dk_alter {name} {value}\n',
+ SetEnv: 'dk_setenv {name} {value}\n'
+ }
+
@property
def file_name(self):
- return join_path(Dotkit.path, self.spec.architecture,
- '%s.dk' % self.use_name)
+ return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name)
@property
def use_name(self):
return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
self.spec.compiler.name,
- self.spec.compiler.version,
+ self.spec.compiler.version,
self.spec.dag_hash())
- def _write(self, dk_file):
+ def write_header(self, dk_file):
# Category
if self.category:
dk_file.write('#c %s\n' % self.category)
@@ -219,50 +240,41 @@ class Dotkit(EnvModule):
for line in textwrap.wrap(self.long_description, 72):
dk_file.write("#h %s\n" % line)
- # Path alterations
- for var, dirs in self.paths.items():
- for directory in dirs:
- dk_file.write("dk_alter %s %s\n" % (var, directory))
-
- # Let CMake find this package.
- dk_file.write("dk_alter CMAKE_PREFIX_PATH %s\n" % self.spec.prefix)
-
class TclModule(EnvModule):
name = 'tcl'
path = join_path(spack.share_path, "modules")
+ formats = {
+ PrependPath: 'prepend-path {name} \"{value}\"\n',
+ AppendPath: 'append-path {name} \"{value}\"\n',
+ RemovePath: 'remove-path {name} \"{value}\"\n',
+ SetEnv: 'setenv {name} \"{value}\"\n',
+ UnsetEnv: 'unsetenv {name}\n'
+ }
+
@property
def file_name(self):
return join_path(TclModule.path, self.spec.architecture, self.use_name)
-
@property
def use_name(self):
return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
self.spec.compiler.name,
- self.spec.compiler.version,
+ self.spec.compiler.version,
self.spec.dag_hash())
-
- def _write(self, m_file):
- # TODO: cateogry?
- m_file.write('#%Module1.0\n')
-
+ def write_header(self, module_file):
+ # TCL Modulefile header
+ module_file.write('#%Module1.0\n')
+ # TODO : category ?
# Short description
if self.short_description:
- m_file.write('module-whatis \"%s\"\n\n' % self.short_description)
+ module_file.write('module-whatis \"%s\"\n\n' % self.short_description)
# Long description
if self.long_description:
- m_file.write('proc ModulesHelp { } {\n')
+ module_file.write('proc ModulesHelp { } {\n')
doc = re.sub(r'"', '\"', self.long_description)
- m_file.write("puts stderr \"%s\"\n" % doc)
- m_file.write('}\n\n')
-
- # Path alterations
- for var, dirs in self.paths.items():
- for directory in dirs:
- m_file.write("prepend-path %s \"%s\"\n" % (var, directory))
-
- m_file.write("prepend-path CMAKE_PREFIX_PATH \"%s\"\n" % self.spec.prefix)
+ module_file.write("puts stderr \"%s\"\n" % doc)
+ module_file.write('}\n\n')
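
Each module type now declares a formats table mapping request classes to a line template, and process_environment_command() renders every recorded request through it. The sketch below replays that rendering step outside of a real module file, reusing the TclModule templates shown above; the prefix is illustrative.

import sys

from spack.environment import EnvironmentModifications, PrependPath, SetEnv

# Template table in the style of TclModule.formats above.
formats = {
    PrependPath: 'prepend-path {name} "{value}"\n',
    SetEnv: 'setenv {name} "{value}"\n',
}

env = EnvironmentModifications()
env.prepend_path('PATH', '/opt/example/bin')
env.set('EXAMPLE_HOME', '/opt/example')

# Same lookup that process_environment_command() performs; the real code
# warns and skips request types that have no template.
for command in env:
    template = formats.get(type(command))
    if template is not None:
        sys.stdout.write(template.format(**command.args))

Running this prints the prepend-path and setenv lines that would land in the generated Tcl module file.
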
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 291056a7b9..9af3221837 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -34,40 +34,34 @@ rundown on spack and how it differs from homebrew, look at the
README.
"""
import os
-import errno
import re
-import shutil
-import time
-import itertools
-import subprocess
-import platform as py_platform
-import multiprocessing
-from urlparse import urlparse, urljoin
import textwrap
-from StringIO import StringIO
+import time
+import glob
import llnl.util.tty as tty
-from llnl.util.tty.log import log_output
-from llnl.util.link_tree import LinkTree
-from llnl.util.filesystem import *
-from llnl.util.lang import *
-
import spack
-import spack.error
+import spack.build_environment
import spack.compilers
-import spack.mirror
-import spack.hooks
import spack.directives
+import spack.error
+import spack.fetch_strategy as fs
+import spack.hooks
+import spack.mirror
import spack.repository
-import spack.build_environment
import spack.url
import spack.util.web
-import spack.fetch_strategy as fs
-from spack.version import *
+from StringIO import StringIO
+from llnl.util.filesystem import *
+from llnl.util.lang import *
+from llnl.util.link_tree import LinkTree
+from llnl.util.tty.log import log_output
from spack.stage import Stage, ResourceStage, StageComposite
-from spack.util.compression import allowed_archive, extension
-from spack.util.executable import ProcessError
+from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment
+from spack.util.executable import ProcessError
+from spack.version import *
+from urlparse import urlparse
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -1008,38 +1002,127 @@ class Package(object):
return __import__(self.__class__.__module__,
fromlist=[self.__class__.__name__])
+ def setup_environment(self, spack_env, run_env):
+ """Set up the compile and runtime environments for a package.
+
+ `spack_env` and `run_env` are `EnvironmentModifications`
+ objects. Package authors can call methods on them to alter
+ the environment within Spack and at runtime.
- def setup_dependent_environment(self, module, spec, dependent_spec):
- """Called before the install() method of dependents.
+ Both `spack_env` and `run_env` are applied within the build
+ process, before this package's `install()` method is called.
+
+ Modifications in `run_env` will *also* be added to the
+ generated environment modules for this package.
Default implementation does nothing, but this can be
- overridden by an extendable package to set up the install
- environment for its extensions. This is useful if there are
- some common steps to installing all extensions for a
- certain package.
+ overridden if the package needs a particular environment.
- Some examples:
+ Examples:
- 1. Installing python modules generally requires PYTHONPATH to
- point to the lib/pythonX.Y/site-packages directory in the
- module's install prefix. This could set that variable.
+ 1. Qt extensions need `QTDIR` set.
- 2. Extensions often need to invoke the 'python' interpreter
- from the Python installation being extended. This routine can
- put a 'python' Execuable object in the module scope for the
- extension package to simplify extension installs.
+ Args:
+ spack_env (EnvironmentModifications): list of
+ modifications to be applied when this package is built
+ within Spack.
- 3. A lot of Qt extensions need QTDIR set. This can be used to do that.
+ run_env (EnvironmentModifications): list of environment
+ changes to be applied when this package is run outside
+ of Spack.
"""
pass
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ """Set up the environment of packages that depend on this one.
+
+ This is similar to `setup_environment`, but it is used to
+ modify the compile and runtime environments of packages that
+ *depend* on this one. This gives packages like Python and
+ others that follow the extension model a way to implement
+ common environment or compile-time settings for dependencies.
+
+ By default, this delegates to self.setup_environment()
+
+ Example :
+
+ 1. Installing python modules generally requires
+ `PYTHONPATH` to point to the lib/pythonX.Y/site-packages
+ directory in the module's install prefix. This could
+ set that variable.
+
+ Args:
+
+ spack_env (EnvironmentModifications): list of
+ modifications to be applied when the dependent package
+ is built within Spack.
+
+ run_env (EnvironmentModifications): list of environment
+ changes to be applied when the dependent package is
+ run outside of Spack.
+
+ dependent_spec (Spec): The spec of the dependent package
+ about to be built. This allows the extendee (self) to
+ query the dependent's state. Note that *this*
+ package's spec is available as `self.spec`.
+
+ This is useful if there are some common steps to installing
+ all extensions for a certain package.
+
+ """
+ self.setup_environment(spack_env, run_env)
+
+
+ def setup_dependent_package(self, module, dependent_spec):
+ """Set up Python module-scope variables for dependent packages.
+
+ Called before the install() method of dependents.
+
+ Default implementation does nothing, but this can be
+ overridden by an extendable package to set up the module of
+ its extensions. This is useful if there are some common steps
+ to installing all extensions for a certain package.
+
+ Example :
+
+ 1. Extensions often need to invoke the `python`
+ interpreter from the Python installation being
+ extended. This routine can put a 'python' Executable
+ object in the module scope for the extension package to
+ simplify extension installs.
+
+ 2. MPI compilers could set some variables in the
+ dependent's scope that point to `mpicc`, `mpicxx`,
+ etc., allowing them to be called by common names
+ regardless of which MPI is used.
+
+ 3. BLAS/LAPACK implementations can set some variables
+ indicating the path to their libraries, since these
+ paths differ by BLAS/LAPACK implementation.
+
+ Args:
+
+ module (module): The Python `module` object of the
+ dependent package. Packages can use this to set
+ module-scope variables for the dependent to use.
+
+ dependent_spec (Spec): The spec of the dependent package
+ about to be built. This allows the extendee (self) to
+ query the dependent's state. Note that *this*
+ package's spec is available as `self.spec`.
+
+ This is useful if there are some common steps to installing
+ all extensions for a certain package.
+
+ """
+ pass
+
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
raise InstallError("Package %s provides no install method!" % self.name)
-
def do_uninstall(self, force=False):
if not self.installed:
raise InstallError(str(self.spec) + " is not installed.")
@@ -1243,6 +1326,27 @@ class Package(object):
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
+def install_dependency_symlinks(pkg, spec, prefix):
+ """Execute a dummy install and flatten dependencies"""
+ flatten_dependencies(spec, prefix)
+
+def flatten_dependencies(spec, flat_dir):
+ """Make each dependency of spec present in dir via symlink."""
+ for dep in spec.traverse(root=False):
+ name = dep.name
+
+ dep_path = spack.install_layout.path_for_spec(dep)
+ dep_files = LinkTree(dep_path)
+
+ os.mkdir(flat_dir+'/'+name)
+
+ conflict = dep_files.find_conflict(flat_dir+'/'+name)
+ if conflict:
+ raise DependencyConflictError(conflict)
+
+ dep_files.merge(flat_dir+'/'+name)
+
+
def validate_package_url(url_string):
"""Determine whether spack can handle a particular URL or not."""
url = urlparse(url_string)
@@ -1330,6 +1434,10 @@ class InstallError(spack.error.SpackError):
super(InstallError, self).__init__(message, long_msg)
+class ExternalPackageError(InstallError):
+ """Raised by install() when a package is only for external use."""
+
+
class PackageStillNeededError(InstallError):
"""Raised when package is still needed by another on uninstall."""
def __init__(self, spec, dependents):
@@ -1380,3 +1488,11 @@ class ExtensionConflictError(ExtensionError):
class ActivationError(ExtensionError):
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
+
+
+class DependencyConflictError(spack.error.SpackError):
+ """Raised when the dependencies cannot be flattened as asked for."""
+ def __init__(self, conflict):
+ super(DependencyConflictError, self).__init__(
+ "%s conflicts with another file in the flattened directory." %(
+ conflict))
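
For package authors, the three hooks documented above split what the old setup_dependent_environment(module, spec, dependent_spec) used to do. The following is a hedged sketch of a package overriding all three; the package name, checksum, paths and variables are hypothetical and not part of this change.

from spack import *


class Example(Package):
    """Hypothetical package showing the new environment hooks."""

    homepage = "http://www.example.com"
    url      = "http://www.example.com/example-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def setup_environment(self, spack_env, run_env):
        # spack_env applies while this package builds; run_env is also
        # written into the generated dotkit/Tcl module file.
        spack_env.set('EXAMPLE_BUILD_VERBOSE', '1')
        run_env.set('EXAMPLE_HOME', self.prefix)

    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        # Seen by packages that depend on (or extend) this one.
        spack_env.set('EXAMPLE_HOME', self.prefix)
        run_env.prepend_path('EXAMPLE_PLUGIN_PATH', dependent_spec.prefix.lib)

    def setup_dependent_package(self, module, dependent_spec):
        # Injects a convenience name into the dependent's module scope,
        # like the Executable example in the docstring above.
        module.example = Executable(join_path(self.prefix.bin, 'example'))

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        make('install')
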
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index d5d8b64765..cd842561e6 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -66,7 +66,8 @@ test_names = ['versions',
'database',
'namespace_trie',
'yaml',
- 'sbang']
+ 'sbang',
+ 'environment']
def list_tests():
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index f264faf17a..08cce09674 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -309,3 +309,10 @@ class ConcretizeTest(MockPackagesTest):
Spec('d')),
Spec('e'))
self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s))
+
+
+ def test_compiler_child(self):
+ s = Spec('mpileaks%clang ^dyninst%gcc')
+ s.concretize()
+ self.assertTrue(s['mpileaks'].satisfies('%clang'))
+ self.assertTrue(s['dyninst'].satisfies('%gcc'))
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 9a57e1f03e..ce6e8a0552 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -26,6 +26,7 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
+import os.path
import multiprocessing
import shutil
import tempfile
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
new file mode 100644
index 0000000000..6c8f5ea43c
--- /dev/null
+++ b/lib/spack/spack/test/environment.py
@@ -0,0 +1,73 @@
+import unittest
+import os
+from spack.environment import EnvironmentModifications
+
+
+class EnvironmentTest(unittest.TestCase):
+ def setUp(self):
+ os.environ.clear()
+ os.environ['UNSET_ME'] = 'foo'
+ os.environ['EMPTY_PATH_LIST'] = ''
+ os.environ['PATH_LIST'] = '/path/second:/path/third'
+ os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+
+ def test_set(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value')
+ env.set('B', 3)
+ env.apply_modifications()
+ self.assertEqual('dummy value', os.environ['A'])
+ self.assertEqual(str(3), os.environ['B'])
+
+ def test_unset(self):
+ env = EnvironmentModifications()
+ self.assertEqual('foo', os.environ['UNSET_ME'])
+ env.unset('UNSET_ME')
+ env.apply_modifications()
+ self.assertRaises(KeyError, os.environ.__getitem__, 'UNSET_ME')
+
+ def test_set_path(self):
+ env = EnvironmentModifications()
+ env.set_path('A', ['foo', 'bar', 'baz'])
+ env.apply_modifications()
+ self.assertEqual('foo:bar:baz', os.environ['A'])
+
+ def test_path_manipulation(self):
+ env = EnvironmentModifications()
+
+ env.append_path('PATH_LIST', '/path/last')
+ env.prepend_path('PATH_LIST', '/path/first')
+
+ env.append_path('EMPTY_PATH_LIST', '/path/middle')
+ env.append_path('EMPTY_PATH_LIST', '/path/last')
+ env.prepend_path('EMPTY_PATH_LIST', '/path/first')
+
+ env.append_path('NEWLY_CREATED_PATH_LIST', '/path/middle')
+ env.append_path('NEWLY_CREATED_PATH_LIST', '/path/last')
+ env.prepend_path('NEWLY_CREATED_PATH_LIST', '/path/first')
+
+ env.remove_path('REMOVE_PATH_LIST', '/remove/this')
+ env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
+
+ env.apply_modifications()
+ self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST'])
+ self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST'])
+ self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST'])
+ self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST'])
+
+ def test_extra_arguments(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value', who='Pkg1')
+ for x in env:
+ assert 'who' in x.args
+ env.apply_modifications()
+ self.assertEqual('dummy value', os.environ['A'])
+
+ def test_extend(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value')
+ env.set('B', 3)
+ copy_construct = EnvironmentModifications(env)
+ self.assertEqual(len(copy_construct), 2)
+ for x, y in zip(env, copy_construct):
+ assert x is y
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index ad551a6ded..f51f05cad7 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -142,7 +142,7 @@ def split_url_extension(path):
def downloaded_file_extension(path):
"""This returns the type of archive a URL refers to. This is
- sometimes confusing becasue of URLs like:
+ sometimes confusing because of URLs like:
(1) https://github.com/petdance/ack/tarball/1.93_02
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index ea1f233bce..5ae5867428 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -27,13 +27,12 @@ import os
from itertools import product
from spack.util.executable import which
-# Supported archvie extensions.
+# Supported archive extensions.
PRE_EXTS = ["tar"]
EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"]
-# Add EXTS last so that .tar.gz is matched *before* tar.gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + EXTS
-
+# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
+ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
def allowed_archive(path):
return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
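
A quick check of what the reordering buys (file names are illustrative): compound suffixes still win, and bare .tar archives are now recognized as well.

from spack.util.compression import ALLOWED_ARCHIVE_TYPES, allowed_archive

# Compound suffixes come first, so 'tar.gz' is tried before plain 'gz'.
assert ALLOWED_ARCHIVE_TYPES.index('tar.gz') < ALLOWED_ARCHIVE_TYPES.index('gz')

assert allowed_archive('example-1.0.tar.gz')
assert allowed_archive('example-1.0.tar')   # matched via the new PRE_EXTS entry
assert not allowed_archive('example-1.0.txt')
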
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index ae8e5708be..55e653fd2f 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -59,14 +59,8 @@ def path_put_first(var_name, directories):
path_set(var_name, new_path)
-def pop_keys(dictionary, *keys):
- for key in keys:
- if key in dictionary:
- dictionary.pop(key)
-
-
def dump_environment(path):
"""Dump the current environment out to a file."""
with open(path, 'w') as env_file:
- for key,val in sorted(os.environ.items()):
+ for key, val in sorted(os.environ.items()):
env_file.write("%s=%s\n" % (key, val))
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index 586a5b836b..764af68400 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -141,7 +141,7 @@ function _spack_pathadd {
fi
# Do the actual prepending here.
- eval "_pa_oldvalue=\$${_pa_varname}"
+ eval "_pa_oldvalue=\${${_pa_varname}:-}"
if [ -d "$_pa_new_path" ] && [[ ":$_pa_oldvalue:" != *":$_pa_new_path:"* ]]; then
if [ -n "$_pa_oldvalue" ]; then
diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py
index 0b49d14202..614071cf53 100644
--- a/var/spack/repos/builtin/packages/arpack-ng/package.py
+++ b/var/spack/repos/builtin/packages/arpack-ng/package.py
@@ -35,6 +35,10 @@ class ArpackNg(Package):
variant('shared', default=True, description='Enables the build of shared libraries')
variant('mpi', default=False, description='Activates MPI support')
+ # The function pdlamch10 does not set the return variable. This is fixed upstream
+ # see https://github.com/opencollab/arpack-ng/issues/34
+ patch('pdlamch10.patch', when='@3.3:')
+
depends_on('blas')
depends_on('lapack')
depends_on('mpi', when='+mpi')
diff --git a/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch b/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch
new file mode 100644
index 0000000000..922828909f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch
@@ -0,0 +1,15 @@
+diff --git a/PARPACK/SRC/MPI/pdlamch10.f b/PARPACK/SRC/MPI/pdlamch10.f
+index 6571da9..2882c2e 100644
+--- a/PARPACK/SRC/MPI/pdlamch10.f
++++ b/PARPACK/SRC/MPI/pdlamch10.f
+@@ -86,8 +86,8 @@
+ TEMP = TEMP1
+ END IF
+ *
+- PDLAMCH = TEMP
++ PDLAMCH10 = TEMP
+ *
+-* End of PDLAMCH
++* End of PDLAMCH10
+ *
+ END
diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py
index e40046b452..8d6e672f86 100644
--- a/var/spack/repos/builtin/packages/eigen/package.py
+++ b/var/spack/repos/builtin/packages/eigen/package.py
@@ -48,7 +48,7 @@ class Eigen(Package):
depends_on('metis', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')
- depends_on('SuiteSparse', when='+suitesparse')
+ depends_on('suite-sparse', when='+suitesparse')
depends_on('mpfr@2.3.0:') # Eigen 3.2.7 requires at least 2.3.0
depends_on('gmp')
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index 280e400f69..1d25d59e50 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -52,7 +52,7 @@ class Llvm(Package):
depends_on('cmake @2.8.12.2:')
# Universal dependency
- depends_on('python@2.7:')
+ depends_on('python@2.7:2.8') # Seems not to support python 3.X.Y
# lldb dependencies
depends_on('ncurses', when='+lldb')
@@ -133,6 +133,21 @@ class Llvm(Package):
}
},
{
+ 'version' : '3.8.0',
+ 'md5':'07a7a74f3c6bd65de4702bf941b511a0',
+ 'resources' : {
+ 'compiler-rt' : 'd6fcbe14352ffb708e4d1ac2e48bb025',
+ 'openmp' : '8fd7cc35d48051613cf1e750e9f22e40',
+ 'polly' : '1b3b20f52d34a4024e21a4ea7112caa7',
+ 'libcxx' : 'd6e0bdbbee39f7907ad74fd56d03b88a',
+ 'libcxxabi' : 'bbe6b4d72c7c5978550d370af529bcf7',
+ 'clang' : 'cc99e7019bb74e6459e80863606250c5',
+ 'clang-tools-extra' : 'c2344f50e0eea0b402f0092a80ddc036',
+ 'lldb' : 'a5da35ed9cc8c8817ee854e3dbfba00e',
+ 'llvm-libunwind' : '162ade468607f153cca12be90b5194fa',
+ }
+ },
+ {
'version' : '3.7.1',
'md5':'bf8b3a2c79e61212c5409041dfdbd319',
'resources' : {
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index e2b3654c19..b20dc8dd60 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class Mpich(Package):
"""MPICH is a high performance and widely portable implementation of
the Message Passing Interface (MPI) standard."""
@@ -46,14 +47,16 @@ class Mpich(Package):
provides('mpi@:3.0', when='@3:')
provides('mpi@:1.3', when='@1:')
- def setup_dependent_environment(self, module, spec, dep_spec):
- """For dependencies, make mpicc's use spack wrapper."""
- os.environ['MPICH_CC'] = os.environ['CC']
- os.environ['MPICH_CXX'] = os.environ['CXX']
- os.environ['MPICH_F77'] = os.environ['F77']
- os.environ['MPICH_F90'] = os.environ['FC']
- os.environ['MPICH_FC'] = os.environ['FC']
+ def setup_dependent_environment(self, env, dependent_spec):
+ env.set('MPICH_CC', spack_cc)
+ env.set('MPICH_CXX', spack_cxx)
+ env.set('MPICH_F77', spack_f77)
+ env.set('MPICH_F90', spack_f90)
+ env.set('MPICH_FC', spack_fc)
+ def setup_dependent_package(self, module, dep_spec):
+ """For dependencies, make mpicc's use spack wrapper."""
+ # FIXME : is this necessary ? Shouldn't this be part of a contract with MPI providers?
module.mpicc = join_path(self.prefix.bin, 'mpicc')
def install(self, spec, prefix):
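
The new setup_dependent_package hook above injects an mpicc path into the dependent package's module namespace. A rough sketch of how a dependent could rely on that injected name; the package below is hypothetical (name, URL, and placeholder checksum included) and only illustrates the pattern:

    from spack import *

    class HypotheticalMpiApp(Package):
        """Hypothetical package that builds against an MPI provider."""
        homepage = "http://example.com"
        url      = "http://example.com/mpi-app-1.0.tar.gz"

        version('1.0', '0' * 32)  # placeholder checksum for the sketch

        depends_on('mpi')

        def install(self, spec, prefix):
            # `mpicc` is made available in this module by the MPI provider's
            # setup_dependent_package(), e.g. module.mpicc in the MPICH hunk above.
            configure('--prefix=%s' % prefix, 'CC=%s' % mpicc)
            make()
            make('install')
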
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index af5ed1b088..e4e95f92af 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -123,7 +123,7 @@ class Mvapich2(Package):
count += 1
if count > 1:
raise RuntimeError('network variants are mutually exclusive (only one can be selected at a time)')
-
+ network_options = []
# From here on I can suppose that only one variant has been selected
if self.enabled(Mvapich2.PSM) in spec:
network_options = ["--with-device=ch3:psm"]
diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py
index 741f4af421..78c5a053fe 100644
--- a/var/spack/repos/builtin/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py
@@ -20,13 +20,13 @@ class NetlibLapack(Package):
version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4')
variant('shared', default=False, description="Build shared library version")
+ variant('fpic', default=False, description="Build with -fpic compiler option")
# virtual dependency
provides('lapack')
# blas is a virtual dependency.
depends_on('blas')
-
depends_on('cmake')
# Doesn't always build correctly in parallel
@@ -37,24 +37,23 @@ class NetlibLapack(Package):
blas = self.spec['netlib-blas']
return [join_path(blas.prefix.lib, 'blas.a')]
-
@when('^atlas')
def get_blas_libs(self):
blas = self.spec['atlas']
return [join_path(blas.prefix.lib, l)
for l in ('libf77blas.a', 'libatlas.a')]
-
def install(self, spec, prefix):
blas_libs = ";".join(self.get_blas_libs())
cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs]
if '+shared' in spec:
cmake_args.append('-DBUILD_SHARED_LIBS=ON')
+ if '+fpic' in spec:
+ cmake_args.append('-DCMAKE_POSITION_INDEPENDENT_CODE=ON')
cmake_args += std_cmake_args
cmake(*cmake_args)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
index 22d538560e..c3e6822cdf 100644
--- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
@@ -1,8 +1,9 @@
from spack import *
+import sys
class NetlibScalapack(Package):
"""ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines"""
-
+
homepage = "http://www.netlib.org/scalapack/"
url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
@@ -11,16 +12,16 @@ class NetlibScalapack(Package):
version('2.0.0', '9e76ae7b291be27faaad47cfc256cbfe')
# versions before 2.0.0 are not using cmake and requires blacs as
# a separated package
-
+
variant('shared', default=True, description='Build the shared library version')
variant('fpic', default=False, description="Build with -fpic compiler option")
-
+
provides('scalapack')
-
+
depends_on('mpi')
depends_on('lapack')
-
- def install(self, spec, prefix):
+
+ def install(self, spec, prefix):
options = [
"-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else 'OFF'),
"-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else 'ON'),
@@ -32,19 +33,19 @@ class NetlibScalapack(Package):
"-DCMAKE_C_FLAGS=-fPIC",
"-DCMAKE_Fortran_FLAGS=-fPIC"
])
-
+
options.extend(std_cmake_args)
-
+
with working_dir('spack-build', create=True):
cmake('..', *options)
make()
make("install")
- def setup_dependent_environment(self, module, spec, dependent_spec):
- # TODO treat OS that are not Linux...
- lib_suffix = '.so' if '+shared' in spec['scalapack'] else '.a'
+ def setup_dependent_package(self, module, dependent_spec):
+ spec = self.spec
+ lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so'
+ lib_suffix = lib_dsuffix if '+shared' in spec else '.a'
- spec['scalapack'].fc_link = '-L%s -lscalapack' % spec['scalapack'].prefix.lib
- spec['scalapack'].cc_link = spec['scalapack'].fc_link
- spec['scalapack'].libraries = [join_path(spec['scalapack'].prefix.lib,
- 'libscalapack%s' % lib_suffix)]
+ spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib
+ spec.cc_link = spec.fc_link
+ spec.libraries = [join_path(spec.prefix.lib, 'libscalapack%s' % lib_suffix)]
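
The scalapack hook above attaches cc_link, fc_link, and libraries directly to the spec for dependents to pick up. The diff does not show a consumer, so the fragment below is an assumed usage pattern inside a dependent's install(); the configure variable names (LIBS, FCLIBS) are illustrative only:

    # Sketch: splice the attributes attached above into configure arguments.
    scalapack = spec['scalapack']
    config_args = [
        '--prefix=%s' % prefix,
        'LIBS=%s' % scalapack.cc_link,    # e.g. '-L<prefix>/lib -lscalapack'
        'FCLIBS=%s' % scalapack.fc_link,
    ]
    configure(*config_args)
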
diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py
index 38b355159d..6e99c23652 100644
--- a/var/spack/repos/builtin/packages/octave/package.py
+++ b/var/spack/repos/builtin/packages/octave/package.py
@@ -62,7 +62,7 @@ class Octave(Package):
depends_on('qrupdate', when='+qrupdate')
#depends_on('qscintilla', when='+qscintilla) # TODO: add package
depends_on('qt', when='+qt')
- depends_on('SuiteSparse', when='+suitesparse')
+ depends_on('suite-sparse',when='+suitesparse')
depends_on('zlib', when='+zlib')
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index e16d3fe89c..781a1e2ec8 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -1,4 +1,5 @@
from spack import *
+import sys
class Openblas(Package):
"""OpenBLAS: An optimized BLAS library"""
@@ -16,13 +17,14 @@ class Openblas(Package):
make('libs', 'netlib', 'shared', 'CC=cc', 'FC=f77')
make('install', "PREFIX='%s'" % prefix)
+ lib_dsuffix = 'dylib' if sys.platform == 'darwin' else 'so'
# Blas virtual package should provide blas.a and libblas.a
with working_dir(prefix.lib):
symlink('libopenblas.a', 'blas.a')
symlink('libopenblas.a', 'libblas.a')
- symlink('libopenblas.so', 'libblas.so')
+ symlink('libopenblas.%s' % lib_dsuffix, 'libblas.%s' % lib_dsuffix)
# Lapack virtual package should provide liblapack.a
with working_dir(prefix.lib):
symlink('libopenblas.a', 'liblapack.a')
- symlink('libopenblas.so', 'liblapack.so')
+ symlink('libopenblas.%s' % lib_dsuffix, 'liblapack.%s' % lib_dsuffix)
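
Both this openblas hunk and the netlib-scalapack hunk above pick the shared-library extension from sys.platform. A tiny helper capturing that convention could look like this (a sketch; only darwin is special-cased, exactly as in the diff):

    import sys

    def dso_suffix():
        """Shared-library extension: 'dylib' on macOS, 'so' elsewhere."""
        return 'dylib' if sys.platform == 'darwin' else 'so'

    # e.g. symlink('libopenblas.%s' % dso_suffix(), 'liblapack.%s' % dso_suffix())
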
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index e4484af8c5..9a127f1812 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -41,12 +41,13 @@ class Openmpi(Package):
def url_for_version(self, version):
return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
- def setup_dependent_environment(self, module, spec, dep_spec):
- """For dependencies, make mpicc's use spack wrapper."""
- os.environ['OMPI_CC'] = 'cc'
- os.environ['OMPI_CXX'] = 'c++'
- os.environ['OMPI_FC'] = 'f90'
- os.environ['OMPI_F77'] = 'f77'
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ spack_env.set('OMPI_CC', spack_cc)
+ spack_env.set('OMPI_CXX', spack_cxx)
+ spack_env.set('OMPI_FC', spack_fc)
+ spack_env.set('OMPI_F77', spack_f77)
+
def install(self, spec, prefix):
config_args = ["--prefix=%s" % prefix,
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index ccf2d14c06..c16054816c 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -16,9 +16,9 @@ class Paraview(Package):
variant('osmesa', default=False, description='Enable OSMesa support')
variant('qt', default=False, description='Enable Qt support')
- variant('opengl2', default=False, description='Enable OPengl2 backend')
+ variant('opengl2', default=False, description='Enable OpenGL2 backend')
- depends_on('python', when='+python')
+ depends_on('python@2:2.7', when='+python')
depends_on('py-numpy', when='+python')
depends_on('py-matplotlib', when='+python')
depends_on('tcl', when='+tcl')
@@ -37,11 +37,11 @@ class Paraview(Package):
#depends_on('protobuf') # version mismatches?
#depends_on('sqlite') # external version not supported
depends_on('zlib')
-
+
def url_for_version(self, version):
"""Handle ParaView version-based custom URLs."""
return self._url_str % (version.up_to(2), version)
-
+
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index efe172fc08..7239baaf7f 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -61,14 +61,24 @@ class Petsc(Package):
errors = ['incompatible variants given'] + errors
raise RuntimeError('\n'.join(errors))
else:
- compiler_opts = [
- '--with-mpi=1',
- '--with-mpi-dir=%s' % self.spec['mpi'].prefix,
- ]
+ if self.compiler.name == "clang":
+ compiler_opts = [
+ '--with-mpi=1',
+ '--with-cc=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # Avoid confusing PETSc config by clang: warning: argument unused during compilation
+ '--with-cxx=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
+ '--with-fc=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ '--with-f77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'),
+ ]
+ else:
+ compiler_opts = [
+ '--with-mpi=1',
+ '--with-mpi-dir=%s' % self.spec['mpi'].prefix,
+ ]
return compiler_opts
def install(self, spec, prefix):
- options = []
+ options = ['--with-debugging=0',
+ '--with-ssl=0']
options.extend(self.mpi_dependent_options())
options.extend([
'--with-precision=%s' % ('double' if '+double' in spec else 'single'),
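
The petsc change above special-cases clang because its "argument unused during compilation" warning confuses PETSc's configure probes. The branch on self.compiler.name can be factored into a small helper; the sketch below is illustrative (the helper name and flag table are not from this diff):

    # Map a compiler name to extra flags that keep its diagnostics from
    # derailing configure probes; only clang needs one here.
    EXTRA_FLAGS = {'clang': ' -Qunused-arguments'}

    def quiet_wrapper(command, compiler_name):
        """Append compiler-specific quieting flags to a wrapper command string."""
        return command + EXTRA_FLAGS.get(compiler_name, '')

    # e.g. '--with-cc=' + quiet_wrapper(join_path(self.spec['mpi'].prefix.bin, 'mpicc'),
    #                                   self.compiler.name)
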
diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index e7c6cf0264..4fee99098e 100644
--- a/var/spack/repos/builtin/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
@@ -1,11 +1,12 @@
from spack import *
+
class PyNose(Package):
"""nose extends the test loading and running features of unittest,
making it easier to write, find and run tests."""
homepage = "https://pypi.python.org/pypi/nose"
- url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"
+ url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"
version('1.3.4', '6ed7169887580ddc9a8e16048d38274d')
version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16')
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index dd240d1ea0..4f55bc803e 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -1,11 +1,14 @@
+import functools
+import glob
+import inspect
import os
import re
from contextlib import closing
-from llnl.util.lang import match_predicate
-from spack.util.environment import *
-from spack import *
import spack
+from llnl.util.lang import match_predicate
+from spack import *
+from spack.util.environment import *
class Python(Package):
@@ -90,12 +93,28 @@ class Python(Package):
return os.path.join(self.python_lib_dir, 'site-packages')
- def setup_dependent_environment(self, module, spec, ext_spec):
- """Called before python modules' install() methods.
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ # TODO: do this only for actual extensions.
+
+ # Set PYTHONPATH to include site-packages dir for the
+ # extension and any other python extensions it depends on.
+ python_paths = []
+ for d in extension_spec.traverse():
+ if d.package.extends(self.spec):
+ python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
+
+ pythonpath = ':'.join(python_paths)
+ spack_env.set('PYTHONPATH', pythonpath)
+ run_env.set('PYTHONPATH', pythonpath)
+
+
+ def modify_module(self, module, spec, ext_spec):
+ """
+ Called before python modules' install() methods.
In most cases, extensions will only need to have one line::
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ python('setup.py', 'install', '--prefix=%s' % prefix)
"""
# Python extension builds can have a global python executable function
if self.version >= Version("3.0.0") and self.version < Version("4.0.0"):
@@ -111,15 +130,6 @@ class Python(Package):
# Make the site packages directory if it does not exist already.
mkdirp(module.site_packages_dir)
- # Set PYTHONPATH to include site-packages dir for the
- # extension and any other python extensions it depends on.
- python_paths = []
- for d in ext_spec.traverse():
- if d.package.extends(self.spec):
- python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
- os.environ['PYTHONPATH'] = ':'.join(python_paths)
-
-
# ========================================================================
# Handle specifics of activating and deactivating python modules.
# ========================================================================
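
The python hook above aggregates one site-packages directory per extension by walking extension_spec.traverse() and filtering on package.extends(self.spec); the ruby hunk further down does the same for GEM_PATH via set_path. A condensed in-package sketch of that traversal, using set_path to publish the list as a single search-path variable (as the ruby hook does):

    def setup_dependent_environment(self, spack_env, run_env, extension_spec):
        # One site-packages directory per node in the extension's DAG that
        # actually extends this python installation.
        ext_dirs = [join_path(d.prefix, self.site_packages_dir)
                    for d in extension_spec.traverse()
                    if d.package.extends(self.spec)]
        spack_env.set_path('PYTHONPATH', ext_dirs)
        run_env.set_path('PYTHONPATH', ext_dirs)
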
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index ef5f05601f..d08e8e81e1 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -56,9 +56,12 @@ class Qt(Package):
depends_on("libxcb")
- def setup_dependent_environment(self, module, spec, dep_spec):
- """Dependencies of Qt find it using the QTDIR environment variable."""
- os.environ['QTDIR'] = self.prefix
+ def setup_environment(self, spack_env, env):
+ env.set('QTDIR', self.prefix)
+
+
+ def setup_dependent_environment(self, spack_env, run_env, dspec):
+ spack_env.set('QTDIR', self.prefix)
def patch(self):
diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py
index 6b6242362c..7ff1898ce9 100644
--- a/var/spack/repos/builtin/packages/ruby/package.py
+++ b/var/spack/repos/builtin/packages/ruby/package.py
@@ -1,9 +1,8 @@
from spack import *
-import spack
-import os
+
class Ruby(Package):
- """A dynamic, open source programming language with a focus on
+ """A dynamic, open source programming language with a focus on
simplicity and productivity."""
homepage = "https://www.ruby-lang.org/"
@@ -15,11 +14,23 @@ class Ruby(Package):
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
-
make()
make("install")
- def setup_dependent_environment(self, module, spec, ext_spec):
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ # TODO: do this only for actual extensions.
+ # Set GEM_PATH to include dependent gem directories
+ ruby_paths = []
+ for d in extension_spec.traverse():
+ if d.package.extends(self.spec):
+ ruby_paths.append(d.prefix)
+
+ spack_env.set_path('GEM_PATH', ruby_paths)
+
+ # The actual installation path for this gem
+ spack_env.set('GEM_HOME', extension_spec.prefix)
+
+ def modify_module(self, module, spec, ext_spec):
"""Called before ruby modules' install() methods. Sets GEM_HOME
and GEM_PATH to values appropriate for the package being built.
@@ -30,12 +41,3 @@ class Ruby(Package):
# Ruby extension builds have global ruby and gem functions
module.ruby = Executable(join_path(spec.prefix.bin, 'ruby'))
module.gem = Executable(join_path(spec.prefix.bin, 'gem'))
-
- # Set GEM_PATH to include dependent gem directories
- ruby_paths = []
- for d in ext_spec.traverse():
- if d.package.extends(self.spec):
- ruby_paths.append(d.prefix)
- os.environ['GEM_PATH'] = ':'.join(ruby_paths)
- # The actual installation path for this gem
- os.environ['GEM_HOME'] = ext_spec.prefix
diff --git a/var/spack/repos/builtin/packages/SuiteSparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py
index 6e130d118f..b57f9967c3 100644
--- a/var/spack/repos/builtin/packages/SuiteSparse/package.py
+++ b/var/spack/repos/builtin/packages/suite-sparse/package.py
@@ -1,7 +1,7 @@
from spack import *
-class Suitesparse(Package):
+class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
@@ -24,4 +24,3 @@ class Suitesparse(Package):
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
-
diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py
new file mode 100644
index 0000000000..c4c76909b3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/superlu-dist/package.py
@@ -0,0 +1,63 @@
+from spack import *
+
+class SuperluDist(Package):
+ """A general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines."""
+ homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/"
+ url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz"
+
+ version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae')
+ version('4.2', 'ae9fafae161f775fbac6eba11e530a65')
+ version('4.1', '4edee38cc29f687bd0c8eb361096a455')
+ version('4.0', 'c0b98b611df227ae050bc1635c6940e0')
+
+ depends_on ('mpi')
+ depends_on ('blas')
+ depends_on ('lapack')
+ depends_on ('parmetis')
+ depends_on ('metis')
+
+ def install(self, spec, prefix):
+ makefile_inc = []
+ makefile_inc.extend([
+ 'PLAT = _mac_x',
+ 'DSuperLUroot = %s' % self.stage.source_path, #self.stage.path, prefix
+ 'DSUPERLULIB = $(DSuperLUroot)/lib/libsuperlu_dist.a',
+ 'BLASDEF = -DUSE_VENDOR_BLAS',
+ 'BLASLIB = -L%s -llapack %s -lblas' % (spec['lapack'].prefix.lib, spec['blas'].prefix.lib), # FIXME: avoid hardcoding blas/lapack lib names
+ 'METISLIB = -L%s -lmetis' % spec['metis'].prefix.lib,
+ 'PARMETISLIB = -L%s -lparmetis' % spec['parmetis'].prefix.lib,
+ 'FLIBS =',
+ 'LIBS = $(DSUPERLULIB) $(BLASLIB) $(PARMETISLIB) $(METISLIB)',
+ 'ARCH = ar',
+ 'ARCHFLAGS = cr',
+ 'RANLIB = true',
+ 'CC = mpicc', # FIXME avoid hardcoding MPI compiler names
+ 'CFLAGS = -fPIC -std=c99 -O2 -I%s -I%s' %(spec['parmetis'].prefix.include, spec['metis'].prefix.include),
+ 'NOOPTS = -fPIC -std=c99',
+ 'FORTRAN = mpif77',
+ 'F90FLAGS = -O2',
+ 'LOADER = mpif77',
+ 'LOADOPTS =',
+ 'CDEFS = -DAdd_'
+ ])
+
+ #with working_dir('src'):
+ with open('make.inc', 'w') as fh:
+ fh.write('\n'.join(makefile_inc))
+
+ make("lib", parallel=False)
+
+ # FIXME:
+ # cd "EXAMPLE" do
+ # system "make"
+
+ # need to install by hand
+ headers_location = join_path(self.prefix.include,'superlu_dist')
+ mkdirp(headers_location)
+ # FIXME: fetch all headers in the folder automatically
+ for header in ['Cnames.h','cublas_utils.h','dcomplex.h','html_mainpage.h','machines.h','old_colamd.h','psymbfact.h','superlu_ddefs.h','superlu_defs.h','superlu_enum_consts.h','superlu_zdefs.h','supermatrix.h','util_dist.h']:
+ superludist_header = join_path(self.stage.source_path, 'SRC/',header)
+ install(superludist_header, headers_location)
+
+ superludist_lib = join_path(self.stage.source_path, 'lib/libsuperlu_dist.a')
+ install(superludist_lib,self.prefix.lib)
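
The FIXME above asks for the SRC/ headers to be picked up automatically instead of being listed by hand. Inside install(), a glob-based variant of the same copy loop could look like the sketch below; it assumes every public header really lives directly under SRC/:

    import glob  # normally at the top of package.py, next to `from spack import *`

    # Install every header found under SRC/ rather than maintaining the list above.
    headers_location = join_path(self.prefix.include, 'superlu_dist')
    mkdirp(headers_location)
    for header in glob.glob(join_path(self.stage.source_path, 'SRC', '*.h')):
        install(header, headers_location)
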