-rw-r--r--  etc/spack/modules.yaml | 8
-rw-r--r--  lib/spack/docs/packaging_guide.rst | 98
-rw-r--r--  lib/spack/llnl/util/lang.py | 6
-rw-r--r--  lib/spack/spack/__init__.py | 7
-rw-r--r--  lib/spack/spack/build_environment.py | 196
-rw-r--r--  lib/spack/spack/cmd/info.py | 4
-rw-r--r--  lib/spack/spack/cmd/module.py | 2
-rw-r--r--  lib/spack/spack/cmd/uninstall.py | 23
-rw-r--r--  lib/spack/spack/compilers/gcc.py | 3
-rw-r--r--  lib/spack/spack/concretize.py | 8
-rw-r--r--  lib/spack/spack/config.py | 28
-rw-r--r--  lib/spack/spack/directory_layout.py | 2
-rw-r--r--  lib/spack/spack/environment.py | 252
-rw-r--r--  lib/spack/spack/modules.py | 239
-rw-r--r--  lib/spack/spack/package.py | 214
-rw-r--r--  lib/spack/spack/test/__init__.py | 3
-rw-r--r--  lib/spack/spack/test/concretize.py | 16
-rw-r--r--  lib/spack/spack/test/database.py | 1
-rw-r--r--  lib/spack/spack/test/environment.py | 73
-rw-r--r--  lib/spack/spack/url.py | 2
-rw-r--r--  lib/spack/spack/util/compression.py | 7
-rw-r--r--  lib/spack/spack/util/environment.py | 8
-rwxr-xr-x  share/spack/setup-env.sh | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/python/package.py | 43
-rw-r--r--  var/spack/repos/builtin/packages/arpack-ng/package.py | 9
-rw-r--r--  var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch | 15
-rw-r--r--  var/spack/repos/builtin/packages/atlas/package.py | 58
-rw-r--r--  var/spack/repos/builtin/packages/cmake/package.py | 7
-rw-r--r--  var/spack/repos/builtin/packages/cryptopp/package.py | 10
-rw-r--r--  var/spack/repos/builtin/packages/curl/package.py | 1
-rw-r--r--  var/spack/repos/builtin/packages/eigen/package.py | 2
-rw-r--r--  var/spack/repos/builtin/packages/espresso/package.py | 4
-rw-r--r--  var/spack/repos/builtin/packages/gettext/package.py | 30
-rw-r--r--  var/spack/repos/builtin/packages/hypre/package.py | 5
-rw-r--r--  var/spack/repos/builtin/packages/libelf/package.py | 3
-rw-r--r--  var/spack/repos/builtin/packages/llvm/package.py | 17
-rw-r--r--  var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch | 22
-rw-r--r--  var/spack/repos/builtin/packages/metis/package.py | 13
-rw-r--r--  var/spack/repos/builtin/packages/mpich/package.py | 17
-rw-r--r--  var/spack/repos/builtin/packages/mumps/package.py | 31
-rw-r--r--  var/spack/repos/builtin/packages/muparser/package.py | 18
-rw-r--r--  var/spack/repos/builtin/packages/mvapich2/package.py | 2
-rw-r--r--  var/spack/repos/builtin/packages/netcdf/package.py | 7
-rw-r--r--  var/spack/repos/builtin/packages/netlib-blas/package.py | 46
-rw-r--r--  var/spack/repos/builtin/packages/netlib-lapack/package.py | 69
-rw-r--r--  var/spack/repos/builtin/packages/netlib-scalapack/package.py | 31
-rw-r--r--  var/spack/repos/builtin/packages/oce/package.py | 47
-rw-r--r--  var/spack/repos/builtin/packages/octave/package.py | 2
-rw-r--r--  var/spack/repos/builtin/packages/openblas/package.py | 17
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/package.py | 13
-rw-r--r--  var/spack/repos/builtin/packages/p4est/package.py | 34
-rw-r--r--  var/spack/repos/builtin/packages/papi/package.py | 1
-rw-r--r--  var/spack/repos/builtin/packages/paraview/package.py | 8
-rw-r--r--  var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch | 71
-rw-r--r--  var/spack/repos/builtin/packages/parmetis/package.py | 30
-rw-r--r--  var/spack/repos/builtin/packages/parmetis/pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch | 77
-rw-r--r--  var/spack/repos/builtin/packages/parmetis/pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch | 35
-rw-r--r--  var/spack/repos/builtin/packages/petsc/package.py | 24
-rw-r--r--  var/spack/repos/builtin/packages/py-nose/package.py | 3
-rw-r--r--  var/spack/repos/builtin/packages/py-numpy/package.py | 37
-rw-r--r--  var/spack/repos/builtin/packages/py-scipy/package.py | 5
-rw-r--r--  var/spack/repos/builtin/packages/python/package.py | 52
-rw-r--r--  var/spack/repos/builtin/packages/qt/package.py | 9
-rw-r--r--  var/spack/repos/builtin/packages/ruby/package.py | 34
-rw-r--r--  var/spack/repos/builtin/packages/silo/package.py | 23
-rw-r--r--  var/spack/repos/builtin/packages/slepc/package.py | 49
-rw-r--r--  var/spack/repos/builtin/packages/suite-sparse/package.py (renamed from var/spack/repos/builtin/packages/SuiteSparse/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/superlu-dist/package.py | 64
-rw-r--r--  var/spack/repos/builtin/packages/tbb/package.py | 79
69 files changed, 1829 insertions, 550 deletions
diff --git a/etc/spack/modules.yaml b/etc/spack/modules.yaml
new file mode 100644
index 0000000000..aa2a2c3fe2
--- /dev/null
+++ b/etc/spack/modules.yaml
@@ -0,0 +1,8 @@
+# -------------------------------------------------------------------------
+# This is the default spack module files generation configuration.
+#
+# Changes to this file will affect all users of this spack install,
+# although users can override these settings in their ~/.spack/modules.yaml.
+# -------------------------------------------------------------------------
+modules:
+ enable: ['tcl', 'dotkit']
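
The modules.py changes later in this diff read this file through Spack's config mechanism. As a rough sketch of how the `enable` list is consumed (the `register_if_enabled` helper below is illustrative only; the actual code uses a metaclass, shown in lib/spack/spack/modules.py further down):

    import spack.config

    # Merged view of etc/spack/modules.yaml and any ~/.spack/modules.yaml override.
    CONFIGURATION = spack.config.get_config('modules')

    # Only module generators named under 'enable' get registered and written.
    module_types = {}

    def register_if_enabled(cls):
        if cls.name in CONFIGURATION.get('enable', []):
            module_types[cls.name] = cls
        return cls
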
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 169899212d..519c0da232 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1559,11 +1559,11 @@ you ask for a particular spec.
A user may have certain preferences for how packages should
be concretized on their system. For example, one user may prefer packages
built with OpenMPI and the Intel compiler. Another user may prefer
-packages be built with MVAPICH and GCC.
+packages be built with MVAPICH and GCC.
Spack can be configured to prefer certain compilers, package
versions, depends_on, and variants during concretization.
-The preferred configuration can be controlled via the
+The preferred configuration can be controlled via the
``~/.spack/packages.yaml`` file for user configuations, or the
``etc/spack/packages.yaml`` site configuration.
@@ -1582,32 +1582,32 @@ Here's an example packages.yaml file that sets preferred packages:
compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
providers:
mpi: [mvapich, mpich, openmpi]
-
+
At a high level, this example is specifying how packages should be
-concretized. The dyninst package should prefer using gcc 4.9 and
+concretized. The dyninst package should prefer using gcc 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich for
-its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
-These options are used to fill in implicit defaults. Any of them can be overwritten
+its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
+These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
-Each packages.yaml file begins with the string ``packages:`` and
+Each packages.yaml file begins with the string ``packages:`` and
package names are specified on the next level. The special string ``all``
-applies settings to each package. Underneath each package name is
-one or more components: ``compiler``, ``variants``, ``version``,
-or ``providers``. Each component has an ordered list of spec
+applies settings to each package. Underneath each package name is
+one or more components: ``compiler``, ``variants``, ``version``,
+or ``providers``. Each component has an ordered list of spec
``constraints``, with earlier entries in the list being preferred over
later entries.
-Sometimes a package installation may have constraints that forbid
+Sometimes a package installation may have constraints that forbid
the first concretization rule, in which case Spack will use the first
legal concretization rule. Going back to the example, if a user
-requests gperftools 2.3 or later, then Spack will install version 2.4
+requests gperftools 2.3 or later, then Spack will install version 2.4
as the 2.4 version of gperftools is preferred over 2.3.
-An explicit concretization rule in the preferred section will always
-take preference over unlisted concretizations. In the above example,
+An explicit concretization rule in the preferred section will always
+take preference over unlisted concretizations. In the above example,
xlc isn't listed in the compiler list. Every listed compiler from
gcc to pgi will thus be preferred over the xlc compiler.
@@ -1844,6 +1844,20 @@ dedicated process.
.. _prefix-objects:
+
+Failing the build
+----------------------
+
+Sometimes you don't want a package to successfully install unless some
+condition is true. You can explicitly cause the build to fail from
+``install()`` by raising an ``InstallError``, for example:
+
+.. code-block:: python
+
+ if spec.architecture.startswith('darwin'):
+ raise InstallError('This package does not build on Mac OS X!')
+
+
Prefix objects
----------------------
@@ -2160,6 +2174,62 @@ package, this allows us to avoid race conditions in the library's
build system.
+.. _sanity-checks:
+
+Sanity checking an installation
+--------------------------------
+
+By default, Spack assumes that a build has failed if nothing is
+written to the install prefix, and that it has succeeded if anything
+(a file, a directory, etc.) is written to the install prefix after
+``install()`` completes.
+
+Consider a simple autotools build like this:
+
+.. code-block:: python
+
+ def install(self, spec, prefix):
+ configure("--prefix=" + prefix)
+ make()
+ make("install")
+
+If you are using standard autotools or CMake, ``configure`` and
+``make`` will not write anything to the install prefix. Only ``make
+install`` writes the files, and only once the build is already
+complete. Not all builds are like this. Many builds of scientific
+software modify the install prefix *before* ``make install``. Builds
+like this can falsely report that they were successfully installed if
+an error occurs before the install is complete but after files have
+been written to the ``prefix``.
+
+
+``sanity_check_is_file`` and ``sanity_check_is_dir``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can optionally specify *sanity checks* to deal with this problem.
+Add properties like this to your package:
+
+.. code-block:: python
+
+ class MyPackage(Package):
+ ...
+
+ sanity_check_is_file = ['include/libelf.h']
+ sanity_check_is_dir = ['lib']
+
+ def install(self, spec, prefix):
+ configure("--prefix=" + prefix)
+ make()
+ make("install")
+
+Now, after ``install()`` runs, Spack will check whether
+``$prefix/include/libelf.h`` exists and is a file, and whether
+``$prefix/lib`` exists and is a directory. If the checks fail, then
+the build will fail and the install prefix will be removed. If they
+succeed, Spack considers the build successful and keeps the prefix in
+place.
+
+
.. _file-manipulation:
File manipulation functions
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 13d301f84e..3b4e2c8352 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -117,7 +117,8 @@ def caller_locals():
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
- stack = inspect.stack()
+ # Passing zero here skips line context for speed.
+ stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
@@ -128,7 +129,8 @@ def get_calling_module_name():
"""Make sure that the caller is a class definition, and return the
enclosing module's name.
"""
- stack = inspect.stack()
+ # Passing zero here skips line context for speed.
+ stack = inspect.stack(0)
try:
# Make sure locals contain __module__
caller_locals = stack[2][0].f_locals
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 3051d3f742..aee11f061f 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -188,3 +188,10 @@ __all__ += spack.directives.__all__
import spack.util.executable
from spack.util.executable import *
__all__ += spack.util.executable.__all__
+
+from spack.package import \
+ install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
+ InstallError, ExternalPackageError
+__all__ += [
+ 'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
+ 'InstallError', 'ExternalPackageError']
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 87fc310b5a..119a255a34 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -3,7 +3,7 @@ This module contains all routines related to setting up the package
build environment. All of this is set up by package.py just before
install() is called.
-There are two parts to the bulid environment:
+There are two parts to the build environment:
1. Python build environment (i.e. install() method)
@@ -13,7 +13,7 @@ There are two parts to the bulid environment:
the package's module scope. Ths allows package writers to call
them all directly in Package.install() without writing 'self.'
everywhere. No, this isn't Pythonic. Yes, it makes the code more
- readable and more like the shell script from whcih someone is
+ readable and more like the shell script from which someone is
likely porting.
2. Build execution environment
@@ -27,17 +27,18 @@ There are two parts to the bulid environment:
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
-import os
-import sys
-import shutil
import multiprocessing
+import os
import platform
-from llnl.util.filesystem import *
+import shutil
+import sys
import spack
-import spack.compilers as compilers
-from spack.util.executable import Executable, which
+import llnl.util.tty as tty
+from llnl.util.filesystem import *
+from spack.environment import EnvironmentModifications, validate
from spack.util.environment import *
+from spack.util.executable import Executable, which
#
# This can be set by the user to globally disable parallel builds.
@@ -83,85 +84,88 @@ class MakeExecutable(Executable):
return super(MakeExecutable, self).__call__(*args, **kwargs)
-def set_compiler_environment_variables(pkg):
- assert(pkg.spec.concrete)
- compiler = pkg.compiler
-
+def set_compiler_environment_variables(pkg, env):
+ assert pkg.spec.concrete
# Set compiler variables used by CMake and autotools
- assert all(key in pkg.compiler.link_paths
- for key in ('cc', 'cxx', 'f77', 'fc'))
+ assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+ # Populate an object with the list of environment modifications
+ # and return it
+ # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
link_dir = spack.build_env_path
- os.environ['CC'] = join_path(link_dir, pkg.compiler.link_paths['cc'])
- os.environ['CXX'] = join_path(link_dir, pkg.compiler.link_paths['cxx'])
- os.environ['F77'] = join_path(link_dir, pkg.compiler.link_paths['f77'])
- os.environ['FC'] = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
+ env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
+ env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
+ env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
+ compiler = pkg.compiler
if compiler.cc:
- os.environ['SPACK_CC'] = compiler.cc
+ env.set('SPACK_CC', compiler.cc)
if compiler.cxx:
- os.environ['SPACK_CXX'] = compiler.cxx
+ env.set('SPACK_CXX', compiler.cxx)
if compiler.f77:
- os.environ['SPACK_F77'] = compiler.f77
+ env.set('SPACK_F77', compiler.f77)
if compiler.fc:
- os.environ['SPACK_FC'] = compiler.fc
+ env.set('SPACK_FC', compiler.fc)
- os.environ['SPACK_COMPILER_SPEC'] = str(pkg.spec.compiler)
+ env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+ return env
-def set_build_environment_variables(pkg):
- """This ensures a clean install environment when we build packages.
+def set_build_environment_variables(pkg, env):
+ """
+ This ensures a clean install environment when we build packages
"""
# Add spack build environment path with compiler wrappers first in
# the path. We add both spack.env_path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name
- # when we're building wtih the wrappers.
+ # when we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
- def add_env_path(path):
- env_paths.append(path)
- ci = join_path(path, 'case-insensitive')
- if os.path.isdir(ci): env_paths.append(ci)
- add_env_path(spack.build_env_path)
- add_env_path(join_path(spack.build_env_path, pkg.compiler.name))
-
- path_put_first("PATH", env_paths)
- path_set(SPACK_ENV_PATH, env_paths)
-
- # Prefixes of all of the package's dependencies go in
- # SPACK_DEPENDENCIES
+ for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
+ env_paths.append(item)
+ ci = join_path(item, 'case-insensitive')
+ if os.path.isdir(ci):
+ env_paths.append(ci)
+
+ for item in reversed(env_paths):
+ env.prepend_path('PATH', item)
+ env.set_path(SPACK_ENV_PATH, env_paths)
+
+ # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
- path_set(SPACK_DEPENDENCIES, dep_prefixes)
+ env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
+ env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
# Install prefix
- os.environ[SPACK_PREFIX] = pkg.prefix
+ env.set(SPACK_PREFIX, pkg.prefix)
# Install root prefix
- os.environ[SPACK_INSTALL] = spack.install_path
+ env.set(SPACK_INSTALL, spack.install_path)
# Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
- pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
+ env.unset('LD_LIBRARY_PATH')
+ env.unset('LD_RUN_PATH')
+ env.unset('DYLD_LIBRARY_PATH')
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = ['%s/bin' % prefix for prefix in dep_prefixes]
- path_put_first('PATH', [bin for bin in bin_dirs if os.path.isdir(bin)])
+ bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+ for item in bin_dirs:
+ env.prepend_path('PATH', item)
# Working directory for the spack command itself, for debug logs.
if spack.debug:
- os.environ[SPACK_DEBUG] = "TRUE"
- os.environ[SPACK_SHORT_SPEC] = pkg.spec.short_spec
- os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
-
- # Add dependencies to CMAKE_PREFIX_PATH
- path_set("CMAKE_PREFIX_PATH", dep_prefixes)
+ env.set(SPACK_DEBUG, 'TRUE')
+ env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
+ env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
pkg_config_dirs = []
@@ -170,10 +174,12 @@ def set_build_environment_variables(pkg):
pcdir = join_path(p, libdir, 'pkgconfig')
if os.path.isdir(pcdir):
pkg_config_dirs.append(pcdir)
- path_set("PKG_CONFIG_PATH", pkg_config_dirs)
+ env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+
+ return env
-def set_module_variables_for_package(pkg, m):
+def set_module_variables_for_package(pkg, module):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
@@ -183,6 +189,8 @@ def set_module_variables_for_package(pkg, m):
jobs = 1
elif pkg.make_jobs:
jobs = pkg.make_jobs
+
+ m = module
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
@@ -217,7 +225,7 @@ def set_module_variables_for_package(pkg, m):
m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
- m.spack_f90 = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Emulate some shell commands for convenience
m.pwd = os.getcwd
@@ -262,24 +270,63 @@ def parent_class_modules(cls):
return result
+def setup_module_variables_for_dag(pkg):
+ """Set module-scope variables for all packages in the DAG."""
+ for spec in pkg.spec.traverse(order='post'):
+ # If a user makes their own package repo, e.g.
+ # spack.repos.mystuff.libelf.Libelf, and they inherit from
+ # an existing class like spack.repos.original.libelf.Libelf,
+ # then set the module variables for both classes so the
+ # parent class can still use them if it gets called.
+ spkg = spec.package
+ modules = parent_class_modules(spkg.__class__)
+ for mod in modules:
+ set_module_variables_for_package(spkg, mod)
+ set_module_variables_for_package(spkg, spkg.module)
+
+
def setup_package(pkg):
"""Execute all environment setup routines."""
- set_compiler_environment_variables(pkg)
- set_build_environment_variables(pkg)
+ spack_env = EnvironmentModifications()
+ run_env = EnvironmentModifications()
+
+ # Before proceeding, ensure that specs and packages are consistent
+ #
+ # This is a confusing behavior due to how packages are
+ # constructed. `setup_dependent_package` may set attributes on
+ # specs in the DAG for use by other packages' install
+ # method. However, spec.package will look up a package via
+ # spack.repo, which defensively copies specs into packages. This
+ # code ensures that all packages in the DAG have pieces of the
+ # same spec object at build time.
+ #
+ # This is safe for the build process, b/c the build process is a
+ # throwaway environment, but it is kind of dirty.
+ #
+ # TODO: Think about how to avoid this fix and do something cleaner.
+ for s in pkg.spec.traverse(): s.package.spec = s
+
+ set_compiler_environment_variables(pkg, spack_env)
+ set_build_environment_variables(pkg, spack_env)
+ setup_module_variables_for_dag(pkg)
+
+ # Allow dependencies to modify the module
+ spec = pkg.spec
+ for dependency_spec in spec.traverse(root=False):
+ dpkg = dependency_spec.package
+ dpkg.setup_dependent_package(pkg.module, spec)
+
+ # Allow dependencies to set up environment as well
+ for dependency_spec in spec.traverse(root=False):
+ dpkg = dependency_spec.package
+ dpkg.setup_dependent_environment(spack_env, run_env, spec)
- # If a user makes their own package repo, e.g.
- # spack.repos.mystuff.libelf.Libelf, and they inherit from
- # an existing class like spack.repos.original.libelf.Libelf,
- # then set the module variables for both classes so the
- # parent class can still use them if it gets called.
- modules = parent_class_modules(pkg.__class__)
- for mod in modules:
- set_module_variables_for_package(pkg, mod)
+ # Allow the package to apply some settings.
+ pkg.setup_environment(spack_env, run_env)
- # Allow dependencies to set up environment as well.
- for dep_spec in pkg.spec.traverse(root=False):
- dep_spec.package.setup_dependent_environment(
- pkg.module, dep_spec, pkg.spec)
+ # Make sure nothing's strange about the Spack environment.
+ validate(spack_env, tty.warn)
+ spack_env.apply_modifications()
def fork(pkg, function):
@@ -296,23 +343,23 @@ def fork(pkg, function):
# do stuff
build_env.fork(pkg, child_fun)
- Forked processes are run with the build environemnt set up by
+ Forked processes are run with the build environment set up by
spack.build_environment. This allows package authors to have
- full control over the environment, etc. without offecting
+ full control over the environment, etc. without affecting
other builds that might be executed in the same spack call.
- If something goes wrong, the child process is expected toprint
+ If something goes wrong, the child process is expected to print
the error and the parent process will exit with error as
well. If things go well, the child exits and the parent
carries on.
"""
try:
pid = os.fork()
- except OSError, e:
+ except OSError as e:
raise InstallError("Unable to fork build process: %s" % e)
if pid == 0:
- # Give the child process the package's build environemnt.
+ # Give the child process the package's build environment.
setup_package(pkg)
try:
@@ -323,7 +370,7 @@ def fork(pkg, function):
# which interferes with unit tests.
os._exit(0)
- except spack.error.SpackError, e:
+ except spack.error.SpackError as e:
e.die()
except:
@@ -338,8 +385,7 @@ def fork(pkg, function):
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
- raise InstallError("Installation process had nonzero exit code."
- .format(str(returncode)))
+ raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
class InstallError(spack.error.SpackError):
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index e7abe7f4a5..c93db55c63 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -52,7 +52,7 @@ def print_text_info(pkg):
print "Safe versions: "
if not pkg.versions:
- print("None")
+ print(" None")
else:
pad = padder(pkg.versions, 4)
for v in reversed(sorted(pkg.versions)):
@@ -62,7 +62,7 @@ def print_text_info(pkg):
print
print "Variants:"
if not pkg.variants:
- print "None"
+ print " None"
else:
pad = padder(pkg.variants, 4)
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 1d6867c1d9..315d9fc926 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -80,7 +80,7 @@ def module_find(mtype, spec_array):
if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec))
- print mod.use_name
+ print(mod.use_name)
def module_refresh():
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index d01aa2136b..350ef372cb 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -22,6 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
import sys
import argparse
@@ -63,12 +64,12 @@ def uninstall(parser, args):
matching_specs = spack.installed_db.query(spec)
if not args.all and len(matching_specs) > 1:
tty.error("%s matches multiple packages:" % spec)
- print
+ print()
display_specs(matching_specs, long=True)
- print
- print "You can either:"
- print " a) Use a more specific spec, or"
- print " b) use spack uninstall -a to uninstall ALL matching specs."
+ print()
+ print("You can either:")
+ print(" a) Use a more specific spec, or")
+ print(" b) use spack uninstall -a to uninstall ALL matching specs.")
sys.exit(1)
if len(matching_specs) == 0:
@@ -79,7 +80,7 @@ def uninstall(parser, args):
try:
# should work if package is known to spack
pkgs.append(s.package)
- except spack.repository.UnknownPackageError, e:
+ except spack.repository.UnknownPackageError as e:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(s).do_uninstall(force=True)
@@ -94,11 +95,11 @@ def uninstall(parser, args):
for pkg in pkgs:
try:
pkg.do_uninstall(force=args.force)
- except PackageStillNeededError, e:
+ except PackageStillNeededError as e:
tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True))
- print
- print "The following packages depend on it:"
+ print('')
+ print("The following packages depend on it:")
display_specs(e.dependents, long=True)
- print
- print "You can use spack uninstall -f to force this action."
+ print('')
+ print("You can use spack uninstall -f to force this action.")
sys.exit(1)
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 495b638a3a..64214db32d 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -40,7 +40,8 @@ class Gcc(Compiler):
fc_names = ['gfortran']
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
- suffixes = [r'-mp-\d\.\d']
+ # Homebrew and Linuxes may build gcc with -X, -X.Y suffixes
+ suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
# Named wrapper links within spack.build_env_path
link_paths = {'cc' : 'gcc/gcc',
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 8083f91982..ed9bf79868 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -159,6 +159,10 @@ class DefaultConcretizer(object):
if any(v.satisfies(sv) for sv in spec.versions)],
cmp=cmp_versions)
+ def prefer_key(v):
+ return pkg.versions.get(Version(v)).get('preferred', False)
+ valid_versions.sort(key=prefer_key, reverse=True)
+
if valid_versions:
spec.versions = ver([valid_versions[0]])
else:
@@ -241,7 +245,7 @@ class DefaultConcretizer(object):
return False
#Find the another spec that has a compiler, or the root if none do
- other_spec = find_spec(spec, lambda(x) : x.compiler)
+ other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -288,7 +292,7 @@ def find_spec(spec, condition):
if condition(spec):
return spec
- return None # Nohting matched the condition.
+ return None # Nothing matched the condition.
def cmp_specs(lhs, rhs):
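
The `prefer_key` sort added above makes a version flagged as preferred win over newer versions during concretization. A minimal sketch of what that looks like in a package file, assuming a hypothetical package `foo` with made-up checksums:

    from spack import *

    class Foo(Package):
        """Hypothetical package pinning a default version."""
        homepage = "http://example.com/foo"
        url      = "http://example.com/foo-1.0.tar.gz"

        # 1.0 is concretized by default even though 1.1 is newer.
        version('1.0', '0123456789abcdef0123456789abcdef', preferred=True)
        version('1.1', 'fedcba9876543210fedcba9876543210')

        def install(self, spec, prefix):
            configure('--prefix=' + prefix)
            make()
            make('install')
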
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 6afd69b3ac..14e5aaf4fb 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -237,7 +237,29 @@ section_schemas = {
'type' : 'object',
'default' : {},
}
- },},},},},}
+ },},},},},},
+ 'modules': {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack module file configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'modules:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'enable': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ }
+ }
+ },
+ },
+ },
}
"""OrderedDict of config scopes keyed by name.
@@ -405,11 +427,11 @@ def _read_config_file(filename, schema):
validate_section(data, schema)
return data
- except MarkedYAMLError, e:
+ except MarkedYAMLError as e:
raise ConfigFileError(
"Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
- except IOError, e:
+ except IOError as e:
raise ConfigFileError(
"Error reading configuration file %s: %s" % (filename, str(e)))
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 39ee4e203d..da8f1aa1bc 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -150,7 +150,7 @@ class DirectoryLayout(object):
if os.path.exists(path):
try:
shutil.rmtree(path)
- except exceptions.OSError, e:
+ except exceptions.OSError as e:
raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
new file mode 100644
index 0000000000..72aafa4e2d
--- /dev/null
+++ b/lib/spack/spack/environment.py
@@ -0,0 +1,252 @@
+import os
+import os.path
+import collections
+import inspect
+
+
+class NameModifier(object):
+ def __init__(self, name, **kwargs):
+ self.name = name
+ self.args = {'name': name}
+ self.args.update(kwargs)
+
+
+class NameValueModifier(object):
+ def __init__(self, name, value, **kwargs):
+ self.name = name
+ self.value = value
+ self.args = {'name': name, 'value': value}
+ self.args.update(kwargs)
+
+
+class SetEnv(NameValueModifier):
+ def execute(self):
+ os.environ[self.name] = str(self.value)
+
+
+class UnsetEnv(NameModifier):
+ def execute(self):
+ os.environ.pop(self.name, None) # Avoid throwing if the variable was not set
+
+
+class SetPath(NameValueModifier):
+ def execute(self):
+ string_path = concatenate_paths(self.value)
+ os.environ[self.name] = string_path
+
+
+class AppendPath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories.append(os.path.normpath(self.value))
+ os.environ[self.name] = ':'.join(directories)
+
+
+class PrependPath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories = [os.path.normpath(self.value)] + directories
+ os.environ[self.name] = ':'.join(directories)
+
+
+class RemovePath(NameValueModifier):
+ def execute(self):
+ environment_value = os.environ.get(self.name, '')
+ directories = environment_value.split(':') if environment_value else []
+ directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)]
+ os.environ[self.name] = ':'.join(directories)
+
+
+class EnvironmentModifications(object):
+ """
+ Keeps track of requests to modify the current environment.
+
+ Each call to a method to modify the environment stores the extra information on the caller in the request:
+ - 'filename' : filename of the module where the caller is defined
+ - 'lineno': line number where the request occurred
+ - 'context' : line of code that issued the request that failed
+ """
+
+ def __init__(self, other=None):
+ """
+ Initializes a new instance, copying commands from other if it is not None
+
+ Args:
+ other: another instance of EnvironmentModifications to copy modifications from (optional)
+ """
+ self.env_modifications = []
+ if other is not None:
+ self.extend(other)
+
+ def __iter__(self):
+ return iter(self.env_modifications)
+
+ def __len__(self):
+ return len(self.env_modifications)
+
+ def extend(self, other):
+ self._check_other(other)
+ self.env_modifications.extend(other.env_modifications)
+
+ @staticmethod
+ def _check_other(other):
+ if not isinstance(other, EnvironmentModifications):
+ raise TypeError('other must be an instance of EnvironmentModifications')
+
+ def _get_outside_caller_attributes(self):
+ stack = inspect.stack()
+ try:
+ _, filename, lineno, _, context, index = stack[2]
+ context = context[index].strip()
+ except Exception:
+ filename, lineno, context = 'unknown file', 'unknown line', 'unknown context'
+ args = {
+ 'filename': filename,
+ 'lineno': lineno,
+ 'context': context
+ }
+ return args
+
+ def set(self, name, value, **kwargs):
+ """
+ Stores in the current object a request to set an environment variable
+
+ Args:
+ name: name of the environment variable to be set
+ value: value of the environment variable
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = SetEnv(name, value, **kwargs)
+ self.env_modifications.append(item)
+
+ def unset(self, name, **kwargs):
+ """
+ Stores in the current object a request to unset an environment variable
+
+ Args:
+ name: name of the environment variable to be set
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = UnsetEnv(name, **kwargs)
+ self.env_modifications.append(item)
+
+ def set_path(self, name, elts, **kwargs):
+ """
+ Stores a request to set a path generated from a list.
+
+ Args:
+ name: name of the environment variable to be set.
+ elts: elements of the path to set.
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = SetPath(name, elts, **kwargs)
+ self.env_modifications.append(item)
+
+ def append_path(self, name, path, **kwargs):
+ """
+ Stores in the current object a request to append a path to a path list
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be appended
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = AppendPath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def prepend_path(self, name, path, **kwargs):
+ """
+ Same as `append_path`, but the path is pre-pended
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be pre-pended
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = PrependPath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def remove_path(self, name, path, **kwargs):
+ """
+ Stores in the current object a request to remove a path from a path list
+
+ Args:
+ name: name of the path list in the environment
+ path: path to be removed
+ """
+ kwargs.update(self._get_outside_caller_attributes())
+ item = RemovePath(name, path, **kwargs)
+ self.env_modifications.append(item)
+
+ def group_by_name(self):
+ """
+ Returns a dict of the modifications grouped by variable name
+
+ Returns:
+ dict mapping the environment variable name to the modifications to be done on it
+ """
+ modifications = collections.defaultdict(list)
+ for item in self:
+ modifications[item.name].append(item)
+ return modifications
+
+ def clear(self):
+ """
+ Clears the current list of modifications
+ """
+ del self.env_modifications[:]  # list has no clear() method in Python 2
+
+ def apply_modifications(self):
+ """
+ Applies the modifications and clears the list
+ """
+ modifications = self.group_by_name()
+ # Apply the modifications to the environment variables one variable at a time
+ for name, actions in sorted(modifications.items()):
+ for x in actions:
+ x.execute()
+
+
+def concatenate_paths(paths):
+ """
+ Concatenates an iterable of paths into a string of colon-separated paths
+
+ Args:
+ paths: iterable of paths
+
+ Returns:
+ string
+ """
+ return ':'.join(str(item) for item in paths)
+
+
+def set_or_unset_not_first(variable, changes, errstream):
+ """
+ Check if we are going to set or unset something after other modifications have already been requested
+ """
+ indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
+ if indexes:
+ good = '\t \t{context} at {filename}:{lineno}'
+ nogood = '\t--->\t{context} at {filename}:{lineno}'
+ errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable))
+ for ii, item in enumerate(changes):
+ print_format = nogood if ii in indexes else good
+ errstream(print_format.format(**item.args))
+
+
+def validate(env, errstream):
+ """
+ Validates the environment modifications to check for the presence of suspicious patterns. Prompts a warning for
+ everything that was found
+
+ Current checks:
+ - set or unset variables after other changes on the same variable
+
+ Args:
+ env: list of environment modifications
+ """
+ modifications = env.group_by_name()
+ for variable, list_of_changes in sorted(modifications.items()):
+ set_or_unset_not_first(variable, list_of_changes, errstream)
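
For orientation, a short usage sketch of the EnvironmentModifications class defined above; the variable names and paths are made up, but the calls mirror how build_environment.py uses this API later in the diff:

    import llnl.util.tty as tty
    from spack.environment import EnvironmentModifications, validate

    env = EnvironmentModifications()
    env.set('FOO_ROOT', '/opt/foo')                 # queued as a SetEnv request
    env.unset('LD_LIBRARY_PATH')                    # queued as an UnsetEnv request
    env.prepend_path('PATH', '/opt/foo/bin')        # queued as a PrependPath request
    env.set_path('CMAKE_PREFIX_PATH', ['/opt/foo', '/opt/bar'])

    # Nothing has touched os.environ yet.  Warn about suspicious patterns
    # (e.g. set/unset after earlier changes to the same variable), then apply.
    validate(env, tty.warn)
    env.apply_modifications()
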
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index c27043db8c..d45fdde703 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -22,14 +22,12 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""This module contains code for creating environment modules, which
-can include dotkits, tcl modules, lmod, and others.
+"""
+This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others.
-The various types of modules are installed by post-install hooks and
-removed after an uninstall by post-uninstall hooks. This class
-consolidates the logic for creating an abstract description of the
-information that module systems need. Currently that includes a
-number of directories to be appended to paths in the user's environment:
+The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks.
+This class consolidates the logic for creating an abstract description of the information that module systems need.
+Currently that includes a number of directories to be appended to paths in the user's environment:
* /bin directories to be appended to PATH
* /lib* directories for LD_LIBRARY_PATH
@@ -37,30 +35,30 @@ number of directories to be appended to paths in the user's environment:
* /man* and /share/man* directories for MANPATH
* the package prefix for CMAKE_PREFIX_PATH
-This module also includes logic for coming up with unique names for
-the module files so that they can be found by the various
-shell-support files in $SPACK/share/spack/setup-env.*.
+This module also includes logic for coming up with unique names for the module files so that they can be found by the
+various shell-support files in $SPACK/share/spack/setup-env.*.
-Each hook in hooks/ implements the logic for writing its specific type
-of module file.
+Each hook in hooks/ implements the logic for writing its specific type of module file.
"""
-__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-
import os
+import os.path
import re
-import textwrap
import shutil
-from glob import glob
+import textwrap
import llnl.util.tty as tty
+import spack
+import spack.config
from llnl.util.filesystem import join_path, mkdirp
+from spack.environment import *
-import spack
+__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-"""Registry of all types of modules. Entries created by EnvModule's
- metaclass."""
+# Registry of all types of modules. Entries created by EnvModule's metaclass
module_types = {}
+CONFIGURATION = spack.config.get_config('modules')
+
def print_help():
"""For use by commands to tell user how to activate shell support."""
@@ -79,75 +77,76 @@ def print_help():
"")
+def inspect_path(prefix):
+ """
+ Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment
+ accordingly when an item is found.
+
+ Args:
+ prefix: prefix of the installation
+
+ Returns:
+ instance of EnvironmentModifications containing the requested modifications
+ """
+ env = EnvironmentModifications()
+ # Inspect the prefix to check for the existence of common directories
+ prefix_inspections = {
+ 'bin': ('PATH',),
+ 'man': ('MANPATH',),
+ 'lib': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
+ 'lib64': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
+ 'include': ('CPATH',)
+ }
+ for attribute, variables in prefix_inspections.items():
+ expected = getattr(prefix, attribute)
+ if os.path.isdir(expected):
+ for variable in variables:
+ env.prepend_path(variable, expected)
+ # PKGCONFIG
+ for expected in (join_path(prefix.lib, 'pkgconfig'), join_path(prefix.lib64, 'pkgconfig')):
+ if os.path.isdir(expected):
+ env.prepend_path('PKG_CONFIG_PATH', expected)
+ # CMake related variables
+ env.prepend_path('CMAKE_PREFIX_PATH', prefix)
+ return env
+
+
class EnvModule(object):
name = 'env_module'
+ formats = {}
class __metaclass__(type):
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
- if cls.name != 'env_module':
+ if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']:
module_types[cls.name] = cls
-
def __init__(self, spec=None):
- # category in the modules system
- # TODO: come up with smarter category names.
- self.category = "spack"
+ self.spec = spec
+ self.pkg = spec.package # Just stored for convenience
- # Descriptions for the module system's UI
- self.short_description = ""
- self.long_description = ""
+ # short description default is just the package + version
+ # packages can provide this optional attribute
+ self.short_description = spec.format("$_ $@")
+ if hasattr(self.pkg, 'short_description'):
+ self.short_description = self.pkg.short_description
- # dict pathname -> list of directories to be prepended to in
- # the module file.
- self._paths = None
- self.spec = spec
+ # long description is the docstring with reduced whitespace.
+ self.long_description = None
+ if self.spec.package.__doc__:
+ self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
@property
- def paths(self):
- if self._paths is None:
- self._paths = {}
-
- def add_path(path_name, directory):
- path = self._paths.setdefault(path_name, [])
- path.append(directory)
-
- # Add paths if they exist.
- for var, directory in [
- ('PATH', self.spec.prefix.bin),
- ('MANPATH', self.spec.prefix.man),
- ('MANPATH', self.spec.prefix.share_man),
- ('LIBRARY_PATH', self.spec.prefix.lib),
- ('LIBRARY_PATH', self.spec.prefix.lib64),
- ('LD_LIBRARY_PATH', self.spec.prefix.lib),
- ('LD_LIBRARY_PATH', self.spec.prefix.lib64),
- ('CPATH', self.spec.prefix.include),
- ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib, 'pkgconfig')),
- ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib64, 'pkgconfig'))]:
-
- if os.path.isdir(directory):
- add_path(var, directory)
-
- # Add python path unless it's an actual python installation
- # TODO: is there a better way to do this?
- if self.spec.name != 'python':
- site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
- if site_packages:
- add_path('PYTHONPATH', site_packages[0])
-
- if self.spec.package.extends(spack.spec.Spec('ruby')):
- add_path('GEM_PATH', self.spec.prefix)
-
- # short description is just the package + version
- # TODO: maybe packages can optionally provide it.
- self.short_description = self.spec.format("$_ $@")
-
- # long description is the docstring with reduced whitespace.
- if self.spec.package.__doc__:
- self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
-
- return self._paths
+ def category(self):
+ # Anything defined at the package level takes precedence
+ if hasattr(self.pkg, 'category'):
+ return self.pkg.category
+ # Extensions
+ for extendee in self.pkg.extendees:
+ return '{extendee} extension'.format(extendee=extendee)
+ # Not very descriptive fallback
+ return 'spack installed package'
def write(self):
@@ -156,18 +155,41 @@ class EnvModule(object):
if not os.path.exists(module_dir):
mkdirp(module_dir)
- # If there are no paths, no need for a dotkit.
- if not self.paths:
+ # Environment modifications guessed by inspecting the
+ # installation prefix
+ env = inspect_path(self.spec.prefix)
+
+ # Let the extendee modify their extensions before asking for
+ # package-specific modifications
+ spack_env = EnvironmentModifications()
+ for item in self.pkg.extendees:
+ package = self.spec[item].package
+ package.setup_dependent_package(self.pkg.module, self.spec)
+ package.setup_dependent_environment(spack_env, env, self.spec)
+
+ # Package-specific environment modifications
+ self.spec.package.setup_environment(spack_env, env)
+
+ # TODO : implement site-specific modifications and filters
+ if not env:
return
with open(self.file_name, 'w') as f:
- self._write(f)
+ self.write_header(f)
+ for line in self.process_environment_command(env):
+ f.write(line)
-
- def _write(self, stream):
- """To be implemented by subclasses."""
+ def write_header(self, stream):
raise NotImplementedError()
+ def process_environment_command(self, env):
+ for command in env:
+ try:
+ yield self.formats[type(command)].format(**command.args)
+ except KeyError:
+ tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command)))
+ tty.warn('{context} at {filename}:{lineno}'.format(**command.args))
+
@property
def file_name(self):
@@ -175,14 +197,12 @@ class EnvModule(object):
where this module lives."""
raise NotImplementedError()
-
@property
def use_name(self):
"""Subclasses should implement this to return the name the
module command uses to refer to the package."""
raise NotImplementedError()
-
def remove(self):
mod_file = self.file_name
if os.path.exists(mod_file):
@@ -193,19 +213,23 @@ class Dotkit(EnvModule):
name = 'dotkit'
path = join_path(spack.share_path, "dotkit")
+ formats = {
+ PrependPath: 'dk_alter {name} {value}\n',
+ SetEnv: 'dk_setenv {name} {value}\n'
+ }
+
@property
def file_name(self):
- return join_path(Dotkit.path, self.spec.architecture,
- '%s.dk' % self.use_name)
+ return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name)
@property
def use_name(self):
return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
self.spec.compiler.name,
- self.spec.compiler.version,
+ self.spec.compiler.version,
self.spec.dag_hash())
- def _write(self, dk_file):
+ def write_header(self, dk_file):
# Category
if self.category:
dk_file.write('#c %s\n' % self.category)
@@ -219,50 +243,41 @@ class Dotkit(EnvModule):
for line in textwrap.wrap(self.long_description, 72):
dk_file.write("#h %s\n" % line)
- # Path alterations
- for var, dirs in self.paths.items():
- for directory in dirs:
- dk_file.write("dk_alter %s %s\n" % (var, directory))
-
- # Let CMake find this package.
- dk_file.write("dk_alter CMAKE_PREFIX_PATH %s\n" % self.spec.prefix)
-
class TclModule(EnvModule):
name = 'tcl'
path = join_path(spack.share_path, "modules")
+ formats = {
+ PrependPath: 'prepend-path {name} \"{value}\"\n',
+ AppendPath: 'append-path {name} \"{value}\"\n',
+ RemovePath: 'remove-path {name} \"{value}\"\n',
+ SetEnv: 'setenv {name} \"{value}\"\n',
+ UnsetEnv: 'unsetenv {name}\n'
+ }
+
@property
def file_name(self):
return join_path(TclModule.path, self.spec.architecture, self.use_name)
-
@property
def use_name(self):
return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
self.spec.compiler.name,
- self.spec.compiler.version,
+ self.spec.compiler.version,
self.spec.dag_hash())
-
- def _write(self, m_file):
- # TODO: cateogry?
- m_file.write('#%Module1.0\n')
-
+ def write_header(self, module_file):
+ # TCL Modulefile header
+ module_file.write('#%Module1.0\n')
+ # TODO : category ?
# Short description
if self.short_description:
- m_file.write('module-whatis \"%s\"\n\n' % self.short_description)
+ module_file.write('module-whatis \"%s\"\n\n' % self.short_description)
# Long description
if self.long_description:
- m_file.write('proc ModulesHelp { } {\n')
+ module_file.write('proc ModulesHelp { } {\n')
doc = re.sub(r'"', '\"', self.long_description)
- m_file.write("puts stderr \"%s\"\n" % doc)
- m_file.write('}\n\n')
-
- # Path alterations
- for var, dirs in self.paths.items():
- for directory in dirs:
- m_file.write("prepend-path %s \"%s\"\n" % (var, directory))
-
- m_file.write("prepend-path CMAKE_PREFIX_PATH \"%s\"\n" % self.spec.prefix)
+ module_file.write("puts stderr \"%s\"\n" % doc)
+ module_file.write('}\n\n')
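
Roughly how the new `formats` tables turn queued environment modifications into module file text; the values below are hypothetical, and the real code path is EnvModule.write() feeding process_environment_command():

    from spack.environment import EnvironmentModifications, PrependPath, SetEnv

    env = EnvironmentModifications()
    env.prepend_path('PATH', '/spack/opt/foo-1.0/bin')
    env.set('FOO_ROOT', '/spack/opt/foo-1.0')

    # Same shape as TclModule.formats in the diff above.
    formats = {
        PrependPath: 'prepend-path {name} "{value}"\n',
        SetEnv: 'setenv {name} "{value}"\n',
    }
    for command in env:
        print formats[type(command)].format(**command.args),
    # -> prepend-path PATH "/spack/opt/foo-1.0/bin"
    # -> setenv FOO_ROOT "/spack/opt/foo-1.0"
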
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 02fb3e5834..9af3221837 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -34,40 +34,34 @@ rundown on spack and how it differs from homebrew, look at the
README.
"""
import os
-import errno
import re
-import shutil
-import time
-import itertools
-import subprocess
-import platform as py_platform
-import multiprocessing
-from urlparse import urlparse, urljoin
import textwrap
-from StringIO import StringIO
+import time
+import glob
import llnl.util.tty as tty
-from llnl.util.tty.log import log_output
-from llnl.util.link_tree import LinkTree
-from llnl.util.filesystem import *
-from llnl.util.lang import *
-
import spack
-import spack.error
+import spack.build_environment
import spack.compilers
-import spack.mirror
-import spack.hooks
import spack.directives
+import spack.error
+import spack.fetch_strategy as fs
+import spack.hooks
+import spack.mirror
import spack.repository
-import spack.build_environment
import spack.url
import spack.util.web
-import spack.fetch_strategy as fs
-from spack.version import *
+from StringIO import StringIO
+from llnl.util.filesystem import *
+from llnl.util.lang import *
+from llnl.util.link_tree import LinkTree
+from llnl.util.tty.log import log_output
from spack.stage import Stage, ResourceStage, StageComposite
-from spack.util.compression import allowed_archive, extension
-from spack.util.executable import ProcessError
+from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment
+from spack.util.executable import ProcessError
+from spack.version import *
+from urlparse import urlparse
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -318,16 +312,17 @@ class Package(object):
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
- """List of prefix-relative file paths. If these do not exist after
- install, or if they exist but are not files, sanity checks fail.
+ """List of prefix-relative file paths (or a single path). If these do
+ not exist after install, or if they exist but are not files,
+ sanity checks fail.
"""
- sanity_check_files = []
+ sanity_check_is_file = []
- """List of prefix-relative directory paths. If these do not exist
- after install, or if they exist but are not directories, sanity
- checks will fail.
+ """List of prefix-relative directory paths (or a single path). If
+ these do not exist after install, or if they exist but are not
+ directories, sanity checks will fail.
"""
- sanity_check_dirs = []
+ sanity_check_is_dir = []
def __init__(self, spec):
@@ -966,14 +961,17 @@ class Package(object):
def sanity_check_prefix(self):
"""This function checks whether install succeeded."""
def check_paths(path_list, filetype, predicate):
+ if isinstance(path_list, basestring):
+ path_list = [path_list]
+
for path in path_list:
abs_path = os.path.join(self.prefix, path)
if not predicate(abs_path):
raise InstallError("Install failed for %s. No such %s in prefix: %s"
% (self.name, filetype, path))
- check_paths(self.sanity_check_files, 'file', os.path.isfile)
- check_paths(self.sanity_check_dirs, 'directory', os.path.isdir)
+ check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
+ check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
installed = set(os.listdir(self.prefix))
installed.difference_update(spack.install_layout.hidden_file_paths)
@@ -1004,38 +1002,127 @@ class Package(object):
return __import__(self.__class__.__module__,
fromlist=[self.__class__.__name__])
+ def setup_environment(self, spack_env, run_env):
+ """Set up the compile and runtime environments for a package.
- def setup_dependent_environment(self, module, spec, dependent_spec):
- """Called before the install() method of dependents.
+ `spack_env` and `run_env` are `EnvironmentModifications`
+ objects. Package authors can call methods on them to alter
+ the environment within Spack and at runtime.
+
+ Both `spack_env` and `run_env` are applied within the build
+ process, before this package's `install()` method is called.
+
+ Modifications in `run_env` will *also* be added to the
+ generated environment modules for this package.
Default implementation does nothing, but this can be
- overridden by an extendable package to set up the install
- environment for its extensions. This is useful if there are
- some common steps to installing all extensions for a
- certain package.
+ overridden if the package needs a particular environment.
- Some examples:
+ Examples:
- 1. Installing python modules generally requires PYTHONPATH to
- point to the lib/pythonX.Y/site-packages directory in the
- module's install prefix. This could set that variable.
+ 1. Qt extensions need `QTDIR` set.
- 2. Extensions often need to invoke the 'python' interpreter
- from the Python installation being extended. This routine can
- put a 'python' Execuable object in the module scope for the
- extension package to simplify extension installs.
+ Args:
+ spack_env (EnvironmentModifications): list of
+ modifications to be applied when this package is built
+ within Spack.
- 3. A lot of Qt extensions need QTDIR set. This can be used to do that.
+ run_env (EnvironmentModifications): list of environment
+ changes to be applied when this package is run outside
+ of Spack.
"""
pass
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ """Set up the environment of packages that depend on this one.
+
+ This is similar to `setup_environment`, but it is used to
+ modify the compile and runtime environments of packages that
+ *depend* on this one. This gives packages like Python and
+ others that follow the extension model a way to implement
+ common environment or compile-time settings for dependencies.
+
+ By default, this delegates to self.setup_environment()
+
+ Example :
+
+ 1. Installing python modules generally requires
+ `PYTHONPATH` to point to the lib/pythonX.Y/site-packages
+ directory in the module's install prefix. This could
+ set that variable.
+
+ Args:
+
+ spack_env (EnvironmentModifications): list of
+ modifications to be applied when the dependent package
+ is built within Spack.
+
+ run_env (EnvironmentModifications): list of environment
+ changes to be applied when the dependent package is
+ run outside of Spack.
+
+ dependent_spec (Spec): The spec of the dependent package
+ about to be built. This allows the extendee (self) to
+ query the dependent's state. Note that *this*
+ package's spec is available as `self.spec`.
+
+ This is useful if there are some common steps to installing
+ all extensions for a certain package.
+
+ """
+ self.setup_environment(spack_env, run_env)
+
+
+ def setup_dependent_package(self, module, dependent_spec):
+ """Set up Python module-scope variables for dependent packages.
+
+ Called before the install() method of dependents.
+
+ Default implementation does nothing, but this can be
+ overridden by an extendable package to set up the module of
+ its extensions. This is useful if there are some common steps
+ to installing all extensions for a certain package.
+
+ Examples:
+
+ 1. Extensions often need to invoke the `python`
+ interpreter from the Python installation being
+ extended. This routine can put a 'python' Executable
+ object in the module scope for the extension package to
+ simplify extension installs.
+
+ 2. MPI compilers could set some variables in the
+ dependent's scope that point to `mpicc`, `mpicxx`,
+ etc., allowing them to be called by common names
+ regardless of which MPI is used.
+
+ 3. BLAS/LAPACK implementations can set some variables
+ indicating the path to their libraries, since these
+ paths differ by BLAS/LAPACK implementation.
+
+ Args:
+
+ module (module): The Python `module` object of the
+ dependent package. Packages can use this to set
+ module-scope variables for the dependent to use.
+
+ dependent_spec (Spec): The spec of the dependent package
+ about to be built. This allows the extendee (self) to
+ query the dependent's state. Note that *this*
+ package's spec is available as `self.spec`.
+
+ """
+ pass
+
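Again for illustration only, example 1 above could look roughly like this, reusing the Executable import seen in the atlas package later in this diff:

    def setup_dependent_package(self, module, dependent_spec):
        from spack.util.executable import Executable
        # Sketch: hand extensions a ready-made 'python' Executable so their
        # install() can call python('setup.py', 'install', '--prefix=...').
        module.python = Executable(join_path(self.prefix.bin, 'python'))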
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
raise InstallError("Package %s provides no install method!" % self.name)
-
def do_uninstall(self, force=False):
if not self.installed:
raise InstallError(str(self.spec) + " is not installed.")
@@ -1239,6 +1326,27 @@ class Package(object):
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
+def install_dependency_symlinks(pkg, spec, prefix):
+ """Execute a dummy install and flatten dependencies"""
+ flatten_dependencies(spec, prefix)
+
+def flatten_dependencies(spec, flat_dir):
+ """Make each dependency of spec present in dir via symlink."""
+ for dep in spec.traverse(root=False):
+ name = dep.name
+
+ dep_path = spack.install_layout.path_for_spec(dep)
+ dep_files = LinkTree(dep_path)
+
+ os.mkdir(flat_dir+'/'+name)
+
+ conflict = dep_files.find_conflict(flat_dir+'/'+name)
+ if conflict:
+ raise DependencyConflictError(conflict)
+
+ dep_files.merge(flat_dir+'/'+name)
+
+
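A hypothetical caller of these helpers (not in this diff): a bundle-style package that only aggregates its dependencies could use them as its entire install step.

    def install(self, spec, prefix):
        # Sketch: nothing to compile; mirror each dependency's install tree
        # into prefix/<dependency name> via symlinks.
        install_dependency_symlinks(self, spec, prefix)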
def validate_package_url(url_string):
"""Determine whether spack can handle a particular URL or not."""
url = urlparse(url_string)
@@ -1326,6 +1434,10 @@ class InstallError(spack.error.SpackError):
super(InstallError, self).__init__(message, long_msg)
+class ExternalPackageError(InstallError):
+ """Raised by install() when a package is only for external use."""
+
+
class PackageStillNeededError(InstallError):
"""Raised when package is still needed by another on uninstall."""
def __init__(self, spec, dependents):
@@ -1376,3 +1488,11 @@ class ExtensionConflictError(ExtensionError):
class ActivationError(ExtensionError):
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
+
+
+class DependencyConflictError(spack.error.SpackError):
+ """Raised when the dependencies cannot be flattened as asked for."""
+ def __init__(self, conflict):
+ super(DependencyConflictError, self).__init__(
+ "%s conflicts with another file in the flattened directory." %(
+ conflict))
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index d5d8b64765..cd842561e6 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -66,7 +66,8 @@ test_names = ['versions',
'database',
'namespace_trie',
'yaml',
- 'sbang']
+ 'sbang',
+ 'environment']
def list_tests():
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index f264faf17a..9cd8c969ae 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -24,6 +24,7 @@
##############################################################################
import spack
from spack.spec import Spec, CompilerSpec
+from spack.version import ver
from spack.concretize import find_spec
from spack.test.mock_packages_test import *
@@ -77,6 +78,14 @@ class ConcretizeTest(MockPackagesTest):
self.check_concretize('mpich')
+ def test_concretize_preferred_version(self):
+ spec = self.check_concretize('python')
+ self.assertEqual(spec.versions, ver('2.7.11'))
+
+ spec = self.check_concretize('python@3.5.1')
+ self.assertEqual(spec.versions, ver('3.5.1'))
+
+
def test_concretize_with_virtual(self):
self.check_concretize('mpileaks ^mpi')
self.check_concretize('mpileaks ^mpi@:1.1')
@@ -309,3 +318,10 @@ class ConcretizeTest(MockPackagesTest):
Spec('d')),
Spec('e'))
self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s))
+
+
+ def test_compiler_child(self):
+ s = Spec('mpileaks%clang ^dyninst%gcc')
+ s.concretize()
+ self.assertTrue(s['mpileaks'].satisfies('%clang'))
+ self.assertTrue(s['dyninst'].satisfies('%gcc'))
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 9a57e1f03e..ce6e8a0552 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -26,6 +26,7 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
+import os.path
import multiprocessing
import shutil
import tempfile
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
new file mode 100644
index 0000000000..6c8f5ea43c
--- /dev/null
+++ b/lib/spack/spack/test/environment.py
@@ -0,0 +1,73 @@
+import unittest
+import os
+from spack.environment import EnvironmentModifications
+
+
+class EnvironmentTest(unittest.TestCase):
+ def setUp(self):
+ os.environ.clear()
+ os.environ['UNSET_ME'] = 'foo'
+ os.environ['EMPTY_PATH_LIST'] = ''
+ os.environ['PATH_LIST'] = '/path/second:/path/third'
+ os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+
+ def test_set(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value')
+ env.set('B', 3)
+ env.apply_modifications()
+ self.assertEqual('dummy value', os.environ['A'])
+ self.assertEqual(str(3), os.environ['B'])
+
+ def test_unset(self):
+ env = EnvironmentModifications()
+ self.assertEqual('foo', os.environ['UNSET_ME'])
+ env.unset('UNSET_ME')
+ env.apply_modifications()
+ self.assertRaises(KeyError, os.environ.__getitem__, 'UNSET_ME')
+
+ def test_set_path(self):
+ env = EnvironmentModifications()
+ env.set_path('A', ['foo', 'bar', 'baz'])
+ env.apply_modifications()
+ self.assertEqual('foo:bar:baz', os.environ['A'])
+
+ def test_path_manipulation(self):
+ env = EnvironmentModifications()
+
+ env.append_path('PATH_LIST', '/path/last')
+ env.prepend_path('PATH_LIST', '/path/first')
+
+ env.append_path('EMPTY_PATH_LIST', '/path/middle')
+ env.append_path('EMPTY_PATH_LIST', '/path/last')
+ env.prepend_path('EMPTY_PATH_LIST', '/path/first')
+
+ env.append_path('NEWLY_CREATED_PATH_LIST', '/path/middle')
+ env.append_path('NEWLY_CREATED_PATH_LIST', '/path/last')
+ env.prepend_path('NEWLY_CREATED_PATH_LIST', '/path/first')
+
+ env.remove_path('REMOVE_PATH_LIST', '/remove/this')
+ env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
+
+ env.apply_modifications()
+ self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST'])
+ self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST'])
+ self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST'])
+ self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST'])
+
+ def test_extra_arguments(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value', who='Pkg1')
+ for x in env:
+ assert 'who' in x.args
+ env.apply_modifications()
+ self.assertEqual('dummy value', os.environ['A'])
+
+ def test_extend(self):
+ env = EnvironmentModifications()
+ env.set('A', 'dummy value')
+ env.set('B', 3)
+ copy_construct = EnvironmentModifications(env)
+ self.assertEqual(len(copy_construct), 2)
+ for x, y in zip(env, copy_construct):
+ assert x is y
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index ad551a6ded..f51f05cad7 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -142,7 +142,7 @@ def split_url_extension(path):
def downloaded_file_extension(path):
"""This returns the type of archive a URL refers to. This is
- sometimes confusing becasue of URLs like:
+ sometimes confusing because of URLs like:
(1) https://github.com/petdance/ack/tarball/1.93_02
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index ea1f233bce..5ae5867428 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -27,13 +27,12 @@ import os
from itertools import product
from spack.util.executable import which
-# Supported archvie extensions.
+# Supported archive extensions.
PRE_EXTS = ["tar"]
EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"]
-# Add EXTS last so that .tar.gz is matched *before* tar.gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + EXTS
-
+# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
+ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
def allowed_archive(path):
return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
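For illustration (file names assumed), the practical effect of also including the bare PRE_EXTS:

    from spack.util.compression import allowed_archive

    allowed_archive('foo.tar.gz')  # True, via the combined 'tar.gz' entry
    allowed_archive('foo.tar')     # True, now that bare 'tar' is allowed too
    allowed_archive('foo.txt')     # False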
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index ae8e5708be..55e653fd2f 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -59,14 +59,8 @@ def path_put_first(var_name, directories):
path_set(var_name, new_path)
-def pop_keys(dictionary, *keys):
- for key in keys:
- if key in dictionary:
- dictionary.pop(key)
-
-
def dump_environment(path):
"""Dump the current environment out to a file."""
with open(path, 'w') as env_file:
- for key,val in sorted(os.environ.items()):
+ for key, val in sorted(os.environ.items()):
env_file.write("%s=%s\n" % (key, val))
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index 586a5b836b..764af68400 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -141,7 +141,7 @@ function _spack_pathadd {
fi
# Do the actual prepending here.
- eval "_pa_oldvalue=\$${_pa_varname}"
+ eval "_pa_oldvalue=\${${_pa_varname}:-}"
if [ -d "$_pa_new_path" ] && [[ ":$_pa_oldvalue:" != *":$_pa_new_path:"* ]]; then
if [ -n "$_pa_oldvalue" ]; then
diff --git a/var/spack/repos/builtin.mock/packages/python/package.py b/var/spack/repos/builtin.mock/packages/python/package.py
new file mode 100644
index 0000000000..c5fed52f53
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/python/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Python(Package):
+ """Dummy Python package to demonstrate preferred versions."""
+ homepage = "http://www.python.org"
+ url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
+
+ extendable = True
+
+ version('3.5.1', 'be78e48cdfc1a7ad90efff146dce6cfe')
+ version('3.5.0', 'a56c0c0b45d75a0ec9c6dee933c41c36')
+ version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b', preferred=True)
+ version('2.7.10', 'd7547558fd673bd9d38e2108c6b42521')
+ version('2.7.9', '5eebcaa0030dc4061156d3429657fb83')
+ version('2.7.8', 'd4bca0159acb0b44a781292b5231936f')
+
+ def install(self, spec, prefix):
+ pass
+
diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py
index 0b49d14202..dd86b17a53 100644
--- a/var/spack/repos/builtin/packages/arpack-ng/package.py
+++ b/var/spack/repos/builtin/packages/arpack-ng/package.py
@@ -35,6 +35,10 @@ class ArpackNg(Package):
variant('shared', default=True, description='Enables the build of shared libraries')
variant('mpi', default=False, description='Activates MPI support')
+ # The function pdlamch10 does not set the return variable. This is fixed upstream
+ # see https://github.com/opencollab/arpack-ng/issues/34
+ patch('pdlamch10.patch', when='@3.3:')
+
depends_on('blas')
depends_on('lapack')
depends_on('mpi', when='+mpi')
@@ -46,7 +50,10 @@ class ArpackNg(Package):
options = ['--prefix=%s' % prefix]
if '+mpi' in spec:
- options.append('--enable-mpi')
+ options.extend([
+ '--enable-mpi',
+ 'F77=mpif77' #FIXME: avoid hardcoding MPI wrapper names
+ ])
if '~shared' in spec:
options.append('--enable-shared=no')
diff --git a/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch b/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch
new file mode 100644
index 0000000000..922828909f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/arpack-ng/pdlamch10.patch
@@ -0,0 +1,15 @@
+diff --git a/PARPACK/SRC/MPI/pdlamch10.f b/PARPACK/SRC/MPI/pdlamch10.f
+index 6571da9..2882c2e 100644
+--- a/PARPACK/SRC/MPI/pdlamch10.f
++++ b/PARPACK/SRC/MPI/pdlamch10.f
+@@ -86,8 +86,8 @@
+ TEMP = TEMP1
+ END IF
+ *
+- PDLAMCH = TEMP
++ PDLAMCH10 = TEMP
+ *
+-* End of PDLAMCH
++* End of PDLAMCH10
+ *
+ END
diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py
index fc683363a7..b5504122b7 100644
--- a/var/spack/repos/builtin/packages/atlas/package.py
+++ b/var/spack/repos/builtin/packages/atlas/package.py
@@ -1,31 +1,36 @@
from spack import *
from spack.util.executable import Executable
-import os
+import os.path
class Atlas(Package):
"""
- Automatically Tuned Linear Algebra Software, generic shared
- ATLAS is an approach for the automatic generation and optimization of
- numerical software. Currently ATLAS supplies optimized versions for the
- complete set of linear algebra kernels known as the Basic Linear Algebra
- Subroutines (BLAS), and a subset of the linear algebra routines in the
- LAPACK library.
+ Automatically Tuned Linear Algebra Software, generic shared ATLAS is an approach for the automatic generation and
+ optimization of numerical software. Currently ATLAS supplies optimized versions for the complete set of linear
+ algebra kernels known as the Basic Linear Algebra Subroutines (BLAS), and a subset of the linear algebra routines
+ in the LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
+ version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da',
+ url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True)
+ resource(name='lapack',
+ url='http://www.netlib.org/lapack/lapack-3.5.0.tgz',
+ md5='b1d3e3e425b2e44a06760ff173104bdf',
+ destination='spack-resource-lapack',
+ when='@3:')
+
version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825',
url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download')
- version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da',
- url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2')
- # TODO: make this provide BLAS once it works better. Create a way
- # TODO: to mark "beta" packages and require explicit invocation.
+ variant('shared', default=True, description='Builds shared library')
- # provides('blas')
+ provides('blas')
+ provides('lapack')
+ parallel = False
def patch(self):
- # Disable thraed check. LLNL's environment does not allow
+ # Disable thread check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
@@ -33,26 +38,21 @@ class Atlas(Package):
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
- @when('@:3.10')
def install(self, spec, prefix):
- with working_dir('ATLAS-Build', create=True):
- configure = Executable('../configure')
- configure('--prefix=%s' % prefix, '-C', 'ic', 'cc', '-C', 'if', 'f77', "--dylibs")
- make()
- make('check')
- make('ptcheck')
- make('time')
- make("install")
+ options = []
+ if '+shared' in spec:
+ options.append('--shared')
- def install(self, spec, prefix):
- with working_dir('ATLAS-Build', create=True):
- configure = Executable('../configure')
- configure('--incdir=%s' % prefix.include,
- '--libdir=%s' % prefix.lib,
- '--cc=cc',
- "--shared")
+ # Lapack resource
+ lapack_stage = self.stage[1]
+ lapack_tarfile = os.path.basename(lapack_stage.fetcher.url)
+ lapack_tarfile_path = join_path(lapack_stage.path, lapack_tarfile)
+ options.append('--with-netlib-lapack-tarfile=%s' % lapack_tarfile_path)
+ with working_dir('spack-build', create=True):
+ configure = Executable('../configure')
+ configure('--prefix=%s' % prefix, *options)
make()
make('check')
make('ptcheck')
diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index cc93c7067c..1f93d39769 100644
--- a/var/spack/repos/builtin/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -38,10 +38,12 @@ class Cmake(Package):
version('2.8.10.2', '097278785da7182ec0aea8769d06860c')
variant('ncurses', default=True, description='Enables the build of the ncurses gui')
+ variant('openssl', default=True, description="Enables CMake's OpenSSL features")
variant('qt', default=False, description='Enables the build of cmake-gui')
variant('doc', default=False, description='Enables the generation of html and man page documentation')
depends_on('ncurses', when='+ncurses')
+ depends_on('openssl', when='+openssl')
depends_on('qt', when='+qt')
depends_on('python@2.7.11:', when='+doc')
depends_on('py-sphinx', when='+doc')
@@ -77,8 +79,9 @@ class Cmake(Package):
options.append('--sphinx-html')
options.append('--sphinx-man')
- options.append('--')
- options.append('-DCMAKE_USE_OPENSSL=ON')
+ if '+openssl' in spec:
+ options.append('--')
+ options.append('-DCMAKE_USE_OPENSSL=ON')
configure(*options)
make()
diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py
index 1693c4b160..bc83cb2b65 100644
--- a/var/spack/repos/builtin/packages/cryptopp/package.py
+++ b/var/spack/repos/builtin/packages/cryptopp/package.py
@@ -8,8 +8,8 @@ class Cryptopp(Package):
public-key encryption (RSA, DSA), and a few obsolete/historical encryption
algorithms (MD5, Panama)."""
- homepage = "http://www.cryptopp.com/"
- url = "http://www.cryptopp.com/cryptopp563.zip"
+ homepage = "http://www.cryptopp.com"
+ base_url = "http://www.cryptopp.com"
version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')
@@ -25,7 +25,5 @@ class Cryptopp(Package):
install('libcryptopp.a', prefix.lib)
def url_for_version(self, version):
- version_tuple = tuple(v for v in iter(version))
- version_string = reduce(lambda vs, nv: vs + str(nv), version_tuple, "")
-
- return "%scryptopp%s.zip" % (Cryptopp.homepage, version_string)
+ version_string = str(version).replace('.', '')
+ return '%s/cryptopp%s.zip' % (Cryptopp.base_url, version_string)
diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py
index 9e684445c7..ab6305fc08 100644
--- a/var/spack/repos/builtin/packages/curl/package.py
+++ b/var/spack/repos/builtin/packages/curl/package.py
@@ -7,6 +7,7 @@ class Curl(Package):
homepage = "http://curl.haxx.se"
url = "http://curl.haxx.se/download/curl-7.46.0.tar.bz2"
+ version('7.47.1', '9ea3123449439bbd960cd25cf98796fb')
version('7.46.0', '9979f989a2a9930d10f1b3deeabc2148')
version('7.45.0', '62c1a352b28558f25ba6209214beadc8')
version('7.44.0', '6b952ca00e5473b16a11f05f06aa8dae')
diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py
index e40046b452..8d6e672f86 100644
--- a/var/spack/repos/builtin/packages/eigen/package.py
+++ b/var/spack/repos/builtin/packages/eigen/package.py
@@ -48,7 +48,7 @@ class Eigen(Package):
depends_on('metis', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')
- depends_on('SuiteSparse', when='+suitesparse')
+ depends_on('suite-sparse', when='+suitesparse')
depends_on('mpfr@2.3.0:') # Eigen 3.2.7 requires at least 2.3.0
depends_on('gmp')
diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py
index a2bf58f585..59f362ab46 100644
--- a/var/spack/repos/builtin/packages/espresso/package.py
+++ b/var/spack/repos/builtin/packages/espresso/package.py
@@ -32,6 +32,10 @@ class Espresso(Package):
if '+elpa' in spec and ('~mpi' in spec or '~scalapack' in spec):
raise RuntimeError(error.format(variant='elpa'))
+ def setup_environment(self, spack_env, run_env):
+ # Espresso copies every executable straight into prefix, without creating sub-folders
+ run_env.prepend_path('PATH', self.prefix)
+
def install(self, spec, prefix):
self.check_variants(spec)
diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py
new file mode 100644
index 0000000000..05712d7392
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gettext/package.py
@@ -0,0 +1,30 @@
+from spack import *
+
+class Gettext(Package):
+ """GNU internationalization (i18n) and localization (l10n) library."""
+ homepage = "https://www.gnu.org/software/gettext/"
+ url = "http://ftpmirror.gnu.org/gettext/gettext-0.19.7.tar.xz"
+
+ version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5')
+
+ def install(self, spec, prefix):
+ options = ['--disable-dependency-tracking',
+ '--disable-silent-rules',
+ '--disable-debug',
+ '--prefix=%s' % prefix,
+ '--with-included-gettext',
+ '--with-included-glib',
+ '--with-included-libcroco',
+ '--with-included-libunistring',
+ '--with-emacs',
+ '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share,
+ '--disable-java',
+ '--disable-csharp',
+ '--without-git', # Don't use VCS systems to create these archives
+ '--without-cvs',
+ '--without-xz']
+
+ configure(*options)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py
index 242ee100d7..8d93d48d1f 100644
--- a/var/spack/repos/builtin/packages/hypre/package.py
+++ b/var/spack/repos/builtin/packages/hypre/package.py
@@ -1,5 +1,5 @@
from spack import *
-import os
+import os, sys
class Hypre(Package):
"""Hypre is a library of high performance preconditioners that
@@ -12,7 +12,8 @@ class Hypre(Package):
version('2.10.1', 'dc048c4cabb3cd549af72591474ad674')
version('2.10.0b', '768be38793a35bb5d055905b271f5b8e')
- variant('shared', default=True, description="Build shared library version (disables static library)")
+ # hypre does not know how to build shared libraries on Darwin
+ variant('shared', default=sys.platform!='darwin', description="Build shared library version (disables static library)")
depends_on("mpi")
depends_on("blas")
diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py
index 0fcb56c164..29bc21b65c 100644
--- a/var/spack/repos/builtin/packages/libelf/package.py
+++ b/var/spack/repos/builtin/packages/libelf/package.py
@@ -38,9 +38,6 @@ class Libelf(Package):
provides('elf')
- sanity_check_files = ['include/libelf.h']
- sanity_check_dirs = ['lib']
-
def install(self, spec, prefix):
configure("--prefix=" + prefix,
"--enable-shared",
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index 280e400f69..1d25d59e50 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -52,7 +52,7 @@ class Llvm(Package):
depends_on('cmake @2.8.12.2:')
# Universal dependency
- depends_on('python@2.7:')
+ depends_on('python@2.7:2.8') # Seems not to support python 3.X.Y
# lldb dependencies
depends_on('ncurses', when='+lldb')
@@ -133,6 +133,21 @@ class Llvm(Package):
}
},
{
+ 'version' : '3.8.0',
+ 'md5':'07a7a74f3c6bd65de4702bf941b511a0',
+ 'resources' : {
+ 'compiler-rt' : 'd6fcbe14352ffb708e4d1ac2e48bb025',
+ 'openmp' : '8fd7cc35d48051613cf1e750e9f22e40',
+ 'polly' : '1b3b20f52d34a4024e21a4ea7112caa7',
+ 'libcxx' : 'd6e0bdbbee39f7907ad74fd56d03b88a',
+ 'libcxxabi' : 'bbe6b4d72c7c5978550d370af529bcf7',
+ 'clang' : 'cc99e7019bb74e6459e80863606250c5',
+ 'clang-tools-extra' : 'c2344f50e0eea0b402f0092a80ddc036',
+ 'lldb' : 'a5da35ed9cc8c8817ee854e3dbfba00e',
+ 'llvm-libunwind' : '162ade468607f153cca12be90b5194fa',
+ }
+ },
+ {
'version' : '3.7.1',
'md5':'bf8b3a2c79e61212c5409041dfdbd319',
'resources' : {
diff --git a/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch b/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch
new file mode 100644
index 0000000000..b182b167b9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch
@@ -0,0 +1,22 @@
+# HG changeset patch
+# User Sean Farley <sean@mcs.anl.gov>
+# Date 1332269671 18000
+# Tue Mar 20 13:54:31 2012 -0500
+# Node ID b95c0c2e1d8bf8e3273f7d45e856f0c0127d998e
+# Parent 88049269953c67c3fdcc4309bf901508a875f0dc
+cmake: add gklib headers to install into include
+
+diff -r 88049269953c -r b95c0c2e1d8b libmetis/CMakeLists.txt
+Index: libmetis/CMakeLists.txt
+===================================================================
+--- a/libmetis/CMakeLists.txt Tue Mar 20 13:54:29 2012 -0500
++++ b/libmetis/CMakeLists.txt Tue Mar 20 13:54:31 2012 -0500
+@@ -12,6 +12,8 @@ endif()
+ if(METIS_INSTALL)
+ install(TARGETS metis
+ LIBRARY DESTINATION lib
+ RUNTIME DESTINATION lib
+ ARCHIVE DESTINATION lib)
++ install(FILES gklib_defs.h DESTINATION include)
++ install(FILES gklib_rename.h DESTINATION include)
+ endif()
diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py
index bbfc4de7d1..68b9f6fd30 100644
--- a/var/spack/repos/builtin/packages/metis/package.py
+++ b/var/spack/repos/builtin/packages/metis/package.py
@@ -24,7 +24,7 @@
##############################################################################
from spack import *
-
+import glob
class Metis(Package):
"""
@@ -49,6 +49,8 @@ class Metis(Package):
depends_on('gdb', when='+gdb')
+ patch('install_gklib_defs_rename.patch')
+
def install(self, spec, prefix):
options = []
@@ -80,4 +82,11 @@ class Metis(Package):
with working_dir(build_directory, create=True):
cmake(source_directory, *options)
make()
- make("install") \ No newline at end of file
+ make("install")
+
+ # install GKlib headers, which will be needed for ParMETIS
+ GKlib_dist = join_path(prefix.include,'GKlib')
+ mkdirp(GKlib_dist)
+ fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
+ for f in fs:
+ install(f, GKlib_dist)
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index e2b3654c19..b20dc8dd60 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class Mpich(Package):
"""MPICH is a high performance and widely portable implementation of
the Message Passing Interface (MPI) standard."""
@@ -46,14 +47,16 @@ class Mpich(Package):
provides('mpi@:3.0', when='@3:')
provides('mpi@:1.3', when='@1:')
- def setup_dependent_environment(self, module, spec, dep_spec):
- """For dependencies, make mpicc's use spack wrapper."""
- os.environ['MPICH_CC'] = os.environ['CC']
- os.environ['MPICH_CXX'] = os.environ['CXX']
- os.environ['MPICH_F77'] = os.environ['F77']
- os.environ['MPICH_F90'] = os.environ['FC']
- os.environ['MPICH_FC'] = os.environ['FC']
+ def setup_dependent_environment(self, env, dependent_spec):
+ env.set('MPICH_CC', spack_cc)
+ env.set('MPICH_CXX', spack_cxx)
+ env.set('MPICH_F77', spack_f77)
+ env.set('MPICH_F90', spack_f90)
+ env.set('MPICH_FC', spack_fc)
+ def setup_dependent_package(self, module, dep_spec):
+ """For dependencies, make mpicc's use spack wrapper."""
+ # FIXME : is this necessary? Shouldn't this be part of a contract with MPI providers?
module.mpicc = join_path(self.prefix.bin, 'mpicc')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
index 44a37903cc..5a254dfd00 100644
--- a/var/spack/repos/builtin/packages/mumps/package.py
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -20,10 +20,10 @@ class Mumps(Package):
variant('complex', default=True, description='Activate the compilation of cmumps and/or zmumps')
variant('idx64', default=False, description='Use int64_t/integer*8 as default index type')
-
+
depends_on('scotch + esmumps', when='~ptscotch+scotch')
depends_on('scotch + esmumps + mpi', when='+ptscotch')
- depends_on('metis', when='~parmetis+metis')
+ depends_on('metis', when='+metis')
depends_on('parmetis', when="+parmetis")
depends_on('blas')
depends_on('lapack')
@@ -38,11 +38,11 @@ class Mumps(Package):
def write_makefile_inc(self):
if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec:
raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi')
-
+
makefile_conf = ["LIBBLAS = -L%s -lblas" % self.spec['blas'].prefix.lib]
orderings = ['-Dpord']
-
+
if '+ptscotch' in self.spec or '+scotch' in self.spec:
join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '')
makefile_conf.extend(
@@ -54,15 +54,19 @@ class Mumps(Package):
if '+ptscotch' in self.spec:
orderings.append('-Dptscotch')
- if '+parmetis' in self.spec or '+metis' in self.spec:
+ if '+parmetis' in self.spec and '+metis' in self.spec:
libname = 'parmetis' if '+parmetis' in self.spec else 'metis'
makefile_conf.extend(
- ["IMETIS = -I%s" % self.spec[libname].prefix.include,
- "LMETIS = -L%s -l%s" % (self.spec[libname].prefix.lib, libname)])
+ ["IMETIS = -I%s" % self.spec['parmetis'].prefix.include,
+ "LMETIS = -L%s -l%s -L%s -l%s" % (self.spec['parmetis'].prefix.lib, 'parmetis',self.spec['metis'].prefix.lib, 'metis')])
+
+ orderings.append('-Dparmetis')
+ elif '+metis' in self.spec:
+ makefile_conf.extend(
+ ["IMETIS = -I%s" % self.spec['metis'].prefix.include,
+ "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')])
orderings.append('-Dmetis')
- if '+parmetis' in self.spec:
- orderings.append('-Dparmetis')
makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings)))
@@ -101,12 +105,12 @@ class Mumps(Package):
# compiler possible values are -DAdd_, -DAdd__ and/or -DUPPER
makefile_conf.append("CDEFS = -DAdd_")
-
+
makefile_inc_template = join_path(os.path.dirname(self.module.__file__),
'Makefile.inc')
with open(makefile_inc_template, "r") as fh:
makefile_conf.extend(fh.read().split('\n'))
-
+
with working_dir('.'):
with open("Makefile.inc", "w") as fh:
makefile_inc = '\n'.join(makefile_conf)
@@ -130,8 +134,9 @@ class Mumps(Package):
make_libs.append('zexamples')
self.write_makefile_inc()
-
- make(*make_libs)
+
+ # Build fails in parallel, at least on OS-X
+ make(*make_libs, parallel=False)
install_tree('lib', prefix.lib)
install_tree('include', prefix.include)
diff --git a/var/spack/repos/builtin/packages/muparser/package.py b/var/spack/repos/builtin/packages/muparser/package.py
new file mode 100644
index 0000000000..a1a9ff90e5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/muparser/package.py
@@ -0,0 +1,18 @@
+from spack import *
+
+class Muparser(Package):
+ """C++ math expression parser library."""
+ homepage = "http://muparser.beltoforion.de/"
+ url = "https://github.com/beltoforion/muparser/archive/v2.2.5.tar.gz"
+
+ version('2.2.5', '02dae671aa5ad955fdcbcd3fee313fb7')
+
+ def install(self, spec, prefix):
+ options = ['--disable-debug',
+ '--disable-dependency-tracking',
+ '--prefix=%s' % prefix]
+
+ configure(*options)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index af5ed1b088..e4e95f92af 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -123,7 +123,7 @@ class Mvapich2(Package):
count += 1
if count > 1:
raise RuntimeError('network variants are mutually exclusive (only one can be selected at a time)')
-
+ network_options = []
# From here on I can suppose that only one variant has been selected
if self.enabled(Mvapich2.PSM) in spec:
network_options = ["--with-device=ch3:psm"]
diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py
index 227362399a..b60a2c4e9a 100644
--- a/var/spack/repos/builtin/packages/netcdf/package.py
+++ b/var/spack/repos/builtin/packages/netcdf/package.py
@@ -43,6 +43,13 @@ class Netcdf(Package):
"--enable-dap"
]
+ # Make sure Netcdf links against Spack's curl
+ # Otherwise it may pick up system's curl, which could lead to link errors:
+ # /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0'
+ LIBS.append("-lcurl")
+ CPPFLAGS.append("-I%s" % spec['curl'].prefix.include)
+ LDFLAGS.append ("-L%s" % spec['curl'].prefix.lib)
+
if '+mpi' in spec:
config_args.append('--enable-parallel4')
diff --git a/var/spack/repos/builtin/packages/netlib-blas/package.py b/var/spack/repos/builtin/packages/netlib-blas/package.py
deleted file mode 100644
index 85e97323d3..0000000000
--- a/var/spack/repos/builtin/packages/netlib-blas/package.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from spack import *
-import os
-
-
-class NetlibBlas(Package):
- """Netlib reference BLAS"""
- homepage = "http://www.netlib.org/lapack/"
- url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
-
- version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf')
-
- variant('fpic', default=False, description="Build with -fpic compiler option")
-
- # virtual dependency
- provides('blas')
-
- # Doesn't always build correctly in parallel
- parallel = False
-
- def patch(self):
- os.symlink('make.inc.example', 'make.inc')
-
- mf = FileFilter('make.inc')
- mf.filter('^FORTRAN.*', 'FORTRAN = f90')
- mf.filter('^LOADER.*', 'LOADER = f90')
- mf.filter('^CC =.*', 'CC = cc')
-
- if '+fpic' in self.spec:
- mf.filter('^OPTS.*=.*', 'OPTS = -O2 -frecursive -fpic')
- mf.filter('^CFLAGS =.*', 'CFLAGS = -O3 -fpic')
-
-
- def install(self, spec, prefix):
- make('blaslib')
-
- # Tests that blas builds correctly
- make('blas_testing')
-
- # No install provided
- mkdirp(prefix.lib)
- install('librefblas.a', prefix.lib)
-
- # Blas virtual package should provide blas.a and libblas.a
- with working_dir(prefix.lib):
- symlink('librefblas.a', 'blas.a')
- symlink('librefblas.a', 'libblas.a')
diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py
index 741f4af421..c4b7ce3b04 100644
--- a/var/spack/repos/builtin/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py
@@ -1,16 +1,15 @@
from spack import *
+
class NetlibLapack(Package):
"""
- LAPACK version 3.X is a comprehensive FORTRAN library that does
- linear algebra operations including matrix inversions, least
- squared solutions to linear sets of equations, eigenvector
- analysis, singular value decomposition, etc. It is a very
- comprehensive and reputable package that has found extensive
- use in the scientific community.
+ LAPACK version 3.X is a comprehensive FORTRAN library that does linear algebra operations including matrix
+ inversions, least squared solutions to linear sets of equations, eigenvector analysis, singular value
+ decomposition, etc. It is a very comprehensive and reputable package that has found extensive use in the
+ scientific community.
"""
homepage = "http://www.netlib.org/lapack/"
- url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
+ url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
version('3.6.0', 'f2f6c67134e851fe189bb3ca1fbb5101')
version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf')
@@ -19,42 +18,34 @@ class NetlibLapack(Package):
version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70')
version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4')
- variant('shared', default=False, description="Build shared library version")
+ variant('debug', default=False, description='Activates the Debug build type')
+ variant('shared', default=True, description="Build shared library version")
+ variant('external-blas', default=False, description='Build lapack with an external blas')
+
+ variant('lapacke', default=True, description='Activates the build of the LAPACKE C interface')
# virtual dependency
+ provides('blas', when='~external-blas')
provides('lapack')
- # blas is a virtual dependency.
- depends_on('blas')
-
depends_on('cmake')
-
- # Doesn't always build correctly in parallel
- parallel = False
-
- @when('^netlib-blas')
- def get_blas_libs(self):
- blas = self.spec['netlib-blas']
- return [join_path(blas.prefix.lib, 'blas.a')]
-
-
- @when('^atlas')
- def get_blas_libs(self):
- blas = self.spec['atlas']
- return [join_path(blas.prefix.lib, l)
- for l in ('libf77blas.a', 'libatlas.a')]
-
+ depends_on('blas', when='+external-blas')
def install(self, spec, prefix):
- blas_libs = ";".join(self.get_blas_libs())
- cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs]
-
- if '+shared' in spec:
- cmake_args.append('-DBUILD_SHARED_LIBS=ON')
-
- cmake_args += std_cmake_args
-
- cmake(*cmake_args)
- make()
- make("install")
-
+ cmake_args = ['-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'),
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
+ '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')]
+ if '+external-blas' in spec:
+ # TODO : the mechanism to specify the library should be more general,
+ # TODO : but this provides a hook for an external blas
+ cmake_args.extend([
+ '-DUSE_OPTIMIZED_BLAS:BOOL=ON',
+ '-DBLAS_LIBRARIES:PATH=%s' % join_path(spec['blas'].prefix.lib, 'libblas.a')
+ ])
+
+ cmake_args.extend(std_cmake_args)
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
index 22d538560e..c3e6822cdf 100644
--- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
@@ -1,8 +1,9 @@
from spack import *
+import sys
class NetlibScalapack(Package):
"""ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines"""
-
+
homepage = "http://www.netlib.org/scalapack/"
url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
@@ -11,16 +12,16 @@ class NetlibScalapack(Package):
version('2.0.0', '9e76ae7b291be27faaad47cfc256cbfe')
# versions before 2.0.0 are not using cmake and requires blacs as
# a separated package
-
+
variant('shared', default=True, description='Build the shared library version')
variant('fpic', default=False, description="Build with -fpic compiler option")
-
+
provides('scalapack')
-
+
depends_on('mpi')
depends_on('lapack')
-
- def install(self, spec, prefix):
+
+ def install(self, spec, prefix):
options = [
"-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else 'OFF'),
"-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else 'ON'),
@@ -32,19 +33,19 @@ class NetlibScalapack(Package):
"-DCMAKE_C_FLAGS=-fPIC",
"-DCMAKE_Fortran_FLAGS=-fPIC"
])
-
+
options.extend(std_cmake_args)
-
+
with working_dir('spack-build', create=True):
cmake('..', *options)
make()
make("install")
- def setup_dependent_environment(self, module, spec, dependent_spec):
- # TODO treat OS that are not Linux...
- lib_suffix = '.so' if '+shared' in spec['scalapack'] else '.a'
+ def setup_dependent_package(self, module, dependent_spec):
+ spec = self.spec
+ lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so'
+ lib_suffix = lib_dsuffix if '+shared' in spec else '.a'
- spec['scalapack'].fc_link = '-L%s -lscalapack' % spec['scalapack'].prefix.lib
- spec['scalapack'].cc_link = spec['scalapack'].fc_link
- spec['scalapack'].libraries = [join_path(spec['scalapack'].prefix.lib,
- 'libscalapack%s' % lib_suffix)]
+ spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib
+ spec.cc_link = spec.fc_link
+ spec.libraries = [join_path(spec.prefix.lib, 'libscalapack%s' % lib_suffix)]
diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py
new file mode 100644
index 0000000000..06acb96736
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oce/package.py
@@ -0,0 +1,47 @@
+from spack import *
+import platform
+
+class Oce(Package):
+ """
+ Open CASCADE Community Edition:
+ patches/improvements/experiments contributed by users over the official Open CASCADE library.
+ """
+ homepage = "https://github.com/tpaviot/oce"
+ url = "https://github.com/tpaviot/oce/archive/OCE-0.17.tar.gz"
+
+ version('0.17.1', '36c67b87093c675698b483454258af91')
+ version('0.17' , 'f1a89395c4b0d199bea3db62b85f818d')
+ version('0.16.1', '4d591b240c9293e879f50d86a0cb2bb3')
+ version('0.16' , '7a4b4df5a104d75a537e25e7dd387eca')
+ version('0.15' , '7ec541a1c350ca8a684f74980e48801c')
+
+ depends_on('cmake@2.8:')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+ options.extend([
+ '-DOCE_INSTALL_PREFIX=%s' % prefix,
+ '-DOCE_BUILD_SHARED_LIB:BOOL=ON',
+ '-DOCE_BUILD_TYPE:STRING=Release',
+ '-DOCE_DATAEXCHANGE:BOOL=ON',
+ '-DOCE_DISABLE_X11:BOOL=ON',
+ '-DOCE_DRAW:BOOL=OFF',
+ '-DOCE_MODEL:BOOL=ON',
+ '-DOCE_MULTITHREAD_LIBRARY:STRING=NONE', # FIXME: add tbb
+ '-DOCE_OCAF:BOOL=ON',
+ '-DOCE_USE_TCL_TEST_FRAMEWORK:BOOL=OFF',
+ '-DOCE_VISUALISATION:BOOL=OFF',
+ '-DOCE_WITH_FREEIMAGE:BOOL=OFF',
+ '-DOCE_WITH_GL2PS:BOOL=OFF',
+ '-DOCE_WITH_OPENCL:BOOL=OFF'
+ ])
+
+ if platform.system() == 'Darwin':
+ options.extend([
+ '-DOCE_OSX_USE_COCOA:BOOL=ON',
+ ])
+
+ cmake('.', *options)
+
+ make("install/strip")
diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py
index 38b355159d..6e99c23652 100644
--- a/var/spack/repos/builtin/packages/octave/package.py
+++ b/var/spack/repos/builtin/packages/octave/package.py
@@ -62,7 +62,7 @@ class Octave(Package):
depends_on('qrupdate', when='+qrupdate')
#depends_on('qscintilla', when='+qscintilla) # TODO: add package
depends_on('qt', when='+qt')
- depends_on('SuiteSparse', when='+suitesparse')
+ depends_on('suite-sparse',when='+suitesparse')
depends_on('zlib', when='+zlib')
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 3c909360a4..1d10f217c4 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -1,10 +1,13 @@
from spack import *
+import sys
class Openblas(Package):
"""OpenBLAS: An optimized BLAS library"""
homepage = "http://www.openblas.net"
url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+ version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1')
+ version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc')
version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
# virtual dependency
@@ -12,16 +15,24 @@ class Openblas(Package):
provides('lapack')
def install(self, spec, prefix):
- make('libs', 'netlib', 'shared', 'CC=cc', 'FC=f77')
+ extra=[]
+ if spec.satisfies('@0.2.16'):
+ extra.extend([
+ 'BUILD_LAPACK_DEPRECATED=1' # fix missing _dggsvd_ and _sggsvd_
+ ])
+
+ make('libs', 'netlib', 'shared', 'CC=cc', 'FC=f77',*extra)
+ make("tests")
make('install', "PREFIX='%s'" % prefix)
+ lib_dsuffix = 'dylib' if sys.platform == 'darwin' else 'so'
# Blas virtual package should provide blas.a and libblas.a
with working_dir(prefix.lib):
symlink('libopenblas.a', 'blas.a')
symlink('libopenblas.a', 'libblas.a')
- symlink('libopenblas.so', 'libblas.so')
+ symlink('libopenblas.%s' % lib_dsuffix, 'libblas.%s' % lib_dsuffix)
# Lapack virtual package should provide liblapack.a
with working_dir(prefix.lib):
symlink('libopenblas.a', 'liblapack.a')
- symlink('libopenblas.so', 'liblapack.so')
+ symlink('libopenblas.%s' % lib_dsuffix, 'liblapack.%s' % lib_dsuffix)
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index e4484af8c5..9a127f1812 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -41,12 +41,13 @@ class Openmpi(Package):
def url_for_version(self, version):
return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
- def setup_dependent_environment(self, module, spec, dep_spec):
- """For dependencies, make mpicc's use spack wrapper."""
- os.environ['OMPI_CC'] = 'cc'
- os.environ['OMPI_CXX'] = 'c++'
- os.environ['OMPI_FC'] = 'f90'
- os.environ['OMPI_F77'] = 'f77'
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ spack_env.set('OMPI_CC', spack_cc)
+ spack_env.set('OMPI_CXX', spack_cxx)
+ spack_env.set('OMPI_FC', spack_fc)
+ spack_env.set('OMPI_F77', spack_f77)
+
def install(self, spec, prefix):
config_args = ["--prefix=%s" % prefix,
diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py
new file mode 100644
index 0000000000..1e2969fe64
--- /dev/null
+++ b/var/spack/repos/builtin/packages/p4est/package.py
@@ -0,0 +1,34 @@
+from spack import *
+
+class P4est(Package):
+ """Dynamic management of a collection (a forest) of adaptive octrees in parallel"""
+ homepage = "http://www.p4est.org"
+ url = "http://p4est.github.io/release/p4est-1.1.tar.gz"
+
+ version('1.1', '37ba7f4410958cfb38a2140339dbf64f')
+
+ # disabled by default so the build works on cluster front-end nodes
+ variant('tests', default=False, description='Run small tests')
+
+ depends_on('mpi')
+
+ def install(self, spec, prefix):
+ options = ['--enable-mpi',
+ '--enable-shared',
+ '--disable-vtk-binary',
+ '--without-blas',
+ 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL',
+ 'CFLAGS=-O2',
+ 'CC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # TODO: use ENV variables or MPI class wrappers
+ 'CXX=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
+ 'FC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ 'F77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'),
+ ]
+
+ configure('--prefix=%s' % prefix, *options)
+
+ make()
+ if '+tests' in self.spec:
+ make("check")
+
+ make("install")
diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py
index 910e0aa9f9..53d69e28d9 100644
--- a/var/spack/repos/builtin/packages/papi/package.py
+++ b/var/spack/repos/builtin/packages/papi/package.py
@@ -13,6 +13,7 @@ class Papi(Package):
homepage = "http://icl.cs.utk.edu/papi/index.html"
url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.4.1.tar.gz"
+ version('5.4.3', '3211b5a5bb389fe692370f5cf4cc2412')
version('5.4.1', '9134a99219c79767a11463a76b0b01a2')
version('5.3.0', '367961dd0ab426e5ae367c2713924ffb')
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index ccf2d14c06..c16054816c 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -16,9 +16,9 @@ class Paraview(Package):
variant('osmesa', default=False, description='Enable OSMesa support')
variant('qt', default=False, description='Enable Qt support')
- variant('opengl2', default=False, description='Enable OPengl2 backend')
+ variant('opengl2', default=False, description='Enable OpenGL2 backend')
- depends_on('python', when='+python')
+ depends_on('python@2:2.7', when='+python')
depends_on('py-numpy', when='+python')
depends_on('py-matplotlib', when='+python')
depends_on('tcl', when='+tcl')
@@ -37,11 +37,11 @@ class Paraview(Package):
#depends_on('protobuf') # version mismatches?
#depends_on('sqlite') # external version not supported
depends_on('zlib')
-
+
def url_for_version(self, version):
"""Handle ParaView version-based custom URLs."""
return self._url_str % (version.up_to(2), version)
-
+
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
diff --git a/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch b/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch
new file mode 100644
index 0000000000..e4f2729483
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch
@@ -0,0 +1,71 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index ca945dd..aff8b5f 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -23,7 +23,7 @@ else()
+ set(ParMETIS_LIBRARY_TYPE STATIC)
+ endif()
+
+-include(${GKLIB_PATH}/GKlibSystem.cmake)
++include_directories(${GKLIB_PATH})
+
+ # List of paths that the compiler will search for header files.
+ # i.e., the -I equivalent
+@@ -33,7 +33,7 @@ include_directories(${GKLIB_PATH})
+ include_directories(${METIS_PATH}/include)
+
+ # List of directories that cmake will look for CMakeLists.txt
+-add_subdirectory(${METIS_PATH}/libmetis ${CMAKE_BINARY_DIR}/libmetis)
++find_library(METIS_LIBRARY metis PATHS ${METIS_PATH}/lib)
+ add_subdirectory(include)
+ add_subdirectory(libparmetis)
+ add_subdirectory(programs)
+diff --git a/libparmetis/CMakeLists.txt b/libparmetis/CMakeLists.txt
+index 9cfc8a7..e0c4de7 100644
+--- a/libparmetis/CMakeLists.txt
++++ b/libparmetis/CMakeLists.txt
+@@ -5,7 +5,10 @@ file(GLOB parmetis_sources *.c)
+ # Create libparmetis
+ add_library(parmetis ${ParMETIS_LIBRARY_TYPE} ${parmetis_sources})
+ # Link with metis and MPI libraries.
+-target_link_libraries(parmetis metis ${MPI_LIBRARIES})
++target_link_libraries(parmetis ${METIS_LIBRARY} ${MPI_LIBRARIES})
++if(UNIX)
++ target_link_libraries(parmetis m)
++endif()
+ set_target_properties(parmetis PROPERTIES LINK_FLAGS "${MPI_LINK_FLAGS}")
+
+ install(TARGETS parmetis
+diff --git a/libparmetis/parmetislib.h b/libparmetis/parmetislib.h
+index c1daeeb..07511f6 100644
+--- a/libparmetis/parmetislib.h
++++ b/libparmetis/parmetislib.h
+@@ -20,13 +20,12 @@
+
+ #include <parmetis.h>
+
+-#include "../metis/libmetis/gklib_defs.h"
++#include <gklib_defs.h>
+
+-#include <mpi.h>
++#include <mpi.h>
+
+ #include <rename.h>
+ #include <defs.h>
+ #include <struct.h>
+ #include <macros.h>
+ #include <proto.h>
+-
+diff --git a/programs/parmetisbin.h b/programs/parmetisbin.h
+index e26cd2d..d156480 100644
+--- a/programs/parmetisbin.h
++++ b/programs/parmetisbin.h
+@@ -19,7 +19,7 @@
+ #include <GKlib.h>
+ #include <parmetis.h>
+
+-#include "../metis/libmetis/gklib_defs.h"
++#include <gklib_defs.h>
+ #include "../libparmetis/rename.h"
+ #include "../libparmetis/defs.h"
+ #include "../libparmetis/struct.h"
diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py
index c897dec7e4..f5b8b6de91 100644
--- a/var/spack/repos/builtin/packages/parmetis/package.py
+++ b/var/spack/repos/builtin/packages/parmetis/package.py
@@ -25,9 +25,6 @@
from spack import *
-# FIXME : lot of code is duplicated from packages/metis/package.py . Inheriting from there may reduce
-# FIXME : the installation rules to just a few lines
-
class Parmetis(Package):
"""
@@ -43,13 +40,17 @@ class Parmetis(Package):
variant('debug', default=False, description='Builds the library in debug mode')
variant('gdb', default=False, description='Enables gdb support')
- variant('idx64', default=False, description='Use int64_t as default index type')
- variant('double', default=False, description='Use double precision floating point types')
-
depends_on('cmake @2.8:') # build dependency
depends_on('mpi')
- # FIXME : this should conflict with metis as it builds its own version internally
+ patch('enable_external_metis.patch')
+ depends_on('metis')
+
+ # bug fixes from PETSc developers
+ # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/
+ patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch')
+ # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/
+ patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch')
depends_on('gdb', when='+gdb')
@@ -63,8 +64,8 @@ class Parmetis(Package):
# FIXME : Once a contract is defined, MPI compilers should be retrieved indirectly via spec['mpi'] in case
# FIXME : they use a non-standard name
- options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=metis_source),
- '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=metis_source),
+ options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=spec['metis'].prefix.include),
+ '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=spec['metis'].prefix),
'-DCMAKE_C_COMPILER:STRING=mpicc',
'-DCMAKE_CXX_COMPILER:STRING=mpicxx'])
@@ -78,18 +79,7 @@ class Parmetis(Package):
if '+gdb' in spec:
options.append('-DGDB:BOOL=ON')
- metis_header = join_path(metis_source, 'include', 'metis.h')
-
- if '+idx64' in spec:
- filter_file('IDXTYPEWIDTH 32', 'IDXTYPEWIDTH 64', metis_header)
-
- if '+double' in spec:
- filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
-
with working_dir(build_directory, create=True):
cmake(source_directory, *options)
make()
make("install")
- # Parmetis build system doesn't allow for an external metis to be used, but doesn't copy the required
- # metis header either
- install(metis_header, self.prefix.include)
diff --git a/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch b/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch
new file mode 100644
index 0000000000..e6b8056c21
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch
@@ -0,0 +1,77 @@
+From 1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b Mon Sep 17 00:00:00 2001
+From: Jed Brown <jed@59A2.org>
+Date: Fri, 12 Oct 2012 15:45:10 -0500
+Subject: [PATCH] ParMetis bug fixes reported by John Fettig [petsc-maint
+ #133631]
+
+'''
+I have also reported to to Karypis but have received zero
+response and he hasn't released any updates to the original release
+either. At least he approved my forum posting so that other people
+can see the bug and the fix.
+http://glaros.dtc.umn.edu/gkhome/node/837
+'''
+
+Hg-commit: 1c2b9fe39201d404b493885093b5992028b9b8d4
+---
+ libparmetis/xyzpart.c | 12 ++++++------
+ 1 file changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/libparmetis/xyzpart.c b/libparmetis/xyzpart.c
+index 3a2c289..63abfcb 100644
+--- a/libparmetis/xyzpart.c
++++ b/libparmetis/xyzpart.c
+@@ -104,7 +104,7 @@ void IRBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+
+ for (i=0; i<nbins; i++)
+ emarkers[i] = gmin + (gmax-gmin)*i/nbins;
+- emarkers[nbins] = gmax*(1.0+2.0*REAL_EPSILON);
++ emarkers[nbins] = gmax*(1.0+copysign(1.0,gmax)*2.0*REAL_EPSILON);
+
+ /* get into a iterative backet boundary refinement */
+ for (l=0; l<5; l++) {
+@@ -152,7 +152,7 @@ void IRBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+ }
+ }
+ nemarkers[0] = gmin;
+- nemarkers[nbins] = gmax*(1.0+2.0*REAL_EPSILON);
++ nemarkers[nbins] = gmax*(1.0+copysign(1.0,gmax)*2.0*REAL_EPSILON);
+ rcopy(nbins+1, nemarkers, emarkers);
+ }
+
+@@ -218,7 +218,7 @@ void RBBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+
+ emarkers[0] = gmin;
+ emarkers[1] = gsum/gnvtxs;
+- emarkers[2] = gmax*(1.0+2.0*REAL_EPSILON);
++ emarkers[2] = gmax*(1.0+(gmax < 0 ? -1. : 1.)*2.0*REAL_EPSILON);
+ cnbins = 2;
+
+ /* get into a iterative backet boundary refinement */
+@@ -227,7 +227,7 @@ void RBBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+ iset(cnbins, 0, lcounts);
+ rset(cnbins, 0, lsums);
+ for (j=0, i=0; i<nvtxs;) {
+- if (cand[i].key < emarkers[j+1]) {
++ if (cand[i].key <= emarkers[j+1]) {
+ lcounts[j]++;
+ lsums[j] += cand[i].key;
+ i++;
+@@ -272,12 +272,12 @@ void RBBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+
+ rsorti(cnbins, nemarkers);
+ rcopy(cnbins, nemarkers, emarkers);
+- emarkers[cnbins] = gmax*(1.0+2.0*REAL_EPSILON);
++ emarkers[cnbins] = gmax*(1.0+(gmax < 0 ? -1. : 1.)*2.0*REAL_EPSILON);
+ }
+
+ /* assign the coordinate to the appropriate bin */
+ for (j=0, i=0; i<nvtxs;) {
+- if (cand[i].key < emarkers[j+1]) {
++ if (cand[i].key <= emarkers[j+1]) {
+ bxyz[cand[i].val*ndims+k] = j;
+ i++;
+ }
+--
+2.1.1.1.g1fb337f
+
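
The copysign change in this patch matters because the original upper bin marker, gmax*(1.0+2.0*REAL_EPSILON), only lies above gmax when gmax is positive; for a negative gmax it lands just below it, so the largest coordinate can fall outside the last bin. A quick standalone check of the arithmetic (Python stand-in for the C expression, using a double-precision epsilon):

import math

eps = 2.220446049250313e-16   # machine epsilon for IEEE doubles, stand-in for REAL_EPSILON

def upper_marker_old(gmax):
    return gmax * (1.0 + 2.0 * eps)

def upper_marker_fixed(gmax):
    return gmax * (1.0 + math.copysign(1.0, gmax) * 2.0 * eps)

gmax = -1.0
print(upper_marker_old(gmax) > gmax)     # False: the marker ends up below gmax
print(upper_marker_fixed(gmax) > gmax)   # True: the marker is nudged just above gmax
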
diff --git a/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch b/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch
new file mode 100644
index 0000000000..9651d55347
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parmetis/pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch
@@ -0,0 +1,35 @@
+From 82409d68aa1d6cbc70740d0f35024aae17f7d5cb Mon Sep 17 00:00:00 2001
+From: Sean Farley <sean@mcs.anl.gov>
+Date: Tue, 20 Mar 2012 11:59:44 -0500
+Subject: [PATCH] parmetis: fix bug reported by jfettig; '<' to '<=' in xyzpart
+
+Hg-commit: 2dd2eae596acaabbc80e0ef875182616f868dbc2
+---
+ libparmetis/xyzpart.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/libparmetis/xyzpart.c b/libparmetis/xyzpart.c
+index 307aed9..3a2c289 100644
+--- a/libparmetis/xyzpart.c
++++ b/libparmetis/xyzpart.c
+@@ -111,7 +111,7 @@ void IRBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+ /* determine bucket counts */
+ iset(nbins, 0, lcounts);
+ for (j=0, i=0; i<nvtxs;) {
+- if (cand[i].key < emarkers[j+1]) {
++ if (cand[i].key <= emarkers[j+1]) {
+ lcounts[j]++;
+ i++;
+ }
+@@ -158,7 +158,7 @@ void IRBinCoordinates(ctrl_t *ctrl, graph_t *graph, idx_t ndims, real_t *xyz,
+
+ /* assign the coordinate to the appropriate bin */
+ for (j=0, i=0; i<nvtxs;) {
+- if (cand[i].key < emarkers[j+1]) {
++ if (cand[i].key <= emarkers[j+1]) {
+ bxyz[cand[i].val*ndims+k] = j;
+ i++;
+ }
+--
+2.1.1.1.g1fb337f
+
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index efe172fc08..3dd117eed1 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -61,14 +61,24 @@ class Petsc(Package):
errors = ['incompatible variants given'] + errors
raise RuntimeError('\n'.join(errors))
else:
- compiler_opts = [
- '--with-mpi=1',
- '--with-mpi-dir=%s' % self.spec['mpi'].prefix,
- ]
+ if self.compiler.name == "clang":
+ compiler_opts = [
+ '--with-mpi=1',
+ '--with-cc=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # Avoid confusing PETSc config by clang: warning: argument unused during compilation
+ '--with-cxx=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
+ '--with-fc=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ '--with-f77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'),
+ ]
+ else:
+ compiler_opts = [
+ '--with-mpi=1',
+ '--with-mpi-dir=%s' % self.spec['mpi'].prefix,
+ ]
return compiler_opts
def install(self, spec, prefix):
- options = []
+ options = ['--with-debugging=0',
+ '--with-ssl=0']
options.extend(self.mpi_dependent_options())
options.extend([
'--with-precision=%s' % ('double' if '+double' in spec else 'single'),
@@ -90,3 +100,7 @@ class Petsc(Package):
# PETSc has its own way of doing parallel make.
make('MAKE_NP=%s' % make_jobs, parallel=False)
make("install")
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ # set up PETSC_DIR for everyone using PETSc package
+ spack_env.set('PETSC_DIR', self.prefix)
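
With the setup_dependent_environment hook added above, every package that depends on PETSc gets PETSC_DIR in its build environment (the SLEPc package added later in this diff relies on exactly that). A minimal, hypothetical consumer package (name, URL and checksum are placeholders):

import os
from spack import *

class ExamplePetscApp(Package):
    """Illustrative package that builds against PETSc."""
    homepage = "http://example.com"
    url      = "http://example.com/example-petsc-app-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')   # placeholder checksum

    depends_on('petsc')

    def install(self, spec, prefix):
        # PETSC_DIR was placed in the build environment by PETSc's
        # setup_dependent_environment(); no need to pass spec['petsc'].prefix by hand.
        make('PETSC_DIR=%s' % os.environ['PETSC_DIR'])
        make('install', 'PREFIX=%s' % prefix)
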
diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index e7c6cf0264..4fee99098e 100644
--- a/var/spack/repos/builtin/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
@@ -1,11 +1,12 @@
from spack import *
+
class PyNose(Package):
"""nose extends the test loading and running features of unittest,
making it easier to write, find and run tests."""
homepage = "https://pypi.python.org/pypi/nose"
- url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"
+ url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"
version('1.3.4', '6ed7169887580ddc9a8e16048d38274d')
version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16')
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index 0354811186..a08e612df6 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -1,24 +1,43 @@
from spack import *
class PyNumpy(Package):
- """array processing for numbers, strings, records, and objects."""
- homepage = "https://pypi.python.org/pypi/numpy"
+ """NumPy is the fundamental package for scientific computing with Python.
+ It contains among other things: a powerful N-dimensional array object,
+ sophisticated (broadcasting) functions, tools for integrating C/C++ and
+ Fortran code, and useful linear algebra, Fourier transform, and random
+ number capabilities"""
+ homepage = "http://www.numpy.org/"
url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"
- version('1.9.1', '78842b73560ec378142665e712ae4ad9')
- version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645')
+ version('1.10.4', 'aed294de0aa1ac7bd3f9745f4f1968ad')
+ version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645')
+ version('1.9.1', '78842b73560ec378142665e712ae4ad9')
- variant('blas', default=True)
+
+ variant('blas', default=True)
+ variant('lapack', default=True)
extends('python')
depends_on('py-nose')
- depends_on('netlib-blas+fpic', when='+blas')
- depends_on('netlib-lapack+shared', when='+blas')
+ depends_on('blas', when='+blas')
+ depends_on('lapack', when='+lapack')
def install(self, spec, prefix):
+ libraries = []
+ library_dirs = []
+
if '+blas' in spec:
+ libraries.append('blas')
+ library_dirs.append(spec['blas'].prefix.lib)
+ if '+lapack' in spec:
+ libraries.append('lapack')
+ library_dirs.append(spec['lapack'].prefix.lib)
+
+ if '+blas' in spec or '+lapack' in spec:
with open('site.cfg', 'w') as f:
f.write('[DEFAULT]\n')
- f.write('libraries=lapack,blas\n')
- f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix))
+ f.write('libraries=%s\n' % ','.join(libraries))
+ f.write('library_dirs=%s\n' % ':'.join(library_dirs))
+
python('setup.py', 'install', '--prefix=%s' % prefix)
+
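
With both variants enabled, the loop above collects 'blas' and 'lapack' plus their lib directories, so the generated site.cfg would look roughly like this (install prefixes are illustrative):

[DEFAULT]
libraries=blas,lapack
library_dirs=/path/to/blas/lib:/path/to/lapack/lib
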
diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index 3a1124cc15..c2161c90c4 100644
--- a/var/spack/repos/builtin/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
@@ -2,11 +2,12 @@ from spack import *
class PyScipy(Package):
"""Scientific Library for Python."""
- homepage = "https://pypi.python.org/pypi/scipy"
+ homepage = "http://www.scipy.org/"
url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz"
- version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a')
+ version('0.17.0', '5ff2971e1ce90e762c59d2cd84837224')
version('0.15.1', 'be56cd8e60591d6332aac792a5880110')
+ version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a')
extends('python')
depends_on('py-nose')
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index dd240d1ea0..f5237c3b57 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -1,11 +1,14 @@
+import functools
+import glob
+import inspect
import os
import re
from contextlib import closing
-from llnl.util.lang import match_predicate
-from spack.util.environment import *
-from spack import *
import spack
+from llnl.util.lang import match_predicate
+from spack import *
+from spack.util.environment import *
class Python(Package):
@@ -90,35 +93,46 @@ class Python(Package):
return os.path.join(self.python_lib_dir, 'site-packages')
- def setup_dependent_environment(self, module, spec, ext_spec):
- """Called before python modules' install() methods.
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ # TODO: do this only for actual extensions.
+
+ # Set PYTHONPATH to include site-packages dir for the
+ # extension and any other python extensions it depends on.
+ python_paths = []
+ for d in extension_spec.traverse():
+ if d.package.extends(self.spec):
+ python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
+
+ pythonpath = ':'.join(python_paths)
+ spack_env.set('PYTHONPATH', pythonpath)
+
+ # For run time environment set only the path for extension_spec and prepend it to PYTHONPATH
+ if extension_spec.package.extends(self.spec):
+ run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir))
+
+
+ def setup_dependent_package(self, module, ext_spec):
+ """
+ Called before python modules' install() methods.
In most cases, extensions will only need to have one line::
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ python('setup.py', 'install', '--prefix=%s' % prefix)
"""
# Python extension builds can have a global python executable function
if self.version >= Version("3.0.0") and self.version < Version("4.0.0"):
- module.python = Executable(join_path(spec.prefix.bin, 'python3'))
+ module.python = Executable(join_path(self.spec.prefix.bin, 'python3'))
else:
- module.python = Executable(join_path(spec.prefix.bin, 'python'))
+ module.python = Executable(join_path(self.spec.prefix.bin, 'python'))
# Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir)
module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir)
module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir)
- # Make the site packages directory if it does not exist already.
- mkdirp(module.site_packages_dir)
-
- # Set PYTHONPATH to include site-packages dir for the
- # extension and any other python extensions it depends on.
- python_paths = []
- for d in ext_spec.traverse():
- if d.package.extends(self.spec):
- python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
- os.environ['PYTHONPATH'] = ':'.join(python_paths)
-
+ # Make the site packages directory for extensions, if it does not exist already.
+ if ext_spec.package.is_extension:
+ mkdirp(module.site_packages_dir)
# ========================================================================
# Handle specifics of activating and deactivating python modules.
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index ef5f05601f..d08e8e81e1 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -56,9 +56,12 @@ class Qt(Package):
depends_on("libxcb")
- def setup_dependent_environment(self, module, spec, dep_spec):
- """Dependencies of Qt find it using the QTDIR environment variable."""
- os.environ['QTDIR'] = self.prefix
+ def setup_environment(self, spack_env, env):
+ env.set('QTDIR', self.prefix)
+
+
+ def setup_dependent_environment(self, spack_env, run_env, dspec):
+ spack_env.set('QTDIR', self.prefix)
def patch(self):
diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py
index 6b6242362c..e13677e4d2 100644
--- a/var/spack/repos/builtin/packages/ruby/package.py
+++ b/var/spack/repos/builtin/packages/ruby/package.py
@@ -1,9 +1,8 @@
from spack import *
-import spack
-import os
+
class Ruby(Package):
- """A dynamic, open source programming language with a focus on
+ """A dynamic, open source programming language with a focus on
simplicity and productivity."""
homepage = "https://www.ruby-lang.org/"
@@ -15,11 +14,23 @@ class Ruby(Package):
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
-
make()
make("install")
- def setup_dependent_environment(self, module, spec, ext_spec):
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ # TODO: do this only for actual extensions.
+ # Set GEM_PATH to include dependent gem directories
+ ruby_paths = []
+ for d in extension_spec.traverse():
+ if d.package.extends(self.spec):
+ ruby_paths.append(d.prefix)
+
+ spack_env.set_path('GEM_PATH', ruby_paths)
+
+ # The actual installation path for this gem
+ spack_env.set('GEM_HOME', extension_spec.prefix)
+
+ def setup_dependent_package(self, module, ext_spec):
"""Called before ruby modules' install() methods. Sets GEM_HOME
and GEM_PATH to values appropriate for the package being built.
@@ -28,14 +39,5 @@ class Ruby(Package):
gem('install', '<gem-name>.gem')
"""
# Ruby extension builds have global ruby and gem functions
- module.ruby = Executable(join_path(spec.prefix.bin, 'ruby'))
- module.gem = Executable(join_path(spec.prefix.bin, 'gem'))
-
- # Set GEM_PATH to include dependent gem directories
- ruby_paths = []
- for d in ext_spec.traverse():
- if d.package.extends(self.spec):
- ruby_paths.append(d.prefix)
- os.environ['GEM_PATH'] = ':'.join(ruby_paths)
- # The actual installation path for this gem
- os.environ['GEM_HOME'] = ext_spec.prefix
+ module.ruby = Executable(join_path(self.spec.prefix.bin, 'ruby'))
+ module.gem = Executable(join_path(self.spec.prefix.bin, 'gem'))
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index d1aed78e0e..b7894e4d2b 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -5,24 +5,35 @@ class Silo(Package):
data to binary, disk files."""
homepage = "http://wci.llnl.gov/simulation/computer-codes/silo"
- url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz"
+ base_url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo"
+ version('4.10.2', '9ceac777a2f2469ac8cef40f4fab49c8')
+ version('4.9', 'a83eda4f06761a86726e918fc55e782a')
version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9')
variant('fortran', default=True, description='Enable Fortran support')
+ variant('silex', default=False, description='Builds Silex, a GUI for viewing Silo files')
- depends_on("hdf5")
+ depends_on('hdf5')
+ depends_on('qt', when='+silex')
def install(self, spec, prefix):
config_args = [
'--enable-fortran' if '+fortran' in spec else '--disable-fortran',
+ '--enable-silex' if '+silex' in spec else '--disable-silex',
]
+ if '+silex' in spec:
+ config_args.append('--with-Qt-dir=%s' % spec['qt'].prefix)
+
configure(
- "--prefix=%s" % prefix,
- "--with-hdf5=%s,%s" % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib),
- "--with-zlib=%s,%s" % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib),
+ '--prefix=%s' % prefix,
+ '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib),
+ '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib),
*config_args)
make()
- make("install")
+ make('install')
+
+ def url_for_version(self, version):
+ return '%s/silo-%s/silo-%s.tar.gz' % (Silo.base_url, version, version)
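
The url_for_version hook above lets one base_url serve every listed version. A quick sanity check of the scheme, runnable as a plain Python snippet outside Spack:

base_url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo"
for version in ['4.10.2', '4.9', '4.8']:
    print('%s/silo-%s/silo-%s.tar.gz' % (base_url, version, version))
# The 4.8 line, for example, reproduces the URL the package used before:
# https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz
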
diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py
new file mode 100644
index 0000000000..8b5f24394f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/slepc/package.py
@@ -0,0 +1,49 @@
+import os
+from spack import *
+
+
+class Slepc(Package):
+ """
+ Scalable Library for Eigenvalue Computations.
+ """
+
+ homepage = "http://www.grycap.upv.es/slepc"
+ url = "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz"
+
+ version('3.6.2', '2ab4311bed26ccf7771818665991b2ea3a9b15f97e29fd13911ab1293e8e65df')
+
+ variant('arpack', default=False, description='Enables Arpack wrappers')
+
+ depends_on('petsc')
+ depends_on('arpack-ng~mpi',when='+arpack^petsc~mpi')
+ depends_on('arpack-ng+mpi',when='+arpack^petsc+mpi')
+
+ def install(self, spec, prefix):
+ # set SLEPC_DIR for installation
+ os.environ['SLEPC_DIR'] = self.stage.source_path
+
+ options = []
+
+ if '+arpack' in spec:
+ options.extend([
+ '--with-arpack-dir=%s' % spec['arpack-ng'].prefix.lib,
+ ])
+ if 'arpack-ng~mpi' in spec:
+ options.extend([
+ '--with-arpack-flags=-larpack'
+ ])
+ else:
+ options.extend([
+ '--with-arpack-flags=-lparpack,-larpack'
+ ])
+
+ configure('--prefix=%s' % prefix, *options)
+
+ make('MAKE_NP=%s' % make_jobs, parallel=False)
+ #FIXME:
+ # make('test')
+ make('install')
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ # set up SLEPC_DIR for everyone using SLEPc package
+ spack_env.set('SLEPC_DIR', self.prefix)
diff --git a/var/spack/repos/builtin/packages/SuiteSparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py
index 6e130d118f..b57f9967c3 100644
--- a/var/spack/repos/builtin/packages/SuiteSparse/package.py
+++ b/var/spack/repos/builtin/packages/suite-sparse/package.py
@@ -1,7 +1,7 @@
from spack import *
-class Suitesparse(Package):
+class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
@@ -24,4 +24,3 @@ class Suitesparse(Package):
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
-
diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py
new file mode 100644
index 0000000000..9a94de8ba5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/superlu-dist/package.py
@@ -0,0 +1,64 @@
+from spack import *
+
+class SuperluDist(Package):
+ """A general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines."""
+ homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/"
+ url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz"
+
+ version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae')
+ version('4.2', 'ae9fafae161f775fbac6eba11e530a65')
+ version('4.1', '4edee38cc29f687bd0c8eb361096a455')
+ version('4.0', 'c0b98b611df227ae050bc1635c6940e0')
+
+ depends_on ('mpi')
+ depends_on ('blas')
+ depends_on ('lapack')
+ depends_on ('parmetis')
+ depends_on ('metis')
+
+ def install(self, spec, prefix):
+ makefile_inc = []
+ makefile_inc.extend([
+ 'PLAT = _mac_x',
+ 'DSuperLUroot = %s' % self.stage.source_path, #self.stage.path, prefix
+ 'DSUPERLULIB = $(DSuperLUroot)/lib/libsuperlu_dist.a',
+ 'BLASDEF = -DUSE_VENDOR_BLAS',
+                 'BLASLIB = -L%s -llapack -L%s -lblas' % (spec['lapack'].prefix.lib, spec['blas'].prefix.lib), # FIXME: avoid hardcoding blas/lapack lib names
+ 'METISLIB = -L%s -lmetis' % spec['metis'].prefix.lib,
+ 'PARMETISLIB = -L%s -lparmetis' % spec['parmetis'].prefix.lib,
+ 'FLIBS =',
+ 'LIBS = $(DSUPERLULIB) $(BLASLIB) $(PARMETISLIB) $(METISLIB)',
+ 'ARCH = ar',
+ 'ARCHFLAGS = cr',
+ 'RANLIB = true',
+ 'CC = mpicc', # FIXME avoid hardcoding MPI compiler names
+ 'CFLAGS = -fPIC -std=c99 -O2 -I%s -I%s' %(spec['parmetis'].prefix.include, spec['metis'].prefix.include),
+ 'NOOPTS = -fPIC -std=c99',
+ 'FORTRAN = mpif77',
+ 'F90FLAGS = -O2',
+ 'LOADER = mpif77',
+ 'LOADOPTS =',
+ 'CDEFS = -DAdd_'
+ ])
+
+ #with working_dir('src'):
+ with open('make.inc', 'w') as fh:
+ fh.write('\n'.join(makefile_inc))
+
+ make("lib", parallel=False)
+
+ # FIXME:
+ # cd "EXAMPLE" do
+ # system "make"
+
+ # need to install by hand
+ headers_location = join_path(self.prefix.include,'superlu_dist')
+ mkdirp(headers_location)
+ mkdirp(prefix.lib)
+ # FIXME: fetch all headers in the folder automatically
+ for header in ['Cnames.h','cublas_utils.h','dcomplex.h','html_mainpage.h','machines.h','old_colamd.h','psymbfact.h','superlu_ddefs.h','superlu_defs.h','superlu_enum_consts.h','superlu_zdefs.h','supermatrix.h','util_dist.h']:
+ superludist_header = join_path(self.stage.source_path, 'SRC/',header)
+ install(superludist_header, headers_location)
+
+ superludist_lib = join_path(self.stage.source_path, 'lib/libsuperlu_dist.a')
+ install(superludist_lib,self.prefix.lib)
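
One way to resolve the FIXME about fetching headers automatically would be to glob SRC/ for *.h instead of maintaining the explicit list; a sketch of the replacement loop inside install(), assuming every header under SRC/ is meant to be public:

import glob   # would go at the top of package.py

# inside install(), replacing the hand-written header list:
headers_location = join_path(self.prefix.include, 'superlu_dist')
mkdirp(headers_location)
for header in glob.glob(join_path(self.stage.source_path, 'SRC', '*.h')):
    install(header, headers_location)
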
diff --git a/var/spack/repos/builtin/packages/tbb/package.py b/var/spack/repos/builtin/packages/tbb/package.py
new file mode 100644
index 0000000000..56ffe4c27c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tbb/package.py
@@ -0,0 +1,79 @@
+from spack import *
+import os
+import glob
+
+class Tbb(Package):
+ """Widely used C++ template library for task parallelism.
+ Intel Threading Building Blocks (Intel TBB) lets you easily write parallel
+ C++ programs that take full advantage of multicore performance, that are
+ portable and composable, and that have future-proof scalability.
+ """
+ homepage = "http://www.threadingbuildingblocks.org/"
+
+ # Only version-specific URL's work for TBB
+ version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1', url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz')
+
+ def coerce_to_spack(self,tbb_build_subdir):
+ for compiler in ["icc","gcc","clang"]:
+ fs = glob.glob(join_path(tbb_build_subdir,"*.%s.inc" % compiler ))
+ for f in fs:
+ lines = open(f).readlines()
+ of = open(f,"w")
+ for l in lines:
+ if l.strip().startswith("CPLUS ="):
+ of.write("# coerced to spack\n")
+ of.write("CPLUS = $(CXX)\n")
+                    elif l.strip().startswith("CONLY ="):
+ of.write("# coerced to spack\n")
+ of.write("CONLY = $(CC)\n")
+ else:
+ of.write(l);
+
+ def install(self, spec, prefix):
+ #
+ # we need to follow TBB's compiler selection logic to get the proper build + link flags
+ # but we still need to use spack's compiler wrappers
+ # to accomplish this, we do two things:
+ #
+ # * Look at the spack spec to determine which compiler we should pass to tbb's Makefile
+ #
+ # * patch tbb's build system to use the compiler wrappers (CC, CXX) for
+ # icc, gcc, clang
+ # (see coerce_to_spack())
+ #
+ self.coerce_to_spack("build")
+
+ if spec.satisfies('%clang'):
+ tbb_compiler = "clang"
+ elif spec.satisfies('%intel'):
+ tbb_compiler = "icc"
+ else:
+ tbb_compiler = "gcc"
+
+
+ mkdirp(prefix)
+ mkdirp(prefix.lib)
+
+ #
+ # tbb does not have a configure script or make install target
+ # we simply call make, and try to put the pieces together
+ #
+ make("compiler=%s" %(tbb_compiler))
+
+ # install headers to {prefix}/include
+ install_tree('include',prefix.include)
+
+ # install libs to {prefix}/lib
+ tbb_lib_names = ["libtbb",
+ "libtbbmalloc",
+ "libtbbmalloc_proxy"]
+
+ for lib_name in tbb_lib_names:
+ # install release libs
+ fs = glob.glob(join_path("build","*release",lib_name + ".*"))
+ for f in fs:
+ install(f, prefix.lib)
+ # install debug libs if they exist
+ fs = glob.glob(join_path("build","*debug",lib_name + "_debug.*"))
+ for f in fs:
+ install(f, prefix.lib)
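
To make the effect of coerce_to_spack() concrete, here is the same rewrite applied to a small in-memory sample of a compiler .inc fragment (sample content only; the real files live under TBB's build/ directory):

sample = """CPLUS = g++
CONLY = gcc
CPLUS_FLAGS = -O2
"""

coerced = []
for l in sample.splitlines():
    if l.strip().startswith("CPLUS ="):
        coerced += ["# coerced to spack", "CPLUS = $(CXX)"]
    elif l.strip().startswith("CONLY ="):
        coerced += ["# coerced to spack", "CONLY = $(CC)"]
    else:
        coerced.append(l)

print("\n".join(coerced))
# CPLUS and CONLY now point at the Spack compiler wrappers; every other
# line, including CPLUS_FLAGS, passes through untouched.
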