147 files changed, 3465 insertions, 752 deletions
diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000000..a1271a94fc --- /dev/null +++ b/.coveragerc @@ -0,0 +1,34 @@ +# -*- conf -*- +# .coveragerc to control coverage.py +[run] +branch = True +source = lib +omit = + lib/spack/spack/test/* + lib/spack/env/* + lib/spack/docs/* + lib/spack/external/* + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if False: + if __name__ == .__main__.: + +ignore_errors = True + +[html] +directory = htmlcov diff --git a/.gitignore b/.gitignore index 4b97de5d50..643e5d9b03 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,5 @@ /share/spack/dotkit /share/spack/modules /TAGS +/htmlcov +.coverage diff --git a/.style.yapf b/.style.yapf new file mode 100644 index 0000000000..4741fb4f3b --- /dev/null +++ b/.style.yapf @@ -0,0 +1,3 @@ +[style] +based_on_style = pep8 +column_limit = 80 diff --git a/.travis.yml b/.travis.yml index 1bed6b0874..4ff4d5f483 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,20 +6,32 @@ python: # Use new Travis infrastructure (Docker can't sudo yet) sudo: false -# No need to install any deps. -install: true +# Install coveralls to obtain code coverage +install: + - "pip install coveralls" + - "pip install flake8" before_install: # Need this for the git tests to succeed. - git config --global user.email "spack@example.com" - git config --global user.name "Test User" + # Need this to be able to compute the list of changed files + - git fetch origin develop:develop script: - . share/spack/setup-env.sh - spack compilers - spack config get compilers - - spack test - spack install -v libdwarf + # Run unit tests with code coverage + - coverage run bin/spack test + # Checks if the file that have been changed are flake8 conformant + - CHANGED_PYTHON_FILES=`git diff develop... --name-only | perl -ne 'print if /\.py$/'` + - if [[ ${CHANGED_PYTHON_FILES} ]] ; then flake8 --format pylint --config flake8.ini ${CHANGED_PYTHON_FILES} ; fi + + +after_success: + - coveralls notifications: email: @@ -1,7 +1,8 @@ ![image](share/spack/logo/spack-logo-text-64.png "Spack") ============ -[![Build Status](https://travis-ci.org/LLNL/spack.png?branch=develop)](https://travis-ci.org/LLNL/spack) +[![Build Status](https://travis-ci.org/LLNL/spack.svg?branch=develop)](https://travis-ci.org/LLNL/spack) +[![Coverage Status](https://coveralls.io/repos/github/LLNL/spack/badge.svg?branch=develop)](https://coveralls.io/github/LLNL/spack?branch=develop) Spack is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms diff --git a/flake8.ini b/flake8.ini new file mode 100644 index 0000000000..757c71705e --- /dev/null +++ b/flake8.ini @@ -0,0 +1,3 @@ +[flake8] +ignore = W391,F403 +max-line-length = 120
\ No newline at end of file diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 68f3d07b29..29791d98c4 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -372,25 +372,32 @@ how this is done is in :ref:`sec-specs`. ``spack compiler add`` ~~~~~~~~~~~~~~~~~~~~~~~ +An alias for ``spack compiler find``. + +.. _spack-compiler-find: + +``spack compiler find`` +~~~~~~~~~~~~~~~~~~~~~~~ + If you do not see a compiler in this list, but you want to use it with -Spack, you can simply run ``spack compiler add`` with the path to +Spack, you can simply run ``spack compiler find`` with the path to where the compiler is installed. For example:: - $ spack compiler add /usr/local/tools/ic-13.0.079 + $ spack compiler find /usr/local/tools/ic-13.0.079 ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml intel@13.0.079 -Or you can run ``spack compiler add`` with no arguments to force +Or you can run ``spack compiler find`` with no arguments to force auto-detection. This is useful if you do not know where compilers are installed, but you know that new compilers have been added to your ``PATH``. For example, using dotkit, you might do this:: $ module load gcc-4.9.0 - $ spack compiler add + $ spack compiler find ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml gcc@4.9.0 -This loads the environment module for gcc-4.9.0 to get it into the +This loads the environment module for gcc-4.9.0 to add it to ``PATH``, and then it adds the compiler to Spack. .. _spack-compiler-info: @@ -807,17 +814,22 @@ Environment Modules, you can get it with Spack: 1. Install with:: +.. code-block:: sh + spack install environment-modules 2. Activate with:: - MODULES_HOME=`spack location -i environment-modules` - MODULES_VERSION=`ls -1 $MODULES_HOME/Modules | head -1` - ${MODULES_HOME}/Modules/${MODULES_VERSION}/bin/add.modules +Add the following two lines to your ``.bashrc`` profile (or similar): + +.. code-block:: sh + + MODULES_HOME=`spack location -i environment-modules` + source ${MODULES_HOME}/Modules/init/bash + +In case you use a Unix shell other than bash, substitute ``bash`` by +the appropriate file in ``${MODULES_HOME}/Modules/init/``. -This adds to your ``.bashrc`` (or similar) files, enabling Environment -Modules when you log in. It will ask your permission before changing -any files. Spack and Environment Modules ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 519c0da232..31c676d4f5 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1831,6 +1831,42 @@ successfully find ``libdwarf.h`` and ``libdwarf.so``, without the packager having to provide ``--with-libdwarf=/path/to/libdwarf`` on the command line. +Compiler flags +~~~~~~~~~~~~~~ +In rare circumstances such as compiling and running small unit tests, a package +developer may need to know what are the appropriate compiler flags to enable +features like ``OpenMP``, ``c++11``, ``c++14`` and alike. To that end the +compiler classes in ``spack`` implement the following _properties_ : +``openmp_flag``, ``cxx11_flag``, ``cxx14_flag``, which can be accessed in a +package by ``self.compiler.cxx11_flag`` and alike. Note that the implementation +is such that if a given compiler version does not support this feature, an +error will be produced. Therefore package developers can also use these properties +to assert that a compiler supports the requested feature. 
This is handy when a +package supports additional variants like + +.. code-block:: python + + variant('openmp', default=True, description="Enable OpenMP support.") + + + +Message Parsing Interface (MPI) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +It is common for high performance computing software/packages to use ``MPI``. +As a result of conretization, a given package can be built using different +implementations of MPI such as ``Openmpi``, ``MPICH`` or ``IntelMPI``. +In some scenarios to configure a package one have to provide it with appropriate MPI +compiler wrappers such as ``mpicc``, ``mpic++``. +However different implementations of ``MPI`` may have different names for those +wrappers. In order to make package's ``install()`` method indifferent to the +choice ``MPI`` implementation, each package which implements ``MPI`` sets up +``self.spec.mpicc``, ``self.spec.mpicxx``, ``self.spec.mpifc`` and ``self.spec.mpif77`` +to point to ``C``, ``C++``, ``Fortran 90`` and ``Fortran 77`` ``MPI`` wrappers. +Package developers are advised to use these variables, for example ``self.spec['mpi'].mpicc`` +instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for +the reasons outlined above. + + Forking ``install()`` ~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 18fd8f7bdb..cb07a2ffea 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -162,7 +162,7 @@ fi # It doesn't work with -rpath. # This variable controls whether they are added. add_rpaths=true -if [[ mode == ld && $OSTYPE == darwin* ]]; then +if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then for arg in "$@"; do if [[ $arg == -r ]]; then add_rpaths=false diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index aee11f061f..9108e1d0e3 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -136,9 +136,7 @@ for path in _tmp_candidates: # don't add a second username if it's already unique by user. 
if not _tmp_user in path: tmp_dirs.append(join_path(path, '%u', 'spack-stage')) - -for path in _tmp_candidates: - if not path in tmp_dirs: + else: tmp_dirs.append(join_path(path, 'spack-stage')) # Whether spack should allow installation of unsafe versions of diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index eb72f2a6b4..cd9f647ddf 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -175,8 +175,8 @@ def set_build_environment_variables(pkg, env): # Add any pkgconfig directories to PKG_CONFIG_PATH pkg_config_dirs = [] for p in dep_prefixes: - for libdir in ('lib', 'lib64'): - pcdir = join_path(p, libdir, 'pkgconfig') + for maybe in ('lib', 'lib64', 'share'): + pcdir = join_path(p, maybe, 'pkgconfig') if os.path.isdir(pcdir): pkg_config_dirs.append(pcdir) env.set_path('PKG_CONFIG_PATH', pkg_config_dirs) diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 3e58e82184..a8e9e2a7a5 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -44,10 +44,10 @@ def setup_parser(subparser): scopes = spack.config.config_scopes - # Add - add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.') - add_parser.add_argument('add_paths', nargs=argparse.REMAINDER) - add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, + # Find + find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.') + find_parser.add_argument('add_paths', nargs=argparse.REMAINDER) + find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") # Remove @@ -70,7 +70,7 @@ def setup_parser(subparser): help="Configuration scope to read from.") -def compiler_add(args): +def compiler_find(args): """Search either $PATH or a list of paths for compilers and add them to Spack's configuration.""" paths = args.add_paths @@ -136,7 +136,8 @@ def compiler_list(args): def compiler(parser, args): - action = { 'add' : compiler_add, + action = { 'add' : compiler_find, + 'find' : compiler_find, 'remove' : compiler_remove, 'rm' : compiler_remove, 'info' : compiler_info, diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 315d9fc926..a67f5c0c13 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -22,21 +22,16 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import os import shutil -import argparse +import sys import llnl.util.tty as tty -from llnl.util.lang import partition_list -from llnl.util.filesystem import mkdirp - import spack.cmd +from llnl.util.filesystem import mkdirp from spack.modules import module_types from spack.util.string import * -from spack.spec import Spec - description ="Manipulate modules and dotkits." 
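To make the ``Compiler flags`` section added to the packaging guide above concrete, here is a minimal sketch of how a package recipe might consume the new ``openmp_flag``/``cxx11_flag`` properties. The package name, URL, checksum, and autotools-style build are hypothetical and not part of this change; only ``self.compiler.cxx11_flag``, ``self.compiler.openmp_flag``, and the ``openmp`` variant pattern come from the documentation being added here.

```python
from spack import *


class Example(Package):
    """Hypothetical package showing use of the compiler flag properties."""
    homepage = "http://www.example.com"
    url      = "http://www.example.com/example-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    variant('openmp', default=True, description="Enable OpenMP support.")

    def install(self, spec, prefix):
        # Accessing these properties also asserts support: an unsupported
        # compiler/version aborts the build with an explanatory message.
        cxxflags = [self.compiler.cxx11_flag]
        if '+openmp' in spec:
            cxxflags.append(self.compiler.openmp_flag)

        configure('--prefix=%s' % prefix,
                  'CXXFLAGS=%s' % ' '.join(cxxflags))
        make()
        make("install")
```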
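Similarly, the Message Passing Interface (MPI) section above advises using the wrapper attributes set on the concretized MPI spec rather than hard-coded wrapper paths. A hedged sketch follows; the package, URL, checksum, and configure options are placeholders, while ``spec['mpi'].mpicc`` and friends are the attributes described in that section.

```python
from spack import *


class MpiExample(Package):
    """Hypothetical MPI-based package."""
    homepage = "http://www.example.com"
    url      = "http://www.example.com/mpi-example-1.0.tar.gz"

    version('1.0', 'fedcba9876543210fedcba9876543210')

    depends_on('mpi')

    def install(self, spec, prefix):
        # Use whichever wrappers the chosen MPI implementation provides,
        # instead of hard-coding e.g.
        # join_path(spec['mpi'].prefix.bin, 'mpicc')
        configure('--prefix=%s' % prefix,
                  'CC=%s'  % spec['mpi'].mpicc,
                  'CXX=%s' % spec['mpi'].mpicxx,
                  'FC=%s'  % spec['mpi'].mpifc,
                  'F77=%s' % spec['mpi'].mpif77)
        make()
        make("install")
```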
@@ -98,7 +93,6 @@ def module_refresh(): cls(spec).write() - def module(parser, args): if args.module_command == 'refresh': module_refresh() diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 656873a2f0..3277e15548 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -23,87 +23,106 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import argparse -import xml.etree.ElementTree as ET -import itertools -import re -import os import codecs +import os +import time +import xml.dom.minidom +import xml.etree.ElementTree as ET import llnl.util.tty as tty -from llnl.util.filesystem import * - import spack +import spack.cmd +from llnl.util.filesystem import * from spack.build_environment import InstallError from spack.fetch_strategy import FetchError -import spack.cmd description = "Run package installation as a unit test, output formatted results." -def setup_parser(subparser): - subparser.add_argument( - '-j', '--jobs', action='store', type=int, - help="Explicitly set number of make jobs. Default is #cpus.") - - subparser.add_argument( - '-n', '--no-checksum', action='store_true', dest='no_checksum', - help="Do not check packages against checksum") - subparser.add_argument( - '-o', '--output', action='store', help="test output goes in this file") +def setup_parser(subparser): + subparser.add_argument('-j', + '--jobs', + action='store', + type=int, + help="Explicitly set number of make jobs. Default is #cpus.") - subparser.add_argument( - 'package', nargs=argparse.REMAINDER, help="spec of package to install") + subparser.add_argument('-n', + '--no-checksum', + action='store_true', + dest='no_checksum', + help="Do not check packages against checksum") + subparser.add_argument('-o', '--output', action='store', help="test output goes in this file") -class JunitResultFormat(object): - def __init__(self): - self.root = ET.Element('testsuite') - self.tests = [] - - def add_test(self, buildId, testResult, buildInfo=None): - self.tests.append((buildId, testResult, buildInfo)) - - def write_to(self, stream): - self.root.set('tests', '{0}'.format(len(self.tests))) - for buildId, testResult, buildInfo in self.tests: - testcase = ET.SubElement(self.root, 'testcase') - testcase.set('classname', buildId.name) - testcase.set('name', buildId.stringId()) - if testResult == TestResult.FAILED: - failure = ET.SubElement(testcase, 'failure') - failure.set('type', "Build Error") - failure.text = buildInfo - elif testResult == TestResult.SKIPPED: - skipped = ET.SubElement(testcase, 'skipped') - skipped.set('type', "Skipped Build") - skipped.text = buildInfo - ET.ElementTree(self.root).write(stream) + subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install") class TestResult(object): PASSED = 0 FAILED = 1 SKIPPED = 2 + ERRORED = 3 -class BuildId(object): - def __init__(self, spec): - self.name = spec.name - self.version = spec.version - self.hashId = spec.dag_hash() - - def stringId(self): - return "-".join(str(x) for x in (self.name, self.version, self.hashId)) - - def __hash__(self): - return hash((self.name, self.version, self.hashId)) - - def __eq__(self, other): - if not isinstance(other, BuildId): - return False +class TestSuite(object): + def __init__(self, filename): + self.filename = filename + self.root = ET.Element('testsuite') + self.tests = [] - return ((self.name, self.version, self.hashId) == - 
(other.name, other.version, other.hashId)) + def __enter__(self): + return self + + def append(self, item): + if not isinstance(item, TestCase): + raise TypeError('only TestCase instances may be appended to a TestSuite instance') + self.tests.append(item) # Append the item to the list of tests + + def __exit__(self, exc_type, exc_val, exc_tb): + # Prepare the header for the entire test suite + number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests) + self.root.set('errors', str(number_of_errors)) + number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests) + self.root.set('failures', str(number_of_failures)) + self.root.set('tests', str(len(self.tests))) + + for item in self.tests: + self.root.append(item.element) + + with open(self.filename, 'wb') as file: + xml_string = ET.tostring(self.root) + xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml() + file.write(xml_string) + + +class TestCase(object): + + results = { + TestResult.PASSED: None, + TestResult.SKIPPED: 'skipped', + TestResult.FAILED: 'failure', + TestResult.ERRORED: 'error', + } + + def __init__(self, classname, name, time=None): + self.element = ET.Element('testcase') + self.element.set('classname', str(classname)) + self.element.set('name', str(name)) + if time is not None: + self.element.set('time', str(time)) + self.result_type = None + + def set_result(self, result_type, message=None, error_type=None, text=None): + self.result_type = result_type + result = TestCase.results[self.result_type] + if result is not None and result is not TestResult.PASSED: + subelement = ET.SubElement(self.element, result) + if error_type is not None: + subelement.set('type', error_type) + if message is not None: + subelement.set('message', str(message)) + if text is not None: + subelement.text = text def fetch_log(path): @@ -114,46 +133,76 @@ def fetch_log(path): def failed_dependencies(spec): - return set(childSpec for childSpec in spec.dependencies.itervalues() if not - spack.repo.get(childSpec).installed) - - -def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): - # Post-order traversal is not strictly required but it makes sense to output - # tests for dependencies first. - for spec in topSpec.traverse(order='post'): - if spec not in newInstalls: - continue - - failedDeps = failed_dependencies(spec) - package = spack.repo.get(spec) - if failedDeps: - result = TestResult.SKIPPED - dep = iter(failedDeps).next() - depBID = BuildId(dep) - errOutput = "Skipped due to failed dependency: {0}".format( - depBID.stringId()) - elif (not package.installed) and (not package.stage.source_path): - result = TestResult.FAILED - errOutput = "Failure to fetch package resources." 
- elif not package.installed: - result = TestResult.FAILED - lines = getLogFunc(package.build_log_path) - errMessages = list(line for line in lines if - re.search('error:', line, re.IGNORECASE)) - errOutput = errMessages if errMessages else lines[-10:] - errOutput = '\n'.join(itertools.chain( - [spec.to_yaml(), "Errors:"], errOutput, - ["Build Log:", package.build_log_path])) - else: - result = TestResult.PASSED - errOutput = None - - bId = BuildId(spec) - output.add_test(bId, result, errOutput) + return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed) + + +def get_top_spec_or_die(args): + specs = spack.cmd.parse_specs(args.package, concretize=True) + if len(specs) > 1: + tty.die("Only 1 top-level package can be specified") + top_spec = iter(specs).next() + return top_spec + + +def install_single_spec(spec, number_of_jobs): + package = spack.repo.get(spec) + + # If it is already installed, skip the test + if spack.repo.get(spec).installed: + testcase = TestCase(package.name, package.spec.short_spec, time=0.0) + testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed') + return testcase + + # If it relies on dependencies that did not install, skip + if failed_dependencies(spec): + testcase = TestCase(package.name, package.spec.short_spec, time=0.0) + testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed') + return testcase + + # Otherwise try to install the spec + try: + start_time = time.time() + package.do_install(keep_prefix=False, + keep_stage=True, + ignore_deps=False, + make_jobs=number_of_jobs, + verbose=True, + fake=False) + duration = time.time() - start_time + testcase = TestCase(package.name, package.spec.short_spec, duration) + testcase.set_result(TestResult.PASSED) + except InstallError: + # An InstallError is considered a failure (the recipe didn't work correctly) + duration = time.time() - start_time + # Try to get the log + lines = fetch_log(package.build_log_path) + text = '\n'.join(lines) + testcase = TestCase(package.name, package.spec.short_spec, duration) + testcase.set_result(TestResult.FAILED, message='Installation failure', text=text) + + except FetchError: + # A FetchError is considered an error (we didn't even start building) + duration = time.time() - start_time + testcase = TestCase(package.name, package.spec.short_spec, duration) + testcase.set_result(TestResult.ERRORED, message='Unable to fetch package') + + return testcase + + +def get_filename(args, top_spec): + if not args.output: + fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash()) + output_directory = join_path(os.getcwd(), 'test-output') + if not os.path.exists(output_directory): + os.mkdir(output_directory) + output_filename = join_path(output_directory, fname) + else: + output_filename = args.output + return output_filename def test_install(parser, args): + # Check the input if not args.package: tty.die("install requires a package argument") @@ -162,50 +211,15 @@ def test_install(parser, args): tty.die("The -j option must be a positive integer!") if args.no_checksum: - spack.do_checksum = False # TODO: remove this global. 
- - specs = spack.cmd.parse_specs(args.package, concretize=True) - if len(specs) > 1: - tty.die("Only 1 top-level package can be specified") - topSpec = iter(specs).next() - - newInstalls = set() - for spec in topSpec.traverse(): - package = spack.repo.get(spec) - if not package.installed: - newInstalls.add(spec) - - if not args.output: - bId = BuildId(topSpec) - outputDir = join_path(os.getcwd(), "test-output") - if not os.path.exists(outputDir): - os.mkdir(outputDir) - outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId())) - else: - outputFpath = args.output - - for spec in topSpec.traverse(order='post'): - # Calling do_install for the top-level package would be sufficient but - # this attempts to keep going if any package fails (other packages which - # are not dependents may succeed) - package = spack.repo.get(spec) - if (not failed_dependencies(spec)) and (not package.installed): - try: - package.do_install( - keep_prefix=False, - keep_stage=True, - ignore_deps=False, - make_jobs=args.jobs, - verbose=True, - fake=False) - except InstallError: - pass - except FetchError: - pass - - jrf = JunitResultFormat() - handled = {} - create_test_output(topSpec, newInstalls, jrf) - - with open(outputFpath, 'wb') as F: - jrf.write_to(F) + spack.do_checksum = False # TODO: remove this global. + + # Get the one and only top spec + top_spec = get_top_spec_or_die(args) + # Get the filename of the test + output_filename = get_filename(args, top_spec) + # TEST SUITE + with TestSuite(output_filename) as test_suite: + # Traverse in post order : each spec is a test case + for spec in top_spec.traverse(order='post'): + test_case = install_single_spec(spec, args.jobs) + test_suite.append(test_case) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index d38c0b00b1..030dc449fc 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -94,9 +94,6 @@ class Compiler(object): # Names of generic arguments used by this compiler arg_rpath = '-Wl,-rpath,%s' - # argument used to get C++11 options - cxx11_flag = "-std=c++11" - def __init__(self, cspec, cc, cxx, f77, fc): def check(exe): @@ -117,6 +114,37 @@ class Compiler(object): def version(self): return self.spec.version + # This property should be overridden in the compiler subclass if + # OpenMP is supported by that compiler + @property + def openmp_flag(self): + # If it is not overridden, assume it is not supported and warn the user + tty.die("The compiler you have chosen does not currently support OpenMP.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") + + + # This property should be overridden in the compiler subclass if + # C++11 is supported by that compiler + @property + def cxx11_flag(self): + # If it is not overridden, assume it is not supported and warn the user + tty.die("The compiler you have chosen does not currently support C++11.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") + + + # This property should be overridden in the compiler subclass if + # C++14 is supported by that compiler + @property + def cxx14_flag(self): + # If it is not overridden, assume it is not supported and warn the user + tty.die("The compiler you have chosen does not currently support C++14.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") + + + # # Compiler classes have methods for querying the version of # specific compiler executables. 
This is used when discovering compilers. @@ -202,6 +230,10 @@ class Compiler(object): return None successful = [key for key in parmap(check, checks) if key is not None] + # The 'successful' list is ordered like the input paths. + # Reverse it here so that the dict creation (last insert wins) + # does not spoil the intented precedence. + successful.reverse() return dict(((v, p, s), path) for v, p, s, path in successful) @classmethod diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index e406d86a24..8c646905c7 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -26,6 +26,8 @@ import re import spack.compiler as cpr from spack.compiler import * from spack.util.executable import * +import llnl.util.tty as tty +from spack.version import ver class Clang(Compiler): # Subclasses use possible names of C compiler @@ -47,6 +49,29 @@ class Clang(Compiler): 'f77' : 'f77', 'fc' : 'f90' } + @property + def is_apple(self): + ver_string = str(self.version) + return ver_string.endswith('-apple') + + @property + def openmp_flag(self): + if self.is_apple: + tty.die("Clang from Apple does not support Openmp yet.") + else: + return "-fopenmp" + + @property + def cxx11_flag(self): + if self.is_apple: + # FIXME: figure out from which version Apple's clang supports c++11 + return "-std=c++11" + else: + if self.version < ver('3.3'): + tty.die("Only Clang 3.3 and above support c++11.") + else: + return "-std=c++11" + @classmethod def default_version(self, comp): """The '--version' option works for clang compilers. diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index 64214db32d..91c498ac82 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -50,13 +50,24 @@ class Gcc(Compiler): 'fc' : 'gcc/gfortran' } @property + def openmp_flag(self): + return "-fopenmp" + + @property def cxx11_flag(self): if self.version < ver('4.3'): tty.die("Only gcc 4.3 and above support c++11.") elif self.version < ver('4.7'): - return "-std=gnu++0x" + return "-std=c++0x" + else: + return "-std=c++11" + + @property + def cxx14_flag(self): + if self.version < ver('4.8'): + tty.die("Only gcc 4.8 and above support c++14.") else: - return "-std=gnu++11" + return "-std=c++14" @classmethod def fc_version(cls, fc): diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index 69e9764790..9b1cf07c36 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -23,6 +23,8 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * +import llnl.util.tty as tty +from spack.version import ver class Intel(Compiler): # Subclasses use possible names of C compiler @@ -44,6 +46,13 @@ class Intel(Compiler): 'fc' : 'intel/ifort' } @property + def openmp_flag(self): + if self.version < ver('16.0'): + return "-openmp" + else: + return "-qopenmp" + + @property def cxx11_flag(self): if self.version < ver('11.1'): tty.die("Only intel 11.1 and above support c++11.") @@ -68,5 +77,3 @@ class Intel(Compiler): """ return get_compiler_version( comp, '--version', r'\((?:IFORT|ICC)\) ([^ ]+)') - - diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py index 527a05a090..e9038c1039 100644 --- a/lib/spack/spack/compilers/nag.py +++ b/lib/spack/spack/compilers/nag.py @@ -1,4 +1,5 @@ from spack.compiler import * +import llnl.util.tty as tty 
class Nag(Compiler): # Subclasses use possible names of C compiler @@ -20,6 +21,16 @@ class Nag(Compiler): 'f77' : 'nag/nagfor', 'fc' : 'nag/nagfor' } + @property + def openmp_flag(self): + return "-openmp" + + @property + def cxx11_flag(self): + # NAG does not have a C++ compiler + # However, it can be mixed with a compiler that does support it + return "-std=c++11" + @classmethod def default_version(self, comp): """The '-V' option works for nag compilers. diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py index c6a1078bd9..94c6b8365c 100644 --- a/lib/spack/spack/compilers/pgi.py +++ b/lib/spack/spack/compilers/pgi.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * +import llnl.util.tty as tty class Pgi(Compiler): # Subclasses use possible names of C compiler @@ -43,6 +44,15 @@ class Pgi(Compiler): 'f77' : 'pgi/pgfortran', 'fc' : 'pgi/pgfortran' } + @property + def openmp_flag(self): + return "-mp" + + @property + def cxx11_flag(self): + return "-std=c++11" + + @classmethod def default_version(cls, comp): """The '-V' option works for all the PGI compilers. @@ -54,4 +64,3 @@ class Pgi(Compiler): """ return get_compiler_version( comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target') - diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py index c1d55109a3..61a2e730dc 100644 --- a/lib/spack/spack/compilers/xl.py +++ b/lib/spack/spack/compilers/xl.py @@ -24,6 +24,8 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * +import llnl.util.tty as tty +from spack.version import ver class Xl(Compiler): # Subclasses use possible names of C compiler @@ -45,6 +47,10 @@ class Xl(Compiler): 'fc' : 'xl/xlf90' } @property + def openmp_flag(self): + return "-qsmp=omp" + + @property def cxx11_flag(self): if self.version < ver('13.1'): tty.die("Only xlC 13.1 and above have some c++11 support.") diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 4ea87bea7e..ce2c4e30c7 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -157,12 +157,26 @@ class URLFetchStrategy(FetchStrategy): tty.msg("Already downloaded %s" % self.archive_file) return + possible_files = self.stage.expected_archive_files + save_file = None + partial_file = None + if possible_files: + save_file = self.stage.expected_archive_files[0] + partial_file = self.stage.expected_archive_files[0] + '.part' + tty.msg("Trying to fetch from %s" % self.url) - curl_args = ['-O', # save file to disk + if partial_file: + save_args = ['-C', '-', # continue partial downloads + '-o', partial_file] # use a .part file + else: + save_args = ['-O'] + + curl_args = save_args + [ '-f', # fail on >400 errors '-D', '-', # print out HTML headers - '-L', self.url, ] + '-L', # resolve 3xx redirects + self.url, ] if sys.stdout.isatty(): curl_args.append('-#') # status bar when using a tty @@ -178,6 +192,9 @@ class URLFetchStrategy(FetchStrategy): if self.archive_file: os.remove(self.archive_file) + if partial_file and os.path.exists(partial_file): + os.remove(partial_file) + if spack.curl.returncode == 22: # This is a 404. Curl will print the error. 
raise FailedDownloadError( @@ -209,6 +226,9 @@ class URLFetchStrategy(FetchStrategy): "'spack clean <package>' to remove the bad archive, then fix", "your internet gateway issue and install again.") + if save_file: + os.rename(partial_file, save_file) + if not self.archive_file: raise FailedDownloadError(self.url) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index d797af287d..61624fbd70 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -211,7 +211,11 @@ class EnvModule(object): def remove(self): mod_file = self.file_name if os.path.exists(mod_file): - shutil.rmtree(mod_file, ignore_errors=True) + try: + os.remove(mod_file) # Remove the module file + os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up + except OSError: + pass # removedirs throws OSError on first non-empty directory found class Dotkit(EnvModule): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index d711752c20..84c47ee660 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -211,6 +211,18 @@ class Stage(object): return False @property + def expected_archive_files(self): + """Possible archive file paths.""" + paths = [] + if isinstance(self.fetcher, fs.URLFetchStrategy): + paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url))) + + if self.mirror_path: + paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) + + return paths + + @property def archive_file(self): """Path to the source archive within this stage directory.""" paths = [] diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 175a49428c..3c5edde66b 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -61,14 +61,14 @@ test_names = ['versions', 'optional_deps', 'make_executable', 'configure_guess', - 'unit_install', 'lock', 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', - 'cmd.uninstall'] + 'cmd.uninstall', + 'cmd.test_install'] def list_tests(): diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 0b1aeb2a8f..594cd6efe9 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -219,3 +219,27 @@ class CompilerTest(unittest.TestCase): ' '.join(test_command)) + def test_ld_deps_reentrant(self): + """Make sure ld -r is handled correctly on OS's where it doesn't + support rpaths.""" + os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1]) + + os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64" + reentrant_test_command = ['-r'] + test_command + self.check_ld('dump-args', reentrant_test_command, + 'ld ' + + '-rpath ' + self.prefix + '/lib ' + + '-rpath ' + self.prefix + '/lib64 ' + + + '-L' + self.dep1 + '/lib ' + + '-rpath ' + self.dep1 + '/lib ' + + + '-r ' + + ' '.join(test_command)) + + os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64" + self.check_ld('dump-args', reentrant_test_command, + 'ld ' + + '-L' + self.dep1 + '/lib ' + + '-r ' + + ' '.join(test_command)) diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py new file mode 100644 index 0000000000..2206c7bea1 --- /dev/null +++ b/lib/spack/spack/test/cmd/test_install.py @@ -0,0 +1,190 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import collections +from contextlib import contextmanager + +import StringIO + +FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) + +# Monkey-patch open to write module files to a StringIO instance +@contextmanager +def mock_open(filename, mode): + if not mode == 'wb': + raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open') + + FILE_REGISTRY[filename] = StringIO.StringIO() + + try: + yield FILE_REGISTRY[filename] + finally: + handle = FILE_REGISTRY[filename] + FILE_REGISTRY[filename] = handle.getvalue() + handle.close() + +import os +import itertools +import unittest + +import spack +import spack.cmd + + +# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python) +test_install = __import__("spack.cmd.test-install", fromlist=['test_install']) + + +class MockSpec(object): + def __init__(self, name, version, hashStr=None): + self.dependencies = {} + self.name = name + self.version = version + self.hash = hashStr if hashStr else hash((name, version)) + + def traverse(self, order=None): + for _, spec in self.dependencies.items(): + yield spec + yield self + #allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues()) + #return set(itertools.chain([self], allDeps)) + + def dag_hash(self): + return self.hash + + @property + def short_spec(self): + return '-'.join([self.name, str(self.version), str(self.hash)]) + + +class MockPackage(object): + def __init__(self, spec, buildLogPath): + self.name = spec.name + self.spec = spec + self.installed = False + self.build_log_path = buildLogPath + + def do_install(self, *args, **kwargs): + self.installed = True + + +class MockPackageDb(object): + def __init__(self, init=None): + self.specToPkg = {} + if init: + self.specToPkg.update(init) + + def get(self, spec): + return self.specToPkg[spec] + + +def mock_fetch_log(path): + return [] + +specX = MockSpec('X', "1.2.0") +specY = MockSpec('Y', "2.3.8") +specX.dependencies['Y'] = specY +pkgX = MockPackage(specX, 'logX') +pkgY = MockPackage(specY, 'logY') + + +class MockArgs(object): + def __init__(self, package): + self.package = package + self.jobs = None + self.no_checksum = False + self.output = None + + +# TODO: add test(s) where Y fails to install +class TestInstallTest(unittest.TestCase): + """ + Tests test-install where X->Y + """ + + def setUp(self): + super(TestInstallTest, self).setUp() + + # Monkey patch parse specs + def monkey_parse_specs(x, concretize): + if x == 'X': + return [specX] + elif x 
== 'Y': + return [specY] + return [] + + self.parse_specs = spack.cmd.parse_specs + spack.cmd.parse_specs = monkey_parse_specs + + # Monkey patch os.mkdirp + self.os_mkdir = os.mkdir + os.mkdir = lambda x: True + + # Monkey patch open + test_install.open = mock_open + + # Clean FILE_REGISTRY + FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) + + pkgX.installed = False + pkgY.installed = False + + # Monkey patch pkgDb + self.saved_db = spack.repo + pkgDb = MockPackageDb({specX: pkgX, specY: pkgY}) + spack.repo = pkgDb + + def tearDown(self): + # Remove the monkey patched test_install.open + test_install.open = open + + # Remove the monkey patched os.mkdir + os.mkdir = self.os_mkdir + del self.os_mkdir + + # Remove the monkey patched parse_specs + spack.cmd.parse_specs = self.parse_specs + del self.parse_specs + super(TestInstallTest, self).tearDown() + + spack.repo = self.saved_db + + def test_installing_both(self): + test_install.test_install(None, MockArgs('X') ) + self.assertEqual(len(FILE_REGISTRY), 1) + for _, content in FILE_REGISTRY.items(): + self.assertTrue('tests="2"' in content) + self.assertTrue('failures="0"' in content) + self.assertTrue('errors="0"' in content) + + def test_dependency_already_installed(self): + pkgX.installed = True + pkgY.installed = True + test_install.test_install(None, MockArgs('X')) + self.assertEqual(len(FILE_REGISTRY), 1) + for _, content in FILE_REGISTRY.items(): + self.assertTrue('tests="2"' in content) + self.assertTrue('failures="0"' in content) + self.assertTrue('errors="0"' in content) + self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2) diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py index 6fd05439bf..82ba59fc48 100644 --- a/lib/spack/spack/test/mock_database.py +++ b/lib/spack/spack/test/mock_database.py @@ -17,7 +17,7 @@ class MockDatabase(MockPackagesTest): def _mock_remove(self, spec): specs = spack.installed_db.query(spec) - assert(len(specs) == 1) + assert len(specs) == 1 spec = specs[0] spec.package.do_uninstall(spec) @@ -71,6 +71,8 @@ class MockDatabase(MockPackagesTest): self._mock_install('mpileaks ^zmpi') def tearDown(self): + for spec in spack.installed_db.query(): + spec.package.do_uninstall(spec) super(MockDatabase, self).tearDown() shutil.rmtree(self.install_path) spack.install_path = self.spack_install_path diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py deleted file mode 100644 index 18615b7efe..0000000000 --- a/lib/spack/spack/test/unit_install.py +++ /dev/null @@ -1,126 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://github.com/llnl/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -import itertools -import unittest - -import spack - -test_install = __import__("spack.cmd.test-install", - fromlist=["BuildId", "create_test_output", "TestResult"]) - -class MockOutput(object): - def __init__(self): - self.results = {} - - def add_test(self, buildId, passed=True, buildInfo=None): - self.results[buildId] = passed - - def write_to(self, stream): - pass - -class MockSpec(object): - def __init__(self, name, version, hashStr=None): - self.dependencies = {} - self.name = name - self.version = version - self.hash = hashStr if hashStr else hash((name, version)) - - def traverse(self, order=None): - allDeps = itertools.chain.from_iterable(i.traverse() for i in - self.dependencies.itervalues()) - return set(itertools.chain([self], allDeps)) - - def dag_hash(self): - return self.hash - - def to_yaml(self): - return "<<<MOCK YAML {0}>>>".format(test_install.BuildId(self).stringId()) - -class MockPackage(object): - def __init__(self, buildLogPath): - self.installed = False - self.build_log_path = buildLogPath - -specX = MockSpec("X", "1.2.0") -specY = MockSpec("Y", "2.3.8") -specX.dependencies['Y'] = specY -pkgX = MockPackage('logX') -pkgY = MockPackage('logY') -bIdX = test_install.BuildId(specX) -bIdY = test_install.BuildId(specY) - -class UnitInstallTest(unittest.TestCase): - """Tests test-install where X->Y""" - - def setUp(self): - super(UnitInstallTest, self).setUp() - - pkgX.installed = False - pkgY.installed = False - - self.saved_db = spack.repo - pkgDb = MockPackageDb({specX:pkgX, specY:pkgY}) - spack.repo = pkgDb - - - def tearDown(self): - super(UnitInstallTest, self).tearDown() - - spack.repo = self.saved_db - - def test_installing_both(self): - mo = MockOutput() - - pkgX.installed = True - pkgY.installed = True - test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=mock_fetch_log) - - self.assertEqual(mo.results, - {bIdX:test_install.TestResult.PASSED, - bIdY:test_install.TestResult.PASSED}) - - - def test_dependency_already_installed(self): - mo = MockOutput() - - pkgX.installed = True - pkgY.installed = True - test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log) - self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED}) - - #TODO: add test(s) where Y fails to install - - -class MockPackageDb(object): - def __init__(self, init=None): - self.specToPkg = {} - if init: - self.specToPkg.update(init) - - def get(self, spec): - return self.specToPkg[spec] - -def mock_fetch_log(path): - return [] diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 764af68400..11a4c0a70c 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -1,4 +1,4 @@ -############################################################################## +##################################################################### # Copyright (c) 2013, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. 
# @@ -84,7 +84,10 @@ function spack { if [ "$_sp_arg" = "-h" ]; then command spack cd -h else - cd $(spack location $_sp_arg "$@") + LOC="$(spack location $_sp_arg "$@")" + if [[ -d "$LOC" ]] ; then + cd "$LOC" + fi fi return ;; diff --git a/var/spack/repos/builtin/packages/LuaJIT/package.py b/var/spack/repos/builtin/packages/LuaJIT/package.py new file mode 100644 index 0000000000..7b2a269212 --- /dev/null +++ b/var/spack/repos/builtin/packages/LuaJIT/package.py @@ -0,0 +1,15 @@ +import os +from spack import * + +class Luajit(Package): + """Flast flexible JITed lua""" + homepage = "http://www.luajit.org" + url = "http://luajit.org/download/LuaJIT-2.0.4.tar.gz" + + version('2.0.4', 'dd9c38307f2223a504cbfb96e477eca0') + + def install(self, spec, prefix): + # Linking with the C++ compiler is a dirty hack to deal with the fact + # that unwinding symbols are not included by libc, this is necessary + # on some platforms for the final link stage to work + make("install", "PREFIX=" + prefix, "TARGET_LD=" + os.environ['CXX']) diff --git a/var/spack/repos/builtin/packages/adol-c/openmp_exam.patch b/var/spack/repos/builtin/packages/adol-c/openmp_exam.patch new file mode 100644 index 0000000000..8e21c72d92 --- /dev/null +++ b/var/spack/repos/builtin/packages/adol-c/openmp_exam.patch @@ -0,0 +1,13 @@ +diff --git a/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp b/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp +index fc6fc28..14103d2 100644 +--- a/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp ++++ b/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp +@@ -27,7 +27,7 @@ using namespace std; + #include <ctime> + #include <cmath> + +-#include "adolc.h" ++#include <adolc/adolc.h> + + #ifdef _OPENMP + #include <omp.h> diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py new file mode 100644 index 0000000000..70933542ca --- /dev/null +++ b/var/spack/repos/builtin/packages/adol-c/package.py @@ -0,0 +1,80 @@ +from spack import * +import sys + +class AdolC(Package): + """A package for the automatic differentiation of first and higher derivatives of vector functions in C and C++ programs by operator overloading.""" + homepage = "https://projects.coin-or.org/ADOL-C" + url = "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz" + + version('head', svn='https://projects.coin-or.org/svn/ADOL-C/trunk/') + version('2.6.1', '1032b28427d6e399af4610e78c0f087b') + + variant('doc', default=True, description='Install documentation') + variant('openmp', default=False, description='Enable OpenMP support') + variant('sparse', default=False, description='Enable sparse drivers') + variant('tests', default=True, description='Build all included examples as a test case') + + patch('openmp_exam.patch') + + def install(self, spec, prefix): + make_args = ['--prefix=%s' % prefix] + + # --with-cflags=FLAGS use CFLAGS=FLAGS (default: -O3 -Wall -ansi) + # --with-cxxflags=FLAGS use CXXFLAGS=FLAGS (default: -O3 -Wall) + + if '+openmp' in spec: + if spec.satisfies('%gcc'): + make_args.extend([ + '--with-openmp-flag=-fopenmp' # FIXME: Is this required? 
-I <path to omp.h> -L <LLVM OpenMP library path> + ]) + else: + raise InstallError("OpenMP flags for compilers other than GCC are not implemented.") + + if '+sparse' in spec: + make_args.extend([ + '--enable-sparse' + ]) + + # We can simply use the bundled examples to check + # whether Adol-C works as expected + if '+tests' in spec: + make_args.extend([ + '--enable-docexa', # Documeted examples + '--enable-addexa' # Additional examples + ]) + if '+openmp' in spec: + make_args.extend([ + '--enable-parexa' # Parallel examples + ]) + + configure(*make_args) + make() + make("install") + + # Copy the config.h file, as some packages might require it + source_directory = self.stage.source_path + config_h = join_path(source_directory,'ADOL-C','src','config.h') + install(config_h, join_path(prefix.include,'adolc')) + + # Install documentation to {prefix}/share + if '+doc' in spec: + install_tree(join_path('ADOL-C','doc'), + join_path(prefix.share,'doc')) + + # Install examples to {prefix}/share + if '+tests' in spec: + install_tree(join_path('ADOL-C','examples'), + join_path(prefix.share,'examples')) + + # Run some examples that don't require user input + # TODO: Check that bundled examples produce the correct results + with working_dir(join_path(source_directory,'ADOL-C','examples')): + Executable('./tapeless_scalar')() + Executable('./tapeless_vector')() + + with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')): + Executable('./checkpointing/checkpointing')() + + if '+openmp' in spec: + with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')): + Executable('./checkpointing/checkpointing')() diff --git a/var/spack/repos/builtin/packages/antlr/package.py b/var/spack/repos/builtin/packages/antlr/package.py new file mode 100644 index 0000000000..c7c7e3e850 --- /dev/null +++ b/var/spack/repos/builtin/packages/antlr/package.py @@ -0,0 +1,47 @@ +from spack import * + +class Antlr(Package): + + homepage = "http://www.antlr.org" + url = "https://github.com/antlr/antlr/tarball/v2.7.7" + + # NOTE: This requires that a system Java be available. + # Spack does not yet know how to install Java compilers + + # Notes from http://nco.sourceforge.net/#bld + # The first steps to build (i.e., compile, for the most part) NCO from + # source code are to install the pre-requisites: ANTLR version 2.7.7 + # (like this one not version 3.x or 4.x!) (required for ncap2)... ANTLR + # binaries from major distributions are pre-built with the source patch + # necessary to allow NCO to link to ANTLR... The ANTLR source file + # CharScanner.hpp must include this line: #include <cstring> or else + # ncap2 will not compile (this tarball is already patched). 
+ version('2.7.7', '914865e853fe8e1e61a9f23d045cb4ab', + # Patched version as described above + url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz') + # Unpatched version + # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz') + + variant('cxx', default=False, description='Enable ANTLR for C++') + variant('java', default=False, description='Enable ANTLR for Java') + variant('python', default=False, description='Enable ANTLR for Python') + variant('csharp', default=False, description='Enable ANTLR for Csharp') + + + def install(self, spec, prefix): + # Check for future enabling of variants + for v in ('+java', '+python', '+csharp'): + if v in spec: + raise Error('Illegal variant %s; for now, Spack only knows how to build antlr or antlr+cxx') + + config_args = [ + '--prefix=%s' % prefix, + '--%s-cxx' % ('enable' if '+cxx' in spec else 'disable'), + '--%s-java' % ('enable' if '+java' in spec else 'disable'), + '--%s-python' % ('enable' if '+python' in spec else 'disable'), + '--%s-csharp' % ('enable' if '+csharp' in spec else 'disable')] + + # which('autoreconf')('-iv') + configure(*config_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py index 7260fd74a1..5274fc018f 100644 --- a/var/spack/repos/builtin/packages/astyle/package.py +++ b/var/spack/repos/builtin/packages/astyle/package.py @@ -14,4 +14,5 @@ class Astyle(Package): make('-f', join_path(self.stage.source_path,'build','clang','Makefile'), parallel=False) + mkdirp(self.prefix.bin) install(join_path(self.stage.source_path, 'src','bin','astyle'), self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/autoconf/package.py b/var/spack/repos/builtin/packages/autoconf/package.py index 6412e810a6..b5e29b8a27 100644 --- a/var/spack/repos/builtin/packages/autoconf/package.py +++ b/var/spack/repos/builtin/packages/autoconf/package.py @@ -8,6 +8,8 @@ class Autoconf(Package): version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b') version('2.62', '6c1f3b3734999035d77da5024aab4fbd') + depends_on("m4") + def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py new file mode 100644 index 0000000000..9c9fbeedcf --- /dev/null +++ b/var/spack/repos/builtin/packages/bash/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Bash(Package): + """The GNU Project's Bourne Again SHell.""" + + homepage = "https://www.gnu.org/software/bash/" + url = "ftp://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz" + + version('4.3', '81348932d5da294953e15d4814c74dd1') + + depends_on('readline') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--with-curses', + '--with-installed-readline=%s' % spec['readline'].prefix) + + make() + make("tests") + make("install") diff --git a/var/spack/repos/builtin/packages/bbcp/package.py b/var/spack/repos/builtin/packages/bbcp/package.py new file mode 100644 index 0000000000..e9baa5ccf4 --- /dev/null +++ b/var/spack/repos/builtin/packages/bbcp/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Bbcp(Package): + """Securely and quickly copy data from source to target""" + homepage = "http://www.slac.stanford.edu/~abh/bbcp/" + + version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git', branch="master") + + def install(self, spec, prefix): + cd("src") + make() + # BBCP wants to build the executable in a directory whose name depends on the system type + makesname = 
Executable("../MakeSname") + bbcp_executable_path = "../bin/%s/bbcp" % makesname(output=str).rstrip("\n") + destination_path = "%s/bin/" % prefix + mkdirp(destination_path) + install(bbcp_executable_path, destination_path) diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index 897539a439..158d722046 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -12,6 +12,10 @@ class Binutils(Package): version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e') version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') + depends_on('m4') + depends_on('flex') + depends_on('bison') + # Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell variant('krellpatch', default=False, description="build with openspeedshop based patch.") variant('gold', default=True, description="build the gold linker") @@ -25,6 +29,7 @@ class Binutils(Package): configure_args = [ '--prefix=%s' % prefix, '--disable-dependency-tracking', + '--disable-werror', '--enable-interwork', '--enable-multilib', '--enable-shared', diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py index 7c526fb958..9a2ddcbf96 100644 --- a/var/spack/repos/builtin/packages/bison/package.py +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -10,6 +10,8 @@ class Bison(Package): version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') + depends_on("m4") + def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py index 7b07933911..90789a98f2 100644 --- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py +++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py @@ -1,5 +1,5 @@ ################################################################################ -# Copyright (c) 2015 Krell Institute. All Rights Reserved. +# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. 
# # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -24,43 +24,83 @@ class CbtfArgonavis(Package): homepage = "http://sourceforge.net/p/cbtf/wiki/Home/" # Mirror access template example - #url = "file:/g/g24/jeg/cbtf-argonavis-1.5.tar.gz" - #version('1.5', '1f7f6512f55409ed2135cfceabe26b82') + #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz" + #version('1.6', '0fafa0008478405c2c2319450f174ed4') - version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-argonavis/cbtf-argonavis') + version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git') - depends_on("cmake@3.0.2:") + depends_on("cmake@3.0.2") + depends_on("boost@1.50.0:") depends_on("papi") + depends_on("mrnet@5.0.1:+lwthreads+krellpatch") depends_on("cbtf") depends_on("cbtf-krell") - depends_on("cuda") + depends_on("cuda@6.0.37") + #depends_on("cuda") parallel = False + def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): + # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + + compile_flags="-O2 -g" + BuildTypeOptions = [] + + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + for word in cmakeOptions[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_CXX_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_C_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): + cmakeOptions.remove(word) + BuildTypeOptions.extend([ + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags + ]) + + cmakeOptions.extend(BuildTypeOptions) + + def install(self, spec, prefix): # Look for package installation information in the cbtf and cbtf-krell prefixes cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) - # FIXME, hard coded for testing purposes, we will alter when the external package feature is available - cuda_prefix_path = "/usr/local/cudatoolkit-6.0" - cupti_prefix_path = "/usr/local/cudatoolkit-6.0/extras/CUPTI" - - with working_dir('CUDA'): with working_dir('build', create=True): - cmake('..', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DCUDA_INSTALL_PATH=%s' % cuda_prefix_path, - '-DCUDA_ROOT=%s' % cuda_prefix_path, - '-DCUPTI_ROOT=%s' % cupti_prefix_path, - '-DCUDA_DIR=%s' % cuda_prefix_path, - '-DPAPI_ROOT=%s' % spec['papi'].prefix, - '-DCBTF_PREFIX=%s' % spec['cbtf'].prefix, - *std_cmake_args) - make("clean") - make() - make("install") + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DCUDA_DIR=%s' % spec['cuda'].prefix, + '-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix, + '-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix, + '-DCUPTI_DIR=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'), + '-DCUPTI_ROOT=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'), + '-DPAPI_ROOT=%s' % spec['papi'].prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DBoost_DIR=%s' % spec['boost'].prefix, + '-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib, + '-DMRNET_DIR=%s' % 
spec['mrnet'].prefix, + '-DBoost_NO_SYSTEM_PATHS=ON' + ]) + + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) + + # Adjust the standard cmake arguments to what we want the build type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) + + make("clean") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py index 9458ac113c..e6050cb4a9 100644 --- a/var/spack/repos/builtin/packages/cbtf-krell/package.py +++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py @@ -1,5 +1,5 @@ ################################################################################ -# Copyright (c) 2015 Krell Institute. All Rights Reserved. +# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -26,21 +26,30 @@ class CbtfKrell(Package): homepage = "http://sourceforge.net/p/cbtf/wiki/Home/" # optional mirror access template - #url = "file:/g/g24/jeg/cbtf-krell-1.5.tar.gz" - #version('1.5', 'b13f6df6a93c44149d977773dd776d2f') + #url = "file:/home/jeg/cbtf-krell-1.6.tar.gz" + #version('1.6', 'edeb61cd488f16e7b124f77db9ce762d') - version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-krell/cbtf-krell') + version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-krell.git') + # MPI variants + variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.") + variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.") + variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.") + variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.") + variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.") + variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.") # Dependencies for cbtf-krell + depends_on("cmake@3.0.2") # For binutils service depends_on("binutils@2.24+krellpatch") # collectionTool - depends_on("boost@1.50.0") - depends_on("dyninst@8.2.1") - depends_on("mrnet@4.1.0:+lwthreads") + depends_on("boost@1.50.0:") + depends_on("dyninst@8.2.1:") + depends_on("mrnet@5.0.1:+lwthreads+krellpatch") + depends_on("xerces-c@3.1.1:") depends_on("cbtf") @@ -51,66 +60,207 @@ class CbtfKrell(Package): # MPI Installations # These have not worked either for build or execution, commenting out for now - #depends_on("openmpi") - #depends_on("mvapich2@2.0") - #depends_on("mpich") + depends_on("openmpi", when='+openmpi') + depends_on("mpich", when='+mpich') + depends_on("mpich2", when='+mpich2') + depends_on("mvapich2", when='+mvapich2') + depends_on("mvapich", when='+mvapich') + depends_on("mpt", when='+mpt') parallel = False + def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): + # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + + compile_flags="-O2 -g" + BuildTypeOptions = [] + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + for word in cmakeOptions[:]: + 
if word.startswith('-DCMAKE_BUILD_TYPE'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_CXX_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_C_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): + cmakeOptions.remove(word) + BuildTypeOptions.extend([ + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags + ]) + + cmakeOptions.extend(BuildTypeOptions) + + + + def set_mpi_cmakeOptions(self, spec, cmakeOptions): + # Appends to cmakeOptions the options that will enable the appropriate MPI implementations + + MPIOptions = [] + + # openmpi + if '+openmpi' in spec: + MPIOptions.extend([ + '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix + ]) + # mpich + if '+mpich' in spec: + MPIOptions.extend([ + '-DMPICH_DIR=%s' % spec['mpich'].prefix + ]) + # mpich2 + if '+mpich2' in spec: + MPIOptions.extend([ + '-DMPICH2_DIR=%s' % spec['mpich2'].prefix + ]) + # mvapich + if '+mvapich' in spec: + MPIOptions.extend([ + '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix + ]) + # mvapich2 + if '+mvapich2' in spec: + MPIOptions.extend([ + '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix + ]) + # mpt + if '+mpt' in spec: + MPIOptions.extend([ + '-DMPT_DIR=%s' % spec['mpt'].prefix + ]) + + cmakeOptions.extend(MPIOptions) + def install(self, spec, prefix): # Add in paths for finding package config files that tell us where to find these packages - cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix) - - # FIXME - hard code path until external package support is available - # Need to change this path and/or add additional paths for MPI experiment support on different platforms - #openmpi_prefix_path = "/opt/openmpi-1.8.2" - #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu" - - # Other possibilities, they will need a -DMVAPICH_DIR=, etc clause in the cmake command to be recognized - # mvapich_prefix_path = "<mvapich install path>" - # mvapich2_prefix_path = "<mvapich2 install path>" - # mpich2_prefix_path = "<mpich2 install path>" - # mpich_prefix_path = "<mpich install path>" - # mpt_prefix_path = "<mpt install path>" - - # Add in paths for cuda if requested via the cuda variant - # FIXME - hard code path until external package support is available - #if '+cuda' in spec: - # cuda_prefix_path = "/usr/local/cuda-6.0" - # cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI" - #else: - # cuda_prefix_path = "" - # cupti_prefix_path = "" - - #'-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix, - #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, - #'-DMPICH_DIR=%s' % spec['mpich'].prefix, - #'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - #'-DOPENMPI_DIR=%s' % openmpi_prefix_path, - #'-DMVAPICH_DIR=%s' % mvapich_prefix_path, - #'-DLIB_SUFFIX=64', - #'-DCUDA_DIR=%s' % cuda_prefix_path, - #'-DCUPTI_DIR=%s' % cupti_prefix_path, + #cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix) + #'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path # Build cbtf-krell with cmake with working_dir('build_cbtf_krell', create=True): - cmake('..', - '-DCMAKE_BUILD_TYPE=Debug', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix, - '-DBOOST_DIR=%s' % spec['boost'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - 
'-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - *std_cmake_args) + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % spec['papi'].prefix, + '-DBOOST_DIR=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix + ]) + + + # Add any MPI implementations coming from variant settings + self.set_mpi_cmakeOptions(spec, cmakeOptions) + + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) + + # Adjust the standard cmake arguments to what we want the build type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) make("clean") make() make("install") + + + #if '+cray' in spec: + #if 'cray' in self.spec.architecture: + # if '+runtime' in spec: + # with working_dir('build_cbtf_cray_runtime', create=True): + # python_vers='%d.%d' % spec['python'].version[:2] + # cmake .. \ + # -DCMAKE_BUILD_TYPE=Debug \ + # -DTARGET_OS="cray" \ + # -DRUNTIME_ONLY="true" \ + # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ + # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ + # -DCBTF_DIR=${CBTF_ROOT} \ + # -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \ + # -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \ + # -DBINUTILS_DIR=${KRELL_ROOT} \ + # -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \ + # -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \ + # -DPAPI_DIR=${PAPI_ROOT} \ + # -DDYNINST_DIR=${DYNINST_CN_ROOT} \ + # -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \ + # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 + # else: + # with working_dir('build_cbtf_cray_frontend', create=True): + # python_vers='%d.%d' % spec['python'].version[:2] + # cmake .. \ + # -DCMAKE_BUILD_TYPE=Debug \ + # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ + # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ + # -DCBTF_DIR=${CBTF_ROOT} \ + # -DRUNTIME_TARGET_OS="cray" \ + # -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \ + # -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \ + # -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \ + # -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \ + # -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \ + # -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \ + # -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \ + # -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \ + # -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \ + # -DBOOST_ROOT=/${KRELL_ROOT} \ + # -DXERCESC_DIR=/${KRELL_ROOT} \ + # -DBINUTILS_DIR=/${KRELL_ROOT} \ + # -DLIBMONITOR_DIR=${KRELL_ROOT} \ + # -DLIBUNWIND_DIR=${KRELL_ROOT} \ + # -DPAPI_DIR=${PAPI_ROOT} \ + # -DDYNINST_DIR=${KRELL_ROOT} \ + # -DMRNET_DIR=${KRELL_ROOT} \ + # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 + # fi +# +# make("clean") +# make() +# make("install") +# +# elif '+mic' in spec: +# if '+runtime' in spec: +# with working_dir('build_cbtf_mic_runtime', create=True): +# python_vers='%d.%d' % spec['python'].version[:2] +# cmake .. \ +# +# else: +# with working_dir('build_cbtf_cray_frontend', create=True): +# python_vers='%d.%d' % spec['python'].version[:2] +# cmake .. 
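# [Editor's note] The six per-implementation blocks in set_mpi_cmakeOptions() above
# all do the same thing: when the matching variant is enabled, pass -D<NAME>_DIR=<prefix>
# to cmake. A more compact, hypothetical restatement of that mapping (same variant
# and CMake variable names as in the recipe) would be:
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
    # Map variant name -> CMake cache variable; only enabled variants contribute.
    mpi_map = {'openmpi':  'OPENMPI_DIR',
               'mpich':    'MPICH_DIR',
               'mpich2':   'MPICH2_DIR',
               'mvapich':  'MVAPICH_DIR',
               'mvapich2': 'MVAPICH2_DIR',
               'mpt':      'MPT_DIR'}
    for name, var in sorted(mpi_map.items()):
        if '+' + name in spec:
            cmakeOptions.append('-D%s=%s' % (var, spec[name].prefix))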
\ +# fi +# +# else: +# # Build cbtf-krell with cmake +# with working_dir('build_cbtf_krell', create=True): +# cmake('..', +# '-DCMAKE_BUILD_TYPE=Debug', +# '-DCMAKE_INSTALL_PREFIX=%s' % prefix, +# '-DCBTF_DIR=%s' % spec['cbtf'].prefix, +# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, +# '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, +# '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, +# '-DPAPI_DIR=%s' % spec['papi'].prefix, +# '-DBOOST_DIR=%s' % spec['boost'].prefix, +# '-DMRNET_DIR=%s' % spec['mrnet'].prefix, +# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, +# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, +# '-DOPENMPI_DIR=%s' % openmpi_prefix_path, +# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, +# *std_cmake_args) +# +# make("clean") +# make() +# make("install") +# +# fi +# diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py index 2da9e8a1f7..5ca88601f3 100644 --- a/var/spack/repos/builtin/packages/cbtf-lanl/package.py +++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py @@ -1,5 +1,5 @@ ################################################################################ -# Copyright (c) 2015 Krell Institute. All Rights Reserved. +# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -29,32 +29,65 @@ class CbtfLanl(Package): version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl') - + depends_on("cmake@3.0.2") # Dependencies for cbtf-krell - depends_on("boost@1.50") - depends_on("mrnet@4.1.0:+lwthreads") + depends_on("mrnet@5.0.1:+lwthreads+krellpatch") depends_on("xerces-c@3.1.1:") depends_on("cbtf") depends_on("cbtf-krell") parallel = False + def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): + # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + + compile_flags="-O2 -g" + BuildTypeOptions = [] + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + for word in cmakeOptions[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_CXX_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_C_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): + cmakeOptions.remove(word) + BuildTypeOptions.extend([ + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags + ]) + + cmakeOptions.extend(BuildTypeOptions) + def install(self, spec, prefix): # Add in paths for finding package config files that tell us where to find these packages cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) with working_dir('build', create=True): - cmake('..', - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'), - *std_cmake_args) - - make("clean") - make() - make("install") + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DMRNET_DIR=%s' % 
spec['mrnet'].prefix, + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') + ]) + + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) + + # Adjust the standard cmake arguments to what we want the build type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) + + make("clean") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py index 52e6a07020..7ce1cd382b 100644 --- a/var/spack/repos/builtin/packages/cbtf/package.py +++ b/var/spack/repos/builtin/packages/cbtf/package.py @@ -1,5 +1,5 @@ ################################################################################ -# Copyright (c) 2015 Krell Institute. All Rights Reserved. +# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -25,21 +25,44 @@ class Cbtf(Package): homepage = "http://sourceforge.net/p/cbtf/wiki/Home" # Mirror access template example - #url = "file:/g/g24/jeg/cbtf-1.5.tar.gz" - #version('1.6', '1ca88a8834759c4c74452cb97fe7b70a') + #url = "file:/home/jeg/cbtf-1.6.tar.gz" + #version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987') # Use when the git repository is available - version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf') + version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf.git') - depends_on("cmake") - #depends_on("boost@1.42.0:") - depends_on("boost@1.50.0") - depends_on("mrnet@4.1.0+lwthreads") + variant('runtime', default=False, description="build only the runtime libraries and collectors.") + + depends_on("cmake@3.0.2") + depends_on("boost@1.50.0:") + depends_on("mrnet@5.0.1:+lwthreads+krellpatch") depends_on("xerces-c@3.1.1:") - depends_on("libxml2") + # Work around for spack libxml2 package bug, take off python when fixed + depends_on("libxml2+python") parallel = False + def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): + # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + + compile_flags="-O2 -g" + BuildTypeOptions = [] + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + for word in cmakeOptions[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_CXX_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_C_FLAGS'): + cmakeOptions.remove(word) + BuildTypeOptions.extend([ + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags + ]) + + cmakeOptions.extend(BuildTypeOptions) + def install(self, spec, prefix): with working_dir('build', create=True): @@ -48,14 +71,45 @@ class Cbtf(Package): # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT. # Defaults to OFF. 
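# [Editor's note] Every cbtf* recipe in this patch follows the same install idiom:
# collect package-specific -D options, append Spack's std_cmake_args, let
# adjustBuildTypeParams_cmakeOptions() rewrite the build-type and flag entries, and
# only then invoke cmake. A minimal, hypothetical sketch of that shared idiom
# (the option values shown are illustrative):
def install(self, spec, prefix):
    with working_dir('build', create=True):
        options = ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
                   '-DBOOST_ROOT=%s' % spec['boost'].prefix]
        options.extend(std_cmake_args)                            # Spack defaults
        self.adjustBuildTypeParams_cmakeOptions(spec, options)    # override build type / flags
        cmake('..', *options)
        make()
        make('install')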
- cmake('..', - '--debug-output', - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'), - *std_cmake_args) + if '+runtime' in spec: + # Install message tag include file for use in Intel MIC cbtf-krell build + # FIXME + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') + ]) + + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) + + # Adjust the standard cmake arguments to what we want the build type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) + + else: + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') + ]) + + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) + + # Adjust the standard cmake arguments to what we want the build type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) make("clean") make() diff --git a/var/spack/repos/builtin/packages/cereal/package.py b/var/spack/repos/builtin/packages/cereal/package.py index a83927456f..6acbf666c8 100644 --- a/var/spack/repos/builtin/packages/cereal/package.py +++ b/var/spack/repos/builtin/packages/cereal/package.py @@ -1,4 +1,5 @@ from spack import * +import os import shutil class Cereal(Package): @@ -30,5 +31,8 @@ class Cereal(Package): # Install shutil.rmtree(join_path(prefix, 'doc'), ignore_errors=True) shutil.rmtree(join_path(prefix, 'include'), ignore_errors=True) + shutil.rmtree(join_path(prefix, 'lib'), ignore_errors=True) shutil.copytree('doc', join_path(prefix, 'doc'), symlinks=True) shutil.copytree('include', join_path(prefix, 'include'), symlinks=True) + # Create empty directory to avoid linker warnings later + os.mkdir(join_path(prefix, 'lib')) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 91a4e3b415..2493cf0a13 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -30,6 +30,7 @@ class Cmake(Package): homepage = 'https://www.cmake.org' url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' + version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02') version('3.5.1', 'ca051f4a66375c89d1a524e726da0296') version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e') version('3.4.3', '4cb3ff35b2472aae70f542116d616e63') diff --git a/var/spack/repos/builtin/packages/cnmem/package.py b/var/spack/repos/builtin/packages/cnmem/package.py new file mode 100644 index 0000000000..0a83e8fc20 --- /dev/null +++ b/var/spack/repos/builtin/packages/cnmem/package.py @@ -0,0 +1,12 @@ +from spack import * + +class Cnmem(Package): + """CNMem mempool for CUDA devices""" + homepage = "https://github.com/NVIDIA/cnmem" + + version('git', git='https://github.com/NVIDIA/cnmem.git', branch="master") + + def 
install(self, spec, prefix): + cmake('.',*std_cmake_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py index bc83cb2b65..c2778e14da 100644 --- a/var/spack/repos/builtin/packages/cryptopp/package.py +++ b/var/spack/repos/builtin/packages/cryptopp/package.py @@ -13,6 +13,7 @@ class Cryptopp(Package): version('5.6.3', '3c5b70e2ec98b7a24988734446242d07') version('5.6.2', '7ed022585698df48e65ce9218f6c6a67') + version('5.6.1', '96cbeba0907562b077e26bcffb483828') def install(self, spec, prefix): make() diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 1f763ad358..df8330384d 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -12,6 +12,7 @@ class Dealii(Package): variant('mpi', default=True, description='Compile with MPI') variant('arpack', default=True, description='Compile with Arpack and PArpack (only with MPI)') variant('doc', default=False, description='Compile with documentation') + variant('gsl' , default=True, description='Compile with GSL') variant('hdf5', default=True, description='Compile with HDF5 (only with MPI)') variant('metis', default=True, description='Compile with Metis') variant('netcdf', default=True, description='Compile with Netcdf (only with MPI)') @@ -39,8 +40,10 @@ class Dealii(Package): depends_on ("mpi", when="+mpi") depends_on ("arpack-ng+mpi", when='+arpack+mpi') depends_on ("doxygen", when='+doc') + depends_on ("gsl", when='@8.5.0:+gsl') + depends_on ("gsl", when='@dev+gsl') depends_on ("hdf5+mpi~cxx", when='+hdf5+mpi') #FIXME NetCDF declares dependency with ~cxx, why? - depends_on ("metis", when='+metis') + depends_on ("metis@5:", when='+metis') depends_on ("netcdf+mpi", when="+netcdf+mpi") depends_on ("netcdf-cxx", when='+netcdf+mpi') depends_on ("oce", when='+oce') @@ -50,8 +53,8 @@ class Dealii(Package): depends_on ("trilinos", when='+trilinos+mpi') # developer dependnecies - #depends_on ("numdiff") #FIXME - #depends_on ("astyle") #FIXME + depends_on ("numdiff", when='@dev') + depends_on ("astyle@2.04", when='@dev') def install(self, spec, prefix): options = [] @@ -80,7 +83,6 @@ class Dealii(Package): (join_path(spec['lapack'].prefix.lib,'liblapack.%s' % dsuf), # FIXME don't hardcode names join_path(spec['blas'].prefix.lib,'libblas.%s' % dsuf)), # FIXME don't hardcode names '-DMUPARSER_DIR=%s ' % spec['muparser'].prefix, - '-DP4EST_DIR=%s' % spec['p4est'].prefix, '-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix, '-DTBB_DIR=%s' % spec['tbb'].prefix, '-DZLIB_DIR=%s' % spec['zlib'].prefix @@ -100,7 +102,7 @@ class Dealii(Package): ]) # Optional dependencies for which librariy names are the same as CMake variables - for library in ('hdf5', 'p4est','petsc', 'slepc','trilinos','metis'): + for library in ('gsl','hdf5','p4est','petsc','slepc','trilinos','metis'): if library in spec: options.extend([ '-D{library}_DIR={value}'.format(library=library.upper(), value=spec[library].prefix), @@ -251,3 +253,6 @@ class Dealii(Package): cmake('.') make('release') make('run',parallel=False) + + def setup_environment(self, spack_env, env): + env.set('DEAL_II_DIR', self.prefix) diff --git a/var/spack/repos/builtin/packages/dia/package.py b/var/spack/repos/builtin/packages/dia/package.py index 1cb5910e46..25d5f08205 100644 --- a/var/spack/repos/builtin/packages/dia/package.py +++ b/var/spack/repos/builtin/packages/dia/package.py @@ -7,6 +7,7 
@@ class Dia(Package): version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c') + depends_on('intltool') depends_on('gtkplus@2.6.0:') depends_on('cairo') #depends_on('libart') # optional dependency, not yet supported by spack. diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py index 8d6e672f86..6b38ab0261 100644 --- a/var/spack/repos/builtin/packages/eigen/package.py +++ b/var/spack/repos/builtin/packages/eigen/package.py @@ -45,7 +45,8 @@ class Eigen(Package): # TODO : dependency on googlehash, superlu, adolc missing - depends_on('metis', when='+metis') + depends_on('cmake') + depends_on('metis@5:', when='+metis') depends_on('scotch', when='+scotch') depends_on('fftw', when='+fftw') depends_on('suite-sparse', when='+suitesparse') diff --git a/var/spack/repos/builtin/packages/elk/package.py b/var/spack/repos/builtin/packages/elk/package.py new file mode 100644 index 0000000000..1d9216fd1a --- /dev/null +++ b/var/spack/repos/builtin/packages/elk/package.py @@ -0,0 +1,122 @@ +from spack import * +import spack + +class Elk(Package): + '''An all-electron full-potential linearised augmented-plane wave + (FP-LAPW) code with many advanced features.''' + + homepage = 'http://elk.sourceforge.net/' + url = 'https://sourceforge.net/projects/elk/files/elk-3.3.17.tgz' + + version('3.3.17', 'f57f6230d14f3b3b558e5c71f62f0592') + + # Elk provides these libraries, but allows you to specify your own + variant('blas', default=True, description='Build with custom BLAS library') + variant('lapack', default=True, description='Build with custom LAPACK library') + variant('fft', default=True, description='Build with custom FFT library') + + # Elk does not provide these libraries, but allows you to use them + variant('mpi', default=True, description='Enable MPI parallelism') + variant('openmp', default=True, description='Enable OpenMP support') + variant('libxc', default=True, description='Link to Libxc functional library') + + depends_on('blas', when='+blas') + depends_on('lapack', when='+lapack') + depends_on('fftw', when='+fft') + depends_on('mpi', when='+mpi') + depends_on('libxc', when='+libxc') + + # Cannot be built in parallel + parallel = False + + + def configure(self, spec): + # Dictionary of configuration options + config = { + 'MAKE': 'make', + 'F90': join_path(spack.build_env_path, 'f90'), + 'F77': join_path(spack.build_env_path, 'f77'), + 'AR': 'ar', + 'LIB_FFT': 'fftlib.a', + 'SRC_MPI': 'mpi_stub.f90', + 'SRC_OMP': 'omp_stub.f90', + 'SRC_libxc': 'libxcifc_stub.f90', + 'SRC_FFT': 'zfftifc.f90' + } + + # Compiler-specific flags + flags = '' + if self.compiler.name == 'intel': + flags = '-O3 -ip -unroll -no-prec-div -openmp' + elif self.compiler.name == 'gcc': + flags = '-O3 -ffast-math -funroll-loops -fopenmp' + elif self.compiler.name == 'pgi': + flags = '-O3 -mp -lpthread' + elif self.compiler.name == 'g95': + flags = '-O3 -fno-second-underscore' + elif self.compiler.name == 'nag': + flags = '-O4 -kind=byte -dusty -dcfuns' + elif self.compiler.name == 'xl': + flags = '-O3 -qsmp=omp' + config['F90_OPTS'] = flags + config['F77_OPTS'] = flags + + # BLAS/LAPACK support + blas = 'blas.a' + lapack = 'lapack.a' + if '+blas' in spec: + blas = join_path(spec['blas'].prefix.lib, 'libblas.so') + if '+lapack' in spec: + lapack = join_path(spec['lapack'].prefix.lib, 'liblapack.so') + config['LIB_LPK'] = ' '.join([lapack, blas]) # lapack must come before blas + + # FFT support + if '+fft' in spec: + config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib, 
'libfftw3.so') + config['SRC_FFT'] = 'zfftifc_fftw.f90' + + # MPI support + if '+mpi' in spec: + config.pop('SRC_MPI') + config['F90'] = join_path(spec['mpi'].prefix.bin, 'mpif90') + config['F77'] = join_path(spec['mpi'].prefix.bin, 'mpif77') + + # OpenMP support + if '+openmp' in spec: + config.pop('SRC_OMP') + + # Libxc support + if '+libxc' in spec: + config['LIB_libxc'] = ' '.join([ + join_path(spec['libxc'].prefix.lib, 'libxcf90.so'), + join_path(spec['libxc'].prefix.lib, 'libxc.so') + ]) + config['SRC_libxc'] = ' '.join([ + 'libxc_funcs.f90', + 'libxc.f90', + 'libxcifc.f90' + ]) + + # Write configuration options to include file + with open('make.inc', 'w') as inc: + for key in config: + inc.write('{0} = {1}\n'.format(key, config[key])) + + + def install(self, spec, prefix): + # Elk only provides an interactive setup script + self.configure(spec) + + make() + make('test') + + # The Elk Makefile does not provide an install target + mkdirp(prefix.bin) + + install('src/elk', prefix.bin) + install('src/eos/eos', prefix.bin) + install('src/spacegroup/spacegroup', prefix.bin) + + install_tree('examples', join_path(prefix, 'examples')) + install_tree('species', join_path(prefix, 'species')) + diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py index b065904912..e4795893e0 100644 --- a/var/spack/repos/builtin/packages/flex/package.py +++ b/var/spack/repos/builtin/packages/flex/package.py @@ -6,6 +6,7 @@ class Flex(Package): homepage = "http://flex.sourceforge.net/" url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz" + version('2.6.0', '5724bcffed4ebe39e9b55a9be80859ec') version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index 6043b62279..8f90757232 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -38,6 +38,7 @@ class Gcc(Package): list_url = 'http://open-source-box.org/gcc/' list_depth = 2 + version('6.1.0', '8fb6cb98b8459f5863328380fbf06bd1') version('5.3.0', 'c9616fd448f980259c31de613e575719') version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') version('4.9.3', '6f831b4d251872736e8e9cc09746f327') diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py index b346fe80c2..0e9e8fc099 100644 --- a/var/spack/repos/builtin/packages/gdb/package.py +++ b/var/spack/repos/builtin/packages/gdb/package.py @@ -34,6 +34,7 @@ class Gdb(Package): homepage = "https://www.gnu.org/software/gdb" url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz" + version('7.11', 'f585059252836a981ea5db9a5f8ce97f') version('7.10.1', 'b93a2721393e5fa226375b42d567d90b') version('7.10', 'fa6827ad0fd2be1daa418abb11a54d86') version('7.9.1', 'f3b97de919a9dba84490b2e076ec4cb0') diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 388f84aefd..77521fd658 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -7,7 +7,8 @@ class Git(Package): homepage = "http://git-scm.com" url = "https://github.com/git/git/tarball/v2.7.1" - version('2.8.0-rc2', 'c2cf9f2cc70e35f2fafbaf9258f82e4c') + version('2.8.1', '1308448d95afa41a4135903f22262fc8') + version('2.8.0', 'eca687e46e9750121638f258cff8317b') version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6') version('2.7.1', 
'bf0706b433a8dedd27a63a72f9a66060') @@ -23,18 +24,10 @@ class Git(Package): #version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c') - # Git compiles with curl support by default on but if your system - # does not have it you will not be able to clone https repos - variant("curl", default=False, description="Add the internal support of curl for https clone") - - # Git compiles with expat support by default on but if your system - # does not have it you will not be able to push https repos - variant("expat", default=False, description="Add the internal support of expat for https push") - depends_on("openssl") depends_on("autoconf") - depends_on("curl", when="+curl") - depends_on("expat", when="+expat") + depends_on("curl") + depends_on("expat") # Also depends_on gettext: apt-get install gettext (Ubuntu) @@ -49,23 +42,12 @@ class Git(Package): "--prefix=%s" % prefix, "--without-pcre", "--with-openssl=%s" % spec['openssl'].prefix, - "--with-zlib=%s" % spec['zlib'].prefix + "--with-zlib=%s" % spec['zlib'].prefix, + "--with-curl=%s" % spec['curl'].prefix, + "--with-expat=%s" % spec['expat'].prefix, ] - if '+curl' in spec: - configure_args.append("--with-curl=%s" % spec['curl'].prefix) - - if '+expat' in spec: - configure_args.append("--with-expat=%s" % spec['expat'].prefix) - which('autoreconf')('-i') configure(*configure_args) make() make("install") - - - - - - - diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 67ead5f941..a3fc3f79eb 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -1,4 +1,5 @@ from spack import * +import sys class Glib(Package): """The GLib package contains a low-level libraries useful for @@ -12,6 +13,8 @@ class Glib(Package): depends_on("libffi") depends_on("zlib") + depends_on("pkg-config") + depends_on('gettext', sys.platform=='darwin') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/glm/package.py b/var/spack/repos/builtin/packages/glm/package.py index d00c301b4c..ecae89f1e8 100644 --- a/var/spack/repos/builtin/packages/glm/package.py +++ b/var/spack/repos/builtin/packages/glm/package.py @@ -11,6 +11,8 @@ class Glm(Package): url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz" version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8') + + depends_on ("cmake") def install(self, spec, prefix): with working_dir('spack-build', create=True): diff --git a/var/spack/repos/builtin/packages/global/package.py b/var/spack/repos/builtin/packages/global/package.py index e8f06516d9..aac1cede30 100644 --- a/var/spack/repos/builtin/packages/global/package.py +++ b/var/spack/repos/builtin/packages/global/package.py @@ -11,6 +11,7 @@ class Global(Package): version('6.5', 'dfec818b4f53d91721e247cf7b218078') depends_on('exuberant-ctags') + depends_on('ncurses') def install(self, spec, prefix): config_args = ['--prefix={0}'.format(prefix)] diff --git a/var/spack/repos/builtin/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py index fe13de3b95..85e9c237d6 100644 --- a/var/spack/repos/builtin/packages/gmp/package.py +++ b/var/spack/repos/builtin/packages/gmp/package.py @@ -35,6 +35,8 @@ class Gmp(Package): version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470') version('6.0.0' , '6ef5869ae735db9995619135bd856b84') + depends_on("m4") + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() diff --git a/var/spack/repos/builtin/packages/gmsh/package.py 
b/var/spack/repos/builtin/packages/gmsh/package.py index 9d759303cb..5f659c56df 100644 --- a/var/spack/repos/builtin/packages/gmsh/package.py +++ b/var/spack/repos/builtin/packages/gmsh/package.py @@ -63,6 +63,11 @@ class Gmsh(Package): build_directory = join_path(self.stage.path, 'spack-build') source_directory = self.stage.source_path + options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix) + + # Prevent GMsh from using its own strange directory structure on OSX + options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF') + if '+shared' in spec: options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON', '-DENABLE_BUILD_DYNAMIC:BOOL=ON']) # Builds dynamic executable and installs shared library diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index f26e225b83..470969832f 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -38,7 +38,7 @@ class Hdf5(Package): list_depth = 3 version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199') - version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618') + version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True) version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') version('1.8.13', 'c03426e9e77d7766944654280b467289') @@ -101,10 +101,10 @@ class Hdf5(Package): extra_args.append('--enable-cxx') if '+fortran' in spec: - extra_args.extend([ - '--enable-fortran', - '--enable-fortran2003' - ]) + extra_args.append('--enable-fortran') + # '--enable-fortran2003' no longer exists as of version 1.10.0 + if spec.satisfies('@:1.8.16'): + extra_args.append('--enable-fortran2003') if '+mpi' in spec: # The HDF5 configure script warns if cxx and mpi are enabled diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py index ab7205646e..a461a7482c 100644 --- a/var/spack/repos/builtin/packages/hwloc/package.py +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -17,6 +17,7 @@ class Hwloc(Package): list_url = "http://www.open-mpi.org/software/hwloc/" list_depth = 3 + version('1.11.3', 'c1d36a9de6028eac1d18ea4782ef958f') version('1.11.2', 'e4ca55c2a5c5656da4a4e37c8fc51b23') version('1.11.1', 'feb4e416a1b25963ed565d8b42252fdc') version('1.9', '1f9f9155682fe8946a97c08896109508') diff --git a/var/spack/repos/builtin/packages/hydra/package.py b/var/spack/repos/builtin/packages/hydra/package.py new file mode 100644 index 0000000000..c1b8868276 --- /dev/null +++ b/var/spack/repos/builtin/packages/hydra/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Hydra(Package): + """Hydra is a process management system for starting parallel jobs. 
+ Hydra is designed to natively work with existing launcher daemons + (such as ssh, rsh, fork), as well as natively integrate with resource + management systems (such as slurm, pbs, sge).""" + + homepage = "http://www.mpich.org" + url = "http://www.mpich.org/static/downloads/3.2/hydra-3.2.tar.gz" + list_url = "http://www.mpich.org/static/downloads/" + list_depth = 2 + + version('3.2', '4d670916695bf7e3a869cc336a881b39') + + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/intltool/package.py b/var/spack/repos/builtin/packages/intltool/package.py new file mode 100644 index 0000000000..9b3c095378 --- /dev/null +++ b/var/spack/repos/builtin/packages/intltool/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Intltool(Package): + """intltool is a set of tools to centralize translation of many different file formats using GNU gettext-compatible PO files.""" + homepage = 'https://freedesktop.org/wiki/Software/intltool/' + + version('0.51.0', '12e517cac2b57a0121cda351570f1e63') + + def url_for_version(self, version): + """Handle version-based custom URLs.""" + return 'https://launchpad.net/intltool/trunk/%s/+download/intltool-%s.tar.gz' % (version, version) + + def install(self, spec, prefix): + + # configure, build, install: + options = ['--prefix=%s' % prefix ] + configure(*options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/ior/package.py b/var/spack/repos/builtin/packages/ior/package.py new file mode 100644 index 0000000000..c46650a674 --- /dev/null +++ b/var/spack/repos/builtin/packages/ior/package.py @@ -0,0 +1,42 @@ +from spack import * +import os + +class Ior(Package): + """The IOR software is used for benchmarking parallel file systems + using POSIX, MPI-IO, or HDF5 interfaces.""" + + homepage = "https://github.com/LLNL/ior" + url = "https://github.com/LLNL/ior/archive/3.0.1.tar.gz" + + version('3.0.1', '71150025e0bb6ea1761150f48b553065') + + variant('hdf5', default=False, description='support IO with HDF5 backend') + variant('ncmpi', default=False, description='support IO with NCMPI backend') + + depends_on('mpi') + depends_on('hdf5+mpi', when='+hdf5') + depends_on('netcdf+mpi', when='+ncmpi') + + + def install(self, spec, prefix): + os.system('./bootstrap') + + config_args = [ + 'MPICC=%s' % spec['mpi'].prefix.bin + '/mpicc', + '--prefix=%s' % prefix, + ] + + if '+hdf5' in spec: + config_args.append('--with-hdf5') + else: + config_args.append('--without-hdf5') + + if '+ncmpi' in spec: + config_args.append('--with-ncmpi') + else: + config_args.append('--without-ncmpi') + + configure(*config_args) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py index 8cec9ea75b..9cb0fd1f40 100644 --- a/var/spack/repos/builtin/packages/jemalloc/package.py +++ b/var/spack/repos/builtin/packages/jemalloc/package.py @@ -5,6 +5,7 @@ class Jemalloc(Package): homepage = "http://www.canonware.com/jemalloc/" url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2" + version('4.1.0', 'c4e53c947905a533d5899e5cc3da1f94') version('4.0.4', '687c5cc53b9a7ab711ccd680351ff988') variant('stats', default=False, description='Enable heap statistics') @@ -20,5 +21,8 @@ class Jemalloc(Package): configure(*configure_args) + # Don't use -Werror + filter_file(r'-Werror=\S*', '', 'Makefile') + make() make("install") diff --git 
a/var/spack/repos/builtin/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py index 87820467db..2f15e59ad4 100644 --- a/var/spack/repos/builtin/packages/jpeg/package.py +++ b/var/spack/repos/builtin/packages/jpeg/package.py @@ -1,14 +1,19 @@ from spack import * class Jpeg(Package): - """jpeg library""" + """libjpeg is a widely used free library with functions for handling the + JPEG image data format. It implements a JPEG codec (encoding and decoding) + alongside various utilities for handling JPEG data.""" + homepage = "http://www.ijg.org" - url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" + url = "http://www.ijg.org/files/jpegsrc.v9b.tar.gz" + version('9b', '6a9996ce116ec5c52b4870dbcd6d3ddb') version('9a', '3353992aecaee1805ef4109aadd433e7') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() + make("test") make("install") diff --git a/var/spack/repos/builtin/packages/julia/openblas.patch b/var/spack/repos/builtin/packages/julia/openblas.patch new file mode 100644 index 0000000000..f75d7dd04f --- /dev/null +++ b/var/spack/repos/builtin/packages/julia/openblas.patch @@ -0,0 +1,68 @@ +diff --git a/deps/Makefile b/deps/Makefile +index 6cb73be..bcd8520 100644 +--- a/deps/Makefile ++++ b/deps/Makefile +@@ -1049,7 +1049,7 @@ OPENBLAS_BUILD_OPTS += NO_AFFINITY=1 + + # Build for all architectures - required for distribution + ifeq ($(OPENBLAS_DYNAMIC_ARCH), 1) +-OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 ++OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 MAKE_NO_J=1 + endif + + # 64-bit BLAS interface +@@ -1085,6 +1085,7 @@ OPENBLAS_BUILD_OPTS += NO_AVX2=1 + endif + + $(OPENBLAS_SRC_DIR)/config.status: $(OPENBLAS_SRC_DIR)/Makefile ++ cd $(dir $@) && patch -p1 < ../openblas-make.patch + ifeq ($(OS),WINNT) + cd $(dir $@) && patch -p1 < ../openblas-win64.patch + endif +diff --git a/deps/openblas.version b/deps/openblas.version +index 7c97e1b..58b9467 100644 +--- a/deps/openblas.version ++++ b/deps/openblas.version +@@ -1,2 +1,2 @@ +-OPENBLAS_BRANCH=v0.2.15 +-OPENBLAS_SHA1=53e849f4fcae4363a64576de00e982722c7304f9 ++OPENBLAS_BRANCH=v0.2.17 ++OPENBLAS_SHA1=a71e8c82f6a9f73093b631e5deab1e8da716b61f +--- a/deps/openblas-make.patch ++++ b/deps/openblas-make.patch +@@ -0,0 +1,35 @@ ++diff --git a/Makefile.system b/Makefile.system ++index b89f60e..2dbdad0 100644 ++--- a/Makefile.system +++++ b/Makefile.system ++@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0 ++ endif ++ GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE) ++ +++ifdef MAKE_NO_J +++GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J) +++endif +++ ++ ifdef MAKE_NB_JOBS ++ GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS) ++ endif ++diff --git a/getarch.c b/getarch.c ++index f9c49e6..dffad70 100644 ++--- a/getarch.c +++++ b/getarch.c ++@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){ ++ #endif ++ #endif ++ +++#ifndef MAKE_NO_J ++ #ifdef MAKE_NB_JOBS ++ printf("MAKE += -j %d\n", MAKE_NB_JOBS); ++ #elif NO_PARALLEL_MAKE==1 ++@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){ ++ printf("MAKE += -j %d\n", get_num_cores()); ++ #endif ++ #endif +++#endif ++ ++ break; ++ diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 6900af38e4..25d782266b 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -4,43 +4,56 @@ import os class Julia(Package): """The Julia Language: A fresh approach to technical computing""" homepage = "http://julialang.org" - url = 
"http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz" + url = "https://github.com/JuliaLang/julia/releases/download/v0.4.3/julia-0.4.3-full.tar.gz" - version('0.4.3', '7b9f096798fca4bef262a64674bc2b52') - version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06') + version('master', + git='https://github.com/JuliaLang/julia.git', branch='master') + version('0.4.5', '69141ff5aa6cee7c0ec8c85a34aa49a6') + version('0.4.3', '8a4a59fd335b05090dd1ebefbbe5aaac') patch('gc.patch') + patch('openblas.patch', when='@0.4:0.4.5') - # Build-time dependencies - depends_on("cmake @2.8:") + # Build-time dependencies: # depends_on("awk") # depends_on("m4") # depends_on("pkg-config") - depends_on("python @2.6:2.9") - # I think that Julia requires the dependencies above, but it builds find (on - # my system) without these. We should enable them as necessary. + # Combined build-time and run-time dependencies: + depends_on("binutils") + depends_on("cmake @2.8:") + depends_on("git") + depends_on("openssl") + depends_on("python @2.7:2.999") + + # I think that Julia requires the dependencies above, but it + # builds fine (on my system) without these. We should enable them + # as necessary. - # Run-time dependencies + # Run-time dependencies: # depends_on("arpack") # depends_on("fftw +float") # depends_on("gmp") + # depends_on("libgit") # depends_on("mpfr") + # depends_on("openblas") # depends_on("pcre2") - # ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia. + # ARPACK: Requires BLAS and LAPACK; needs to use the same version + # as Julia. - # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS - # has an option for this; make it available as variant. + # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit + # systems. OpenBLAS has an option for this; make it available as + # variant. - # FFTW: Something doesn't work when using a pre-installed FFTW library; need - # to investigate. + # FFTW: Something doesn't work when using a pre-installed FFTW + # library; need to investigate. - # GMP, MPFR: Something doesn't work when using a pre-installed FFTW library; - # need to investigate. + # GMP, MPFR: Something doesn't work when using a pre-installed + # FFTW library; need to investigate. - # LLVM: Julia works only with specific versions, and might require patches. - # Thus we let Julia install its own LLVM. + # LLVM: Julia works only with specific versions, and might require + # patches. Thus we let Julia install its own LLVM. # Other possible dependencies: # USE_SYSTEM_OPENLIBM=0 @@ -50,11 +63,21 @@ class Julia(Package): # USE_SYSTEM_UTF8PROC=0 # USE_SYSTEM_LIBGIT2=0 + # Run-time dependencies for Julia packages: + depends_on("hdf5") + depends_on("mpi") + def install(self, spec, prefix): - # Explicitly setting CC, CXX, or FC breaks building libuv, one of - # Julia's dependencies. This might be a Darwin-specific problem. Given - # how Spack sets up compilers, Julia should still use Spack's compilers, - # even if we don't specify them explicitly. + if '@master' in spec: + # Julia needs to know the offset from a specific commit + git = which('git') + git('fetch', '--unshallow') + + # Explicitly setting CC, CXX, or FC breaks building libuv, one + # of Julia's dependencies. This might be a Darwin-specific + # problem. Given how Spack sets up compilers, Julia should + # still use Spack's compilers, even if we don't specify them + # explicitly. 
options = [#"CC=cc", #"CXX=c++", #"FC=fc", diff --git a/var/spack/repos/builtin/packages/kripke/package.py b/var/spack/repos/builtin/packages/kripke/package.py new file mode 100644 index 0000000000..7d067ea44d --- /dev/null +++ b/var/spack/repos/builtin/packages/kripke/package.py @@ -0,0 +1,32 @@ +from spack import * + +class Kripke(Package): + """Kripke is a simple, scalable, 3D Sn deterministic particle + transport proxy/mini app. + """ + homepage = "https://codesign.llnl.gov/kripke.php" + url = "https://codesign.llnl.gov/downloads/kripke-openmp-1.1.tar.gz" + + version('1.1', '7fe6f2b26ed983a6ce5495ab701f85bf') + + variant('mpi', default=True, description='Build with MPI.') + variant('openmp', default=True, description='Build with OpenMP enabled.') + + depends_on('mpi', when="+mpi") + + def install(self, spec, prefix): + with working_dir('build', create=True): + def enabled(variant): + return (1 if variant in spec else 0) + + cmake('-DCMAKE_INSTALL_PREFIX:PATH=.', + '-DENABLE_OPENMP=%d' % enabled('+openmp'), + '-DENABLE_MPI=%d' % enabled('+mpi'), + '..', + *std_cmake_args) + make() + + # Kripke does not provide install target, so we have to copy + # things into place. + mkdirp(prefix.bin) + install('kripke', prefix.bin) diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py index 73c8c62341..9d5782896e 100644 --- a/var/spack/repos/builtin/packages/libpng/package.py +++ b/var/spack/repos/builtin/packages/libpng/package.py @@ -8,6 +8,9 @@ class Libpng(Package): version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') version('1.6.15', '829a256f3de9307731d4f52dc071916d') version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + version('1.5.26', '3ca98347a5541a2dad55cd6d07ee60a9') + version('1.4.19', '89bcbc4fc8b31f4a403906cf4f662330') + version('1.2.56', '9508fc59d10a1ffadd9aae35116c19ee') depends_on('zlib') diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py new file mode 100644 index 0000000000..7f25edaf76 --- /dev/null +++ b/var/spack/repos/builtin/packages/libtermkey/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Libtermkey(Package): + """Easy keyboard entry processing for terminal programs""" + homepage = "http://www.leonerd.org.uk/code/libtermkey/" + url = "http://www.leonerd.org.uk/code/libtermkey/libtermkey-0.18.tar.gz" + + version('0.18' , '3be2e3e5a851a49cc5e8567ac108b520') + version('0.17' , '20edb99e0d95ec1690fe90e6a555ae6d') + version('0.16' , '7a24b675aaeb142d30db28e7554987d4') + version('0.15b', '27689756e6c86c56ae454f2ac259bc3d') + version('0.14' , 'e08ce30f440f9715c459060e0e048978') + + + def install(self, spec, prefix): + make() + make("install", "PREFIX=" + prefix) diff --git a/var/spack/repos/builtin/packages/libtool/package.py b/var/spack/repos/builtin/packages/libtool/package.py index 82a54953b2..c804f5ab5d 100644 --- a/var/spack/repos/builtin/packages/libtool/package.py +++ b/var/spack/repos/builtin/packages/libtool/package.py @@ -8,6 +8,8 @@ class Libtool(Package): version('2.4.6' , 'addf44b646ddb4e3919805aa88fa7c5e') version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50') + depends_on('m4') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/libuv/package.py b/var/spack/repos/builtin/packages/libuv/package.py new file mode 100644 index 0000000000..eace94d1a6 --- /dev/null +++ b/var/spack/repos/builtin/packages/libuv/package.py @@ -0,0 +1,21 @@ +from spack import * + 
+class Libuv(Package): + """Multi-platform library with a focus on asynchronous IO""" + homepage = "http://libuv.org" + url = "https://github.com/libuv/libuv/archive/v1.9.0.tar.gz" + + version('1.9.0', '14737f9c76123a19a290dabb7d1cd04c') + + depends_on('automake') + depends_on('autoconf') + depends_on('libtool') + + def install(self, spec, prefix): + bash = which("bash") + bash('autogen.sh') + configure('--prefix=%s' % prefix) + + make() + make("check") + make("install") diff --git a/var/spack/repos/builtin/packages/libvterm/package.py b/var/spack/repos/builtin/packages/libvterm/package.py new file mode 100644 index 0000000000..3212f6550d --- /dev/null +++ b/var/spack/repos/builtin/packages/libvterm/package.py @@ -0,0 +1,12 @@ +from spack import * + +class Libvterm(Package): + """An abstract library implementation of a terminal emulator""" + homepage = "http://www.leonerd.org.uk/code/libvterm/" + url = "http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz" + + version('681', '7a4325a7350b7092245c04e8ee185ac3') + + def install(self, spec, prefix): + make() + make("install", "PREFIX=" + prefix) diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py new file mode 100644 index 0000000000..010a5918c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/libxc/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Libxc(Package): + """Libxc is a library of exchange-correlation functionals for + density-functional theory.""" + + homepage = "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc" + url = "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz" + + version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec') + + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--enable-shared') + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py index d7d94c4546..b2543be5da 100644 --- a/var/spack/repos/builtin/packages/libxcb/package.py +++ b/var/spack/repos/builtin/packages/libxcb/package.py @@ -13,6 +13,7 @@ class Libxcb(Package): version('1.11.1', '118623c15a96b08622603a71d8789bf3') depends_on("python") depends_on("xcb-proto") + depends_on("pkg-config") # depends_on('pthread') # Ubuntu: apt-get install libpthread-stubs0-dev # depends_on('xau') # Ubuntu: apt-get install libxau-dev diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py index d3bab554fe..b05f23a3dc 100644 --- a/var/spack/repos/builtin/packages/metis/package.py +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -24,7 +24,7 @@ ############################################################################## from spack import * -import glob,sys +import glob, sys, os class Metis(Package): """ @@ -36,7 +36,10 @@ class Metis(Package): homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz" - version('5.1.0', '5465e67079419a69e0116de24fce58fe') + version('5.1.0', '5465e67079419a69e0116de24fce58fe', + url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz') + version('4.0.3', '5efa35de80703c1b2c4d0de080fafbcf4e0d363a21149a1ad2f96e0144841a55', + url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD/metis-4.0.3.tar.gz') variant('shared', default=True, description='Enables the build of shared libraries') variant('debug', default=False, description='Builds the library 
in debug mode') @@ -45,12 +48,85 @@ class Metis(Package): variant('idx64', default=False, description='Use int64_t as default index type') variant('double', default=False, description='Use double precision floating point types') - depends_on('cmake @2.8:') # build-time dependency - + depends_on('cmake @2.8:', when='@5:') # build-time dependency depends_on('gdb', when='+gdb') - patch('install_gklib_defs_rename.patch') + patch('install_gklib_defs_rename.patch', when='@5:') + + + @when('@4:4.0.3') + def install(self, spec, prefix): + + if '+gdb' in spec: + raise InstallError('gdb support not implemented in METIS 4!') + if '+idx64' in spec: + raise InstallError('idx64 option not implemented in METIS 4!') + if '+double' in spec: + raise InstallError('double option not implemented for METIS 4!') + + options = ['COPTIONS=-fPIC'] + if '+debug' in spec: + options.append('OPTFLAGS=-g -O0') + make(*options) + + mkdir(prefix.bin) + for x in ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh', + 'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk'): + install(x, prefix.bin) + + mkdir(prefix.lib) + install('libmetis.a', prefix.lib) + + mkdir(prefix.include) + for h in glob.glob(join_path('Lib', '*.h')): + install(h, prefix.include) + + mkdir(prefix.share) + for f in (join_path(*p) + for p in (('Programs', 'io.c'), + ('Test','mtest.c'), + ('Graphs','4elt.graph'), + ('Graphs', 'metis.mesh'), + ('Graphs', 'test.mgraph'))): + install(f, prefix.share) + if '+shared' in spec: + if sys.platform == 'darwin': + lib_dsuffix = 'dylib' + load_flag = '-Wl,-all_load' + no_load_flag = '' + else: + lib_dsuffix = 'so' + load_flag = '-Wl,-whole-archive' + no_load_flag = '-Wl,-no-whole-archive' + + os.system(spack_cc + ' -fPIC -shared ' + load_flag + + ' libmetis.a ' + no_load_flag + ' -o libmetis.' + + lib_dsuffix) + install('libmetis.' 
+ lib_dsuffix, prefix.lib) + + # Set up and run tests on installation + symlink(join_path(prefix.share, 'io.c'), 'io.c') + symlink(join_path(prefix.share, 'mtest.c'), 'mtest.c') + os.system(spack_cc + ' -I%s' % prefix.include + ' -c io.c') + os.system(spack_cc + ' -I%s' % prefix.include + + ' -L%s' % prefix.lib + ' -lmetis mtest.c io.o -o mtest') + _4eltgraph = join_path(prefix.share, '4elt.graph') + test_mgraph = join_path(prefix.share, 'test.mgraph') + metis_mesh = join_path(prefix.share, 'metis.mesh') + kmetis = join_path(prefix.bin, 'kmetis') + os.system('./mtest ' + _4eltgraph) + os.system(kmetis + ' ' + _4eltgraph + ' 40') + os.system(join_path(prefix.bin, 'onmetis') + ' ' + _4eltgraph) + os.system(join_path(prefix.bin, 'pmetis') + ' ' + test_mgraph + ' 2') + os.system(kmetis + ' ' + test_mgraph + ' 2') + os.system(kmetis + ' ' + test_mgraph + ' 5') + os.system(join_path(prefix.bin, 'partnmesh') + metis_mesh + ' 10') + os.system(join_path(prefix.bin, 'partdmesh') + metis_mesh + ' 10') + os.system(join_path(prefix.bin, 'mesh2dual') + metis_mesh) + + + @when('@5:') def install(self, spec, prefix): options = [] @@ -60,6 +136,7 @@ class Metis(Package): source_directory = self.stage.source_path options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory)) + options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix) if '+shared' in spec: options.append('-DSHARED:BOOL=ON') @@ -108,7 +185,3 @@ class Metis(Package): fs = glob.glob(join_path(source_directory,'GKlib',"*.h")) for f in fs: install(f, GKlib_dist) - - # The shared library is not installed correctly on Darwin; correct this - if (sys.platform == 'darwin') and ('+shared' in spec): - fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py new file mode 100644 index 0000000000..510e09c4e1 --- /dev/null +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -0,0 +1,125 @@ +from spack import * +import glob, string + +class Mfem(Package): + """Free, lightweight, scalable C++ library for finite element methods.""" + + homepage = 'http://www.mfem.org' + url = 'https://github.com/mfem/mfem' + +# version('3.1', git='https://github.com/mfem/mfem.git', +# commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574') + + version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57', + url='http://goo.gl/xrScXn', expand=False) + + variant('metis', default=False, description='Activate support for metis') + variant('hypre', default=False, description='Activate support for hypre') + variant('suite-sparse', default=False, + description='Activate support for SuiteSparse') + variant('mpi', default=False, description='Activate support for MPI') + variant('lapack', default=False, description='Activate support for LAPACK') + variant('debug', default=False, description='Build debug version') + + depends_on('blas', when='+lapack') + depends_on('lapack', when='+lapack') + + depends_on('mpi', when='+mpi') + depends_on('metis', when='+mpi') + depends_on('hypre', when='+mpi') + + depends_on('hypre', when='+hypre') + + depends_on('metis@4:', when='+metis') + + depends_on('suite-sparse', when='+suite-sparse') + depends_on('blas', when='+suite-sparse') + depends_on('lapack', when='+suite-sparse') + depends_on('metis@5:', when='+suite-sparse ^suite-sparse@4.5:') + depends_on('cmake', when='^metis@5:') + + def check_variants(self, spec): + if '+mpi' in spec and ('+hypre' not in spec or '+metis' not in spec): + raise 
InstallError('mfem+mpi must be built with +hypre ' + + 'and +metis!') + if '+suite-sparse' in spec and ('+metis' not in spec or + '+lapack' not in spec): + raise InstallError('mfem+suite-sparse must be built with ' + + '+metis and +lapack!') + if 'metis@5:' in spec and '%clang' in spec and ('^cmake %gcc' not in spec): + raise InstallError('To work around CMake bug with clang, must ' + + 'build mfem with mfem[+variants] %clang ' + + '^cmake %gcc to force CMake to build with gcc') + return + + def install(self, spec, prefix): + self.check_variants(spec) + + options = ['PREFIX=%s' % prefix] + + if '+lapack' in spec: + lapack_lib = '-L{0} -llapack -L{1} -lblas'.format( + spec['lapack'].prefix.lib, spec['blas'].prefix.lib) + options.extend(['MFEM_USE_LAPACK=YES', + 'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include, + 'LAPACK_LIB=%s' % lapack_lib]) + + if '+hypre' in spec: + options.extend(['HYPRE_DIR=%s' % spec['hypre'].prefix, + 'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include, + 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib + + ' -lHYPRE']) + + if '+metis' in spec: + metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib + if spec['metis'].satisfies('@5:'): + metis_str = 'MFEM_USE_METIS_5=YES' + else: + metis_str = 'MFEM_USE_METIS_5=NO' + options.extend([metis_str, + 'METIS_DIR=%s' % spec['metis'].prefix, + 'METIS_OPT=-I%s' % spec['metis'].prefix.include, + 'METIS_LIB=%s' % metis_lib]) + + if '+mpi' in spec: options.extend(['MFEM_USE_MPI=YES']) + + if '+suite-sparse' in spec: + ssp = spec['suite-sparse'].prefix + ss_lib = '-L%s' % ssp.lib + ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' + + ' -lccolamd -lsuitesparseconfig') + + no_librt_archs = ['darwin-i686', 'darwin-x86_64'] + no_rt = any(map(lambda a: spec.satisfies('='+a), no_librt_archs)) + if not no_rt: ss_lib += ' -lrt' + ss_lib += (' ' + metis_lib + ' ' + lapack_lib) + + options.extend(['MFEM_USE_SUITESPARSE=YES', + 'SUITESPARSE_DIR=%s' % ssp, + 'SUITESPARSE_OPT=-I%s' % ssp.include, + 'SUITESPARSE_LIB=%s' % ss_lib]) + + if '+debug' in spec: options.extend(['MFEM_DEBUG=YES']) + + # Dirty hack to cope with URL redirect + tgz_file = string.split(self.url,'/')[-1] + tar = which('tar') + tar('xzvf', tgz_file) + cd(glob.glob('mfem*')[0]) + # End dirty hack to cope with URL redirect + + make('config', *options) + make('all') + + # Run a small test before installation + args = ['-m', join_path('data','star.mesh'), '--no-visualization'] + if '+mpi' in spec: + Executable(join_path(spec['mpi'].prefix.bin, + 'mpirun'))('-np', + '4', + join_path('examples','ex1p'), + *args) + else: + Executable(join_path('examples', 'ex1'))(*args) + + make('install') diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index 2d7955e08d..2179086fe5 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -43,24 +43,29 @@ class Mpich(Package): version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') variant('verbs', default=False, description='Build support for OpenFabrics verbs.') + variant('pmi', default=True, description='Build with PMI support') + variant('hydra', default=True, description='Build the hydra process manager') provides('mpi@:3.0', when='@3:') provides('mpi@:1.3', when='@1:') - def setup_dependent_environment(self, env, dependent_spec): - env.set('MPICH_CC', spack_cc) - env.set('MPICH_CXX', spack_cxx) - env.set('MPICH_F77', spack_f77) - env.set('MPICH_F90', spack_f90) - env.set('MPICH_FC', spack_fc) + def 
setup_dependent_environment(self, spack_env, run_env, dependent_spec): + spack_env.set('MPICH_CC', spack_cc) + spack_env.set('MPICH_CXX', spack_cxx) + spack_env.set('MPICH_F77', spack_f77) + spack_env.set('MPICH_F90', spack_fc) + spack_env.set('MPICH_FC', spack_fc) def setup_dependent_package(self, module, dep_spec): - """For dependencies, make mpicc's use spack wrapper.""" - # FIXME : is this necessary ? Shouldn't this be part of a contract with MPI providers? - module.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++') + self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') + self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77') def install(self, spec, prefix): config_args = ["--prefix=" + prefix, + "--with-pmi=" + ("yes" if '+pmi' in spec else 'no'), + "--with-pm=" + ('hydra' if '+hydra' in spec else 'no'), "--enable-shared"] # Variants diff --git a/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch b/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch new file mode 100644 index 0000000000..53294fbbc6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch @@ -0,0 +1,154 @@ +--- mrnet-3093918/include/mrnet/Types.h 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/include/mrnet/Types.h 2016-03-16 12:29:33.986132302 -0700 +@@ -23,7 +23,7 @@ + #ifndef MRNET_VERSION_MAJOR + # define MRNET_VERSION_MAJOR 5 + # define MRNET_VERSION_MINOR 0 +-# define MRNET_VERSION_REV 0 ++# define MRNET_VERSION_REV 1 + #endif + + namespace MRN +--- mrnet-3093918/include/mrnet_lightweight/Types.h 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/include/mrnet_lightweight/Types.h 2016-03-16 12:29:33.987132302 -0700 +@@ -30,7 +30,7 @@ + #ifndef MRNET_VERSION_MAJOR + #define MRNET_VERSION_MAJOR 5 + #define MRNET_VERSION_MINOR 0 +-#define MRNET_VERSION_REV 0 ++#define MRNET_VERSION_REV 1 + #endif + void get_Version(int* major, + int* minor, +--- mrnet-3093918/src/lightweight/SerialGraph.c 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/src/lightweight/SerialGraph.c 2016-03-16 12:29:33.995132302 -0700 +@@ -59,7 +59,7 @@ + + mrn_dbg_func_begin(); + +- sprintf(hoststr, "[%s:%hu:%u:", ihostname, iport, irank); ++ sprintf(hoststr, "[%s:%05hu:%u:", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "looking for SubTreeRoot: '%s'\n", hoststr)); + + byte_array = sg->byte_array; +@@ -110,7 +110,7 @@ + + mrn_dbg_func_begin(); + +- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:0]", ihostname, iport, irank); ++ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:0]", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree leaf: %s\n", hoststr)); + + len += strlen(sg->byte_array) + 1; +@@ -139,7 +139,7 @@ + + mrn_dbg_func_begin(); + +- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:1", ihostname, iport, irank); ++ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:1", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree root: %s\n", hoststr)); + + len += strlen(sg->byte_array) + 1; +@@ -360,8 +360,8 @@ + char old_hoststr[256]; + char new_hoststr[256]; + +- sprintf(old_hoststr, "[%s:%hu:%u:", hostname, UnknownPort, irank); +- sprintf(new_hoststr, "[%s:%hu:%u:", hostname, port, irank); ++ sprintf(old_hoststr, "[%s:%05hu:%u:", hostname, UnknownPort, irank); ++ sprintf(new_hoststr, "[%s:%05hu:%u:", hostname, port, irank); + + old_byte_array = sg->byte_array; + new_byte_array = (char*) malloc( strlen(old_byte_array) + 
10 ); +--- mrnet-3093918/xplat/src/lightweight/SocketUtils.c 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/xplat/src/lightweight/SocketUtils.c 2016-03-16 12:29:34.006132303 -0700 +@@ -15,7 +15,7 @@ + #else + const XPlat_Socket InvalidSocket = INVALID_SOCKET; + #endif +-const XPlat_Port InvalidPort = (XPlat_Port)-1; ++const XPlat_Port InvalidPort = (XPlat_Port)0; + + static bool_t SetTcpNoDelay( XPlat_Socket sock ) + { +--- mrnet-3093918/conf/configure.in 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/conf/configure.in 2016-03-16 12:45:54.573196781 -0700 +@@ -107,6 +107,18 @@ + AC_SUBST(PURIFY) + + ++AC_ARG_WITH(expat, ++ [AS_HELP_STRING([--with-expat=PATH], ++ [Absolute path to installation of EXPAT libraries (note: specify the path to the directory containing "include" and "lib" sub-directories)])], ++ [EXPAT_DIR="${withval}"], ++ [EXPAT_DIR=""]) ++ ++if test "x$EXPAT_DIR" = "x" ; then ++ EXPAT_LIB="" ++else ++ EXPAT_LIB="-L$EXPAT_DIR/lib" ++fi ++ + dnl === Checks for header files. + AC_CHECK_HEADERS([assert.h errno.h fcntl.h limits.h netdb.h signal.h stddef.h stdlib.h stdio.h string.h unistd.h arpa/inet.h netinet/in.h sys/ioctl.h sys/socket.h sys/sockio.h sys/time.h]) + AC_HEADER_STDBOOL +@@ -432,7 +444,7 @@ + CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps" + CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic" + CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps" +- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic" ++ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic" + + AC_CHECK_LIB( [alps], [alps_launch_tool_helper], + [HAVE_ATH_LIBS="yes"; EXTRA_LIBS="$CRAYXT_ATH_LIBS $EXTRA_LIBS"; EXTRA_LIBS_SO="$CRAYXT_ATH_LIBS_SO $EXTRA_LIBS_SO"], +--- mrnet-3093918/configure 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/configure 2016-03-16 13:47:20.386439143 -0700 +@@ -742,6 +742,7 @@ + enable_debug + enable_ltwt_threadsafe + with_purify ++with_expat + ' + ac_precious_vars='build_alias + host_alias +@@ -1399,6 +1400,9 @@ + containing "include" and "lib" sub-directories) + --with-launchmon=PATH Absolute path to installation of LaunchMON + --with-purify Use purify for memory debugging ++ --with-expat=PATH Absolute path to installation of EXPAT libraries ++ (note: specify the path to the directory containing ++ "include" and "lib" sub-directories) + + Some influential environment variables: + CC C compiler command +@@ -3541,6 +3545,21 @@ + + + ++# Check whether --with-expat was given. ++if test "${with_expat+set}" = set; then : ++ withval=$with_expat; EXPAT_DIR="${withval}" ++else ++ EXPAT_DIR="" ++fi ++ ++ ++if test "x$EXPAT_DIR" = "x" ; then ++ EXPAT_LIB="" ++else ++ EXPAT_LIB="-L$EXPAT_DIR/lib" ++fi ++ ++ + ac_ext=cpp + ac_cpp='$CXXCPP $CPPFLAGS' + ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +@@ -5473,7 +5492,7 @@ + CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps" + CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic" + CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps" +- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic" ++ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic" + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for alps_launch_tool_helper in -lalps" >&5 + $as_echo_n "checking for alps_launch_tool_helper in -lalps... 
" >&6; } diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py index fed944e45f..b52233be4a 100644 --- a/var/spack/repos/builtin/packages/mrnet/package.py +++ b/var/spack/repos/builtin/packages/mrnet/package.py @@ -3,11 +3,18 @@ from spack import * class Mrnet(Package): """The MRNet Multi-Cast Reduction Network.""" homepage = "http://paradyn.org/mrnet" - url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz" + url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz" + list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet" - version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') - version('4.1.0', '5a248298b395b329e2371bf25366115c') + version('5.0.1-2', git='https://github.com/dyninst/mrnet.git', commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9') version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd') + version('4.1.0', '5a248298b395b329e2371bf25366115c') + version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') + + # Add a patch that brings mrnet-5.0.1 up to date with the current development tree + # The development tree contains fixes needed for the krell based tools + variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.") + patch('krell-5.0.1.patch', when='@5.0.1+krellpatch') variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries") parallel = False diff --git a/var/spack/repos/builtin/packages/msgpack-c/package.py b/var/spack/repos/builtin/packages/msgpack-c/package.py new file mode 100644 index 0000000000..a363bc89be --- /dev/null +++ b/var/spack/repos/builtin/packages/msgpack-c/package.py @@ -0,0 +1,14 @@ +from spack import * + +class MsgpackC(Package): + """A small, fast binary interchange format convertible to/from JSON""" + homepage = "http://www.msgpack.org" + url = "https://github.com/msgpack/msgpack-c/archive/cpp-1.4.1.tar.gz" + + version('1.4.1', 'e2fd3a7419b9bc49e5017fdbefab87e0') + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 025d86ebdc..58f790ec32 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -23,7 +23,7 @@ class Mumps(Package): depends_on('scotch + esmumps', when='~ptscotch+scotch') depends_on('scotch + esmumps + mpi', when='+ptscotch') - depends_on('metis', when='+metis') + depends_on('metis@5:', when='+metis') depends_on('parmetis', when="+parmetis") depends_on('blas') depends_on('lapack') diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index e4e95f92af..c68a04d251 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -140,6 +140,19 @@ class Mvapich2(Package): configure_args.extend(network_options) + def setup_dependent_environment(self, spack_env, run_env, extension_spec): + spack_env.set('MPICH_CC', spack_cc) + spack_env.set('MPICH_CXX', spack_cxx) + spack_env.set('MPICH_F77', spack_f77) + spack_env.set('MPICH_F90', spack_fc) + spack_env.set('MPICH_FC', spack_fc) + + def setup_dependent_package(self, module, dep_spec): + self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx') + self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') + self.spec.mpif77 = join_path(self.prefix.bin, 
'mpif77') + def install(self, spec, prefix): # we'll set different configure flags depending on our environment configure_args = [ diff --git a/var/spack/repos/builtin/packages/nccmp/package.py b/var/spack/repos/builtin/packages/nccmp/package.py new file mode 100644 index 0000000000..72e86831c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/nccmp/package.py @@ -0,0 +1,23 @@ +from spack import * + +class Nccmp(Package): + """Compare NetCDF Files""" + homepage = "http://nccmp.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz" + + version('1.8.2.0', '81e6286d4413825aec4327e61a28a580') + + depends_on('netcdf') + + def install(self, spec, prefix): + # Configure says: F90 and F90FLAGS are replaced by FC and + # FCFLAGS respectively in this configure, please unset + # F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure + # again. + env.pop('F90', None) + env.pop('F90FLAGS', None) + + configure('--prefix=%s' % prefix) + make() + make("check") + make("install") diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py new file mode 100644 index 0000000000..3a9aeaa656 --- /dev/null +++ b/var/spack/repos/builtin/packages/nco/package.py @@ -0,0 +1,30 @@ +from spack import * +import os + +class Nco(Package): + """The NCO toolkit manipulates and analyzes data stored in + netCDF-accessible formats""" + + homepage = "https://sourceforge.net/projects/nco" + url = "https://github.com/nco/nco/archive/4.5.5.tar.gz" + + version('4.5.5', '9f1f1cb149ad6407c5a03c20122223ce') + + # See "Compilation Requirements" at: + # http://nco.sourceforge.net/#bld + + depends_on('netcdf') + depends_on('antlr@2.7.7+cxx') # (required for ncap2) + depends_on('gsl') # (desirable for ncap2) + depends_on('udunits2') # (allows dimensional unit transformations) + # depends_on('opendap') # (enables network transparency), + + def install(self, spec, prefix): + opts = [ + '--prefix=%s' % prefix, + '--disable-openmp', # TODO: Make this a variant + '--disable-dap', # TODO: Make this a variant + '--disable-esmf'] + configure(*opts) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py index 8dc808caac..219fbce226 100644 --- a/var/spack/repos/builtin/packages/ncurses/package.py +++ b/var/spack/repos/builtin/packages/ncurses/package.py @@ -8,11 +8,10 @@ class Ncurses(Package): """ homepage = "http://invisible-island.net/ncurses/ncurses.html" + url = "http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz" - version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') - version('6.0', 'ee13d052e1ead260d7c28071f46eefb1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz') + version('6.0', 'ee13d052e1ead260d7c28071f46eefb1') + version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1') patch('patch_gcc_5.txt', when='%gcc@5.0:') diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py new file mode 100644 index 0000000000..1aa13e3f03 --- /dev/null +++ b/var/spack/repos/builtin/packages/ncview/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Ncview(Package): + """Simple viewer for NetCDF files.""" + homepage = "http://meteora.ucsd.edu/~pierce/ncview_home_page.html" + url = "ftp://cirrus.ucsd.edu/pub/ncview/ncview-2.1.7.tar.gz" + + version('2.1.7', 'debd6ca61410aac3514e53122ab2ba07') + + depends_on("netcdf") + 
depends_on("udunits2") + + # OS Dependencies + # Ubuntu: apt-get install libxaw7-dev + # CentOS 7: yum install libXaw-devel + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index b60a2c4e9a..4aad0f6f3c 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -12,15 +12,19 @@ class Netcdf(Package): version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e') version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') - variant('mpi', default=True, description='Enables MPI parallelism') - variant('hdf4', default=False, description="Enable HDF4 support") + variant('mpi', default=True, description='Enables MPI parallelism') + variant('hdf4', default=False, description='Enable HDF4 support') - # Dependencies: - depends_on("curl") # required for DAP support + depends_on("m4") depends_on("hdf", when='+hdf4') - depends_on("hdf5+mpi~cxx", when='+mpi') # required for NetCDF-4 support - depends_on("hdf5~mpi", when='~mpi') # required for NetCDF-4 support - depends_on("zlib") # required for NetCDF-4 support + + # Required for DAP support + depends_on("curl") + + # Required for NetCDF-4 support + depends_on("zlib") + depends_on("hdf5+mpi", when='+mpi') + depends_on("hdf5~mpi", when='~mpi') def install(self, spec, prefix): # Environment variables @@ -48,7 +52,7 @@ class Netcdf(Package): # /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0' LIBS.append("-lcurl") CPPFLAGS.append("-I%s" % spec['curl'].prefix.include) - LDFLAGS.append ("-L%s" % spec['curl'].prefix.lib) + LDFLAGS.append( "-L%s" % spec['curl'].prefix.lib) if '+mpi' in spec: config_args.append('--enable-parallel4') diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py index 05436332ac..f70e634347 100644 --- a/var/spack/repos/builtin/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -34,15 +34,17 @@ class NetlibLapack(Package): def patch(self): # Fix cblas CMakeLists.txt -- has wrong case for subdirectory name. 
- filter_file('${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/', - '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 'CBLAS/CMakeLists.txt', string=True) - + if self.spec.satisfies('@3.6.0:'): + filter_file('${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/', + '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 'CBLAS/CMakeLists.txt', string=True) def install_one(self, spec, prefix, shared): cmake_args = ['-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'), - '-DCBLAS=ON', # always build CBLAS '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')] + if spec.satisfies('@3.6.0:'): + cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS + if '+external-blas' in spec: # TODO : the mechanism to specify the library should be more general, # TODO : but this allows to have an hook to an external blas @@ -80,6 +82,3 @@ class NetlibLapack(Package): if '+shared' in self.spec: self.spec.blas_shared_lib = join_path(libdir, 'libblas.%s' % dso_suffix) self.spec.lapack_shared_lib = join_path(libdir, 'liblapack.%s' % dso_suffix) - - - diff --git a/var/spack/repos/builtin/packages/openblas/make.patch b/var/spack/repos/builtin/packages/openblas/make.patch new file mode 100644 index 0000000000..851214211a --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/make.patch @@ -0,0 +1,35 @@ +diff --git a/Makefile.system b/Makefile.system +index b89f60e..2dbdad0 100644 +--- a/Makefile.system ++++ b/Makefile.system +@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0 + endif + GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE) + ++ifdef MAKE_NO_J ++GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J) ++endif ++ + ifdef MAKE_NB_JOBS + GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS) + endif +diff --git a/getarch.c b/getarch.c +index f9c49e6..dffad70 100644 +--- a/getarch.c ++++ b/getarch.c +@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){ + #endif + #endif + ++#ifndef MAKE_NO_J + #ifdef MAKE_NB_JOBS + printf("MAKE += -j %d\n", MAKE_NB_JOBS); + #elif NO_PARALLEL_MAKE==1 +@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){ + printf("MAKE += -j %d\n", get_num_cores()); + #endif + #endif ++#endif + + break; + diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 4ec829a85b..d147533491 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -1,26 +1,35 @@ from spack import * import sys import os +import shutil class Openblas(Package): """OpenBLAS: An optimized BLAS library""" homepage = "http://www.openblas.net" url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + version('0.2.18', '805e7f660877d588ea7e3792cda2ee65') version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1') version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc') version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') - variant('shared', default=True, description="Build shared libraries as well as static libs.") + variant('shared', default=True, description="Build shared libraries as well as static libs.") + variant('openmp', default=False, description="Enable OpenMP support.") + variant('fpic', default=True, description="Build position independent code") # virtual dependency provides('blas') provides('lapack') + patch('make.patch') def install(self, spec, prefix): - make_defs = ['CC=%s' % spack_cc, - 'FC=%s' % spack_fc] + # Openblas is picky about compilers. Configure fails with + # FC=/abs/path/to/f77, whereas FC=f77 works fine. 
+ # To circumvent this, provide basename only: + make_defs = ['CC=%s' % os.path.basename(spack_cc), + 'FC=%s' % os.path.basename(spack_f77), + 'MAKE_NO_J=1'] make_targets = ['libs', 'netlib'] @@ -28,12 +37,24 @@ class Openblas(Package): if '+shared' in spec: make_targets += ['shared'] else: + if '+fpic' in spec: + make_defs.extend(['CFLAGS=-fPIC', 'FFLAGS=-fPIC']) make_defs += ['NO_SHARED=1'] # fix missing _dggsvd_ and _sggsvd_ if spec.satisfies('@0.2.16'): make_defs += ['BUILD_LAPACK_DEPRECATED=1'] + # Add support for OpenMP + if '+openmp' in spec: + # Note: Apple's most recent Clang 7.3.0 still does not support OpenMP. + # What is worse, Openblas (as of 0.2.18) hardcoded that OpenMP cannot + # be used with any (!) compiler named clang, bummer. + if spec.satisfies('%clang'): + raise InstallError('OpenBLAS does not support OpenMP with clang!') + + make_defs += ['USE_OPENMP=1'] + make_args = make_defs + make_targets make(*make_args) @@ -55,6 +76,10 @@ class Openblas(Package): if '+shared' in spec: symlink('libopenblas.%s' % dso_suffix, 'liblapack.%s' % dso_suffix) + # Openblas may pass its own test but still fail to compile Lapack + # symbols. To make sure we get working Blas and Lapack, do a small test. + self.check_install(spec) + def setup_dependent_package(self, module, dspec): # This is WIP for a prototype interface for virtual packages. @@ -68,3 +93,43 @@ class Openblas(Package): self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' % dso_suffix) self.spec.lapack_shared_lib = self.spec.blas_shared_lib + def check_install(self, spec): + # TODO: Pull this out to the framework function which recieves a pair of xyz.c and xyz.output + print "Checking Openblas installation..." + source_file = join_path(os.path.dirname(self.module.__file__), + 'test_cblas_dgemm.c') + output_file = join_path(os.path.dirname(self.module.__file__), + 'test_cblas_dgemm.output') + + with open(output_file, 'r') as f: + expected = f.read() + + cc = which('cc') + cc('-c', "-I%s" % join_path(spec.prefix, "include"), source_file) + link_flags = ["-L%s" % join_path(spec.prefix, "lib"), + "-llapack", + "-lblas", + "-lpthread" + ] + if '+openmp' in spec: + link_flags.extend([self.compiler.openmp_flag]) + cc('-o', "check", "test_cblas_dgemm.o", + *link_flags) + + try: + check = Executable('./check') + output = check(return_output=True) + except: + output = "" + success = output == expected + if not success: + print "Produced output does not match expected output." 
+ print "Expected output:" + print '-'*80 + print expected + print '-'*80 + print "Produced output:" + print '-'*80 + print output + print '-'*80 + raise RuntimeError("Openblas install check failed") diff --git a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c new file mode 100644 index 0000000000..3813a23b69 --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c @@ -0,0 +1,49 @@ +#include <cblas.h> +#include <stdio.h> + +double m[] = { + 3, 1, 3, + 1, 5, 9, + 2, 6, 5 +}; + +double x[] = { + -1, 3, -3 +}; + +#ifdef __cplusplus +extern "C" { +#endif + + void dgesv_(int *n, int *nrhs, double *a, int *lda, + int *ipivot, double *b, int *ldb, int *info); + +#ifdef __cplusplus +} +#endif + +int main(void) { + int i; + // blas: + double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0}; + double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0}; + double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5}; + cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans, + 3, 3, 2, 1, A, 3, B, 3, 2, C, 3); + for (i = 0; i < 9; i++) + printf("%f\n", C[i]); + + // lapack: + int ipiv[3]; + int j; + int info; + int n = 1; + int nrhs = 1; + int lda = 3; + int ldb = 3; + dgesv_(&n,&nrhs, &m[0], &lda, ipiv, &x[0], &ldb, &info); + for (i=0; i<3; ++i) + printf("%5.1f %3d\n", x[i], ipiv[i]); + + return 0; +} diff --git a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output new file mode 100644 index 0000000000..9c235e314f --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output @@ -0,0 +1,12 @@ +11.000000 +-9.000000 +5.000000 +-9.000000 +21.000000 +-1.000000 +5.000000 +-1.000000 +3.000000 + -0.3 1 + 3.0 1499101120 + -3.0 32767 diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 9a127f1812..d0dd2d657f 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -1,7 +1,5 @@ -import os - from spack import * - +import os class Openmpi(Package): """Open MPI is a project combining technologies and resources from @@ -28,6 +26,7 @@ class Openmpi(Package): patch('configure.patch', when="@1.10.0:1.10.1") variant('psm', default=False, description='Build support for the PSM library.') + variant('pmi', default=True, description='Build support for PMI-based launchers') variant('verbs', default=False, description='Build support for OpenFabrics verbs.') # TODO : variant support for other schedulers is missing @@ -38,6 +37,7 @@ class Openmpi(Package): depends_on('hwloc') + def url_for_version(self, version): return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version) @@ -48,6 +48,12 @@ class Openmpi(Package): spack_env.set('OMPI_FC', spack_fc) spack_env.set('OMPI_F77', spack_f77) + def setup_dependent_package(self, module, dep_spec): + self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++') + self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') + self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77') + def install(self, spec, prefix): config_args = ["--prefix=%s" % prefix, @@ -62,6 +68,9 @@ class Openmpi(Package): if '+psm' in spec: config_args.append("--with-psm") + if '+pmi' in spec: + config_args.append("--with-pmi") #TODO: let user specify directory when possible + if '+verbs' in spec: # Up through 
version 1.6, this option was previously named --with-openib if spec.satisfies('@:1.6'): diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py index 8c71bcb7c3..bcd77351aa 100644 --- a/var/spack/repos/builtin/packages/openspeedshop/package.py +++ b/var/spack/repos/builtin/packages/openspeedshop/package.py @@ -1,5 +1,5 @@ ################################################################################ -# Copyright (c) 2015 Krell Institute. All Rights Reserved. +# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -28,20 +28,15 @@ class Openspeedshop(Package): as open source code primarily under LGPL. """ - homepage = "http://www.openspeedshop.org" - url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.2/openspeedshop-2.2.tar.gz/download" + url = "https://github.com/OpenSpeedShop" version('2.2', '16cb051179c2038de4e8a845edf1d573') + # Use when the git repository is available + version('2.2', branch='master', git='https://github.com/OpenSpeedShop/openspeedshop.git') - #homepage = "http://www.openspeedshop.org" - #url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.1/openspeedshop-2.1.tar.gz/download" - #version('2.1', 'bdaa57c1a0db9d0c3e0303fd8496c507') - - # optional mirror template - #url = "file:/g/g24/jeg/openspeedshop-2.1.tar.gz" - #version('2.1', '64ee17166519838c7b94a1adc138e94f') - - + # Optional mirror template + #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz" + #version('2.2', '643337740dc6c2faca60f42d3620b0e1') parallel = False @@ -51,11 +46,17 @@ class Openspeedshop(Package): variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.") variant('cuda', default=False, description="build with cuda packages included.") variant('ptgf', default=False, description="build with the PTGF based gui package enabled.") - variant('intelmic', default=False, description="build for the Intel MIC platform.") - variant('cray', default=False, description="build for Cray platforms.") - variant('bluegene', default=False, description="build for Cray platforms.") variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.") + # MPI variants + variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.") + variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.") + variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.") + variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.") + variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.") + variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.") + + depends_on("cmake@3.0.2") # Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build depends_on("bison") depends_on("flex") @@ -63,8 +64,8 @@ class Openspeedshop(Package): depends_on("libelf") depends_on("libdwarf") 
depends_on("sqlite") - depends_on("boost@1.50.0") - depends_on("dyninst@8.2.1") + depends_on("boost@1.50.0:") + depends_on("dyninst@9.1.0") depends_on("python") depends_on("qt@3.3.8b+krellpatch") @@ -72,15 +73,78 @@ class Openspeedshop(Package): depends_on("libunwind", when='+offline') depends_on("papi", when='+offline') depends_on("libmonitor+krellpatch", when='+offline') - #depends_on("openmpi+krelloptions", when='+offline') - #depends_on("openmpi", when='+offline') - #depends_on("mpich", when='+offline') + depends_on("openmpi", when='+offline+openmpi') + depends_on("mpich", when='+offline+mpich') + depends_on("mpich2", when='+offline+mpich2') + depends_on("mvapich2", when='+offline+mvapich2') + depends_on("mvapich", when='+offline+mvapich') + depends_on("mpt", when='+offline+mpt') # Dependencies only for the openspeedshop cbtf package. depends_on("cbtf", when='+cbtf') depends_on("cbtf-krell", when='+cbtf') - depends_on("cbtf-argonavis", when='+cbtf') - depends_on("mrnet@4.1.0:+lwthreads", when='+cbtf') + depends_on("cbtf-argonavis", when='+cbtf+cuda') + depends_on("mrnet@5.0.1:+lwthreads+krellpatch", when='+cbtf') + + def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): + # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + + compile_flags="-O2 -g" + BuildTypeOptions = [] + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + for word in cmakeOptions[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_CXX_FLAGS'): + cmakeOptions.remove(word) + if word.startswith('-DCMAKE_C_FLAGS'): + cmakeOptions.remove(word) + BuildTypeOptions.extend([ + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags + ]) + + cmakeOptions.extend(BuildTypeOptions) + + def set_mpi_cmakeOptions(self, spec, cmakeOptions): + # Appends to cmakeOptions the options that will enable the appropriate MPI implementations + + MPIOptions = [] + + # openmpi + if '+openmpi' in spec: + MPIOptions.extend([ + '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix + ]) + # mpich + if '+mpich' in spec: + MPIOptions.extend([ + '-DMPICH_DIR=%s' % spec['mpich'].prefix + ]) + # mpich2 + if '+mpich2' in spec: + MPIOptions.extend([ + '-DMPICH2_DIR=%s' % spec['mpich2'].prefix + ]) + # mvapich + if '+mvapich' in spec: + MPIOptions.extend([ + '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix + ]) + # mvapich2 + if '+mvapich2' in spec: + MPIOptions.extend([ + '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix + ]) + # mpt + if '+mpt' in spec: + MPIOptions.extend([ + '-DMPT_DIR=%s' % spec['mpt'].prefix + ]) + + cmakeOptions.extend(MPIOptions) + def install(self, spec, prefix): @@ -100,51 +164,118 @@ class Openspeedshop(Package): instrumentor_setting = "offline" if '+runtime' in spec: with working_dir('build_runtime', create=True): - cmake('..', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix, - *std_cmake_args) + + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % spec['papi'].prefix + ]) + + # Add 
any MPI implementations coming from variant settings + self.set_mpi_cmakeOptions(spec, cmakeOptions) + cmakeOptions.extend(std_cmake_args) + + # Adjust the build options to the favored ones for this build + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + cmake('..', *cmakeOptions) + make("clean") make() make("install") else: cmake_prefix_path = join_path(spec['dyninst'].prefix) with working_dir('build', create=True): + #python_vers=join_path(spec['python'].version[:2]) #'-DOPENMPI_DIR=%s' % openmpi_prefix_path, #'-DMVAPICH_DIR=%s' % mvapich_prefix_path, + #'-DMPICH_DIR=%s' % spec['mpich'].prefix, + #'-DMPICH2_DIR=%s' % spec['mpich2'].prefix, + #'-DBoost_NO_SYSTEM_PATHS=TRUE', + #'-DBOOST_ROOT=%s' % spec['boost'].prefix, + #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, + python_vers='%d.%d' % spec['python'].version[:2] - cmake('..', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBELF_DIR=%s' % spec['libelf'].prefix, - '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix, - '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, - '-DQTLIB_DIR=%s' % spec['qt'].prefix, - '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'), - '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers, - '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so', - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - *std_cmake_args) + + cmakeOptions = [] + cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % spec['papi'].prefix, + '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, + '-DQTLIB_DIR=%s' % spec['qt'].prefix, + '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'), + '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers, + '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so', + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix + ]) + + # Add any MPI implementations coming from variant settings + self.set_mpi_cmakeOptions(spec, cmakeOptions) + cmakeOptions.extend(std_cmake_args) + + # Adjust the build options to the favored ones for this build + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + cmake('..', *cmakeOptions) + make("clean") make() make("install") elif '+cbtf' in spec: instrumentor_setting = "cbtf" + resolve_symbols = "symtabapi" cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix) + #runtime_platform_cray = "cray" + #if '+cray' in spec: + # if '+runtime' in spec: + # #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \ + 
# with working_dir('build_cbtf_cray_runtime', create=True): + # python_vers='%d.%d' % spec['python'].version[:2] + # cmake('..', + # '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + # '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + # '-DRUNTIME_PLATFORM=%s' % runtime_platform_cray, + # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + # '-DRESOLVE_SYMBOLS=%s' % resolve_symbols, + # '-DINSTRUMENTOR=%s' % instrumentor_setting, + # '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + # '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % spec['cbtf-krell'].prefix, + # '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + # '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + # '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + # '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + # '-DPAPI_DIR=%s' % spec['papi'].prefix, + # '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + # '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + # '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + # '-DBoost_NO_SYSTEM_PATHS=TRUE', + # '-DBOOST_ROOT=%s' % spec['boost'].prefix, + # *std_cmake_args) + + # make("clean") + # make() + # make("install") + + + #elif '+mic' in spec: + # comment out else and shift over the default case below until arch detection is in + #else: + if '+runtime' in spec: with working_dir('build_cbtf_runtime', create=True): python_vers='%d.%d' % spec['python'].version[:2] @@ -203,14 +334,63 @@ class Openspeedshop(Package): # tbd - #if '+intelmic' in spec: - # with working_dir('build_intelmic_compute', create=True): - # tbd - # with working_dir('build_intelmic_frontend', create=True): - # tbd - #if '+cray' in spec: - # with working_dir('build_cray_compute', create=True): - # tbd - # with working_dir('build_cray_frontend', create=True): - # tbd + #if '+cbtf' in spec: + # if cray build type detected: + # if '+runtime' in spec: + # with working_dir('build_cray_cbtf_compute', create=True): + # tbd + # else: + # with working_dir('build_cray_cbtf_frontend', create=True): + # tbd + # with working_dir('build_cray_osscbtf_frontend', create=True): + # tbd + # fi + # elif '+intelmic' in spec: + # if '+runtime' in spec: + # with working_dir('build_intelmic_cbtf_compute', create=True): + # tbd + # else: + # with working_dir('build_intelmic_cbtf_frontend', create=True): + # tbd + # with working_dir('build_intelmic_osscbtf_frontend', create=True): + # fi + # else + # with working_dir('build_cluster_cbtf', create=True): + # tbd + # with working_dir('build_cluster osscbtf', create=True): + # tbd + # fi + #elif '+offline' in spec: + # if cray build type detected: + # if '+runtime' in spec: + # with working_dir('build_cray_ossoff_compute', create=True): + # tbd + # else: + # with working_dir('build_cray_ossoff_frontend', create=True): + # tbd + # fi + # elif '+intelmic' in spec: + # if '+runtime' in spec: + # with working_dir('build_intelmic_ossoff_compute', create=True): + # tbd + # else: + # with working_dir('build_intelmic_ossoff_frontend', create=True): + # tbd + # fi + # elif bgq build type detected: + # if '+runtime' in spec: + # with working_dir('build_bgq_ossoff_compute', create=True): + # tbd + # else: + # with working_dir('build_bgq_ossoff_frontend', create=True): + # tbd + # fi + # else + # with working_dir('build_cluster ossoff', create=True): + # tbd + # fi + #fi + + + diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 70afaf4038..d0c95731a2 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ 
b/var/spack/repos/builtin/packages/openssl/package.py @@ -3,6 +3,7 @@ import llnl.util.tty as tty from spack import * + class Openssl(Package): """The OpenSSL Project is a collaborative effort to develop a robust, commercial-grade, full-featured, and Open Source @@ -14,10 +15,12 @@ class Openssl(Package): version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') version('1.0.1r', '1abd905e079542ccae948af37e393d28') + version('1.0.1t', '9837746fcf8a6727d46d22ca35953da1') version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a') version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5') version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d') version('1.0.2g', 'f3c710c045cdee5fd114feb69feba7aa') + version('1.0.2h', '9392e65072ce4b614c1392eefc1f23d0') depends_on("zlib") parallel = False @@ -30,26 +33,14 @@ class Openssl(Package): # Same idea, but just to avoid issuing the same message multiple times warnings_given_to_user = getattr(Openssl, '_warnings_given', {}) if openssl_url is None: - latest = 'http://www.openssl.org/source/openssl-{version}.tar.gz' - older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz' - # Try to use the url where the latest tarballs are stored. If the url does not exist (404), then - # return the url for older format - version_number = '.'.join([str(x) for x in version[:-1]]) - older_url = older.format(version_number=version_number, version_full=version) - latest_url = latest.format(version=version) - response = urllib.urlopen(latest.format(version=version)) - if response.getcode() == 404: - openssl_url = older_url - # Checks if we already warned the user for this particular version of OpenSSL. - # If not we display a warning message and mark this version + if self.spec.satisfies('@external'): + # The version @external is reserved to system openssl. In that case return a fake url and exit + openssl_url = '@external (reserved version for system openssl)' if not warnings_given_to_user.get(version, False): - tty.warn('This installation depends on an old version of OpenSSL, which may have known security issues. ') - tty.warn('Consider updating to the latest version of this package.') - tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage)) + tty.msg('Using openssl@external : the version @external is reserved for system openssl') warnings_given_to_user[version] = True else: - openssl_url = latest_url - # Store the computed URL + openssl_url = self.check_for_outdated_release(version, warnings_given_to_user) # Store the computed URL openssl_urls[version] = openssl_url # Store the updated dictionary of URLS Openssl._openssl_url = openssl_urls @@ -58,6 +49,28 @@ class Openssl(Package): return openssl_url + def check_for_outdated_release(self, version, warnings_given_to_user): + latest = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz' + older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz' + # Try to use the url where the latest tarballs are stored. If the url does not exist (404), then + # return the url for older format + version_number = '.'.join([str(x) for x in version[:-1]]) + try: + openssl_url = latest.format(version=version) + urllib.urlopen(openssl_url) + except IOError: + openssl_url = older.format(version_number=version_number, version_full=version) + # Checks if we already warned the user for this particular version of OpenSSL. 
+ # If not we display a warning message and mark this version + if not warnings_given_to_user.get(version, False): + tty.warn( + 'This installation depends on an old version of OpenSSL, which may have known security issues. ') + tty.warn('Consider updating to the latest version of this package.') + tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage)) + warnings_given_to_user[version] = True + + return openssl_url + def install(self, spec, prefix): # OpenSSL uses a variable APPS in its Makefile. If it happens to be set # in the environment, then this will override what is set in the diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py new file mode 100644 index 0000000000..01054471a3 --- /dev/null +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -0,0 +1,38 @@ +from spack import * + +class OsuMicroBenchmarks(Package): + """The Ohio MicroBenchmark suite is a collection of independent MPI + message passing performance microbenchmarks developed and written at + The Ohio State University. It includes traditional benchmarks and + performance measures such as latency, bandwidth and host overhead + and can be used for both traditional and GPU-enhanced nodes.""" + + homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/" + url = "http://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-5.3.tar.gz" + + version('5.3', '42e22b931d451e8bec31a7424e4adfc2') + + variant('cuda', default=False, description="Enable CUDA support") + + depends_on('mpi') + depends_on('cuda', when='+cuda') + + + def install(self, spec, prefix): + config_args = [ + 'CC=%s' % spec['mpi'].prefix.bin + '/mpicc', + 'CXX=%s' % spec['mpi'].prefix.bin + '/mpicxx', + 'LDFLAGS=-lrt', + '--prefix=%s' % prefix + ] + + if '+cuda' in spec: + config_args.extend([ + '--enable-cuda', + '--with-cuda=%s' % spec['cuda'].prefix, + ]) + + configure(*config_args) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py index 1e2969fe64..adf75babb9 100644 --- a/var/spack/repos/builtin/packages/p4est/package.py +++ b/var/spack/repos/builtin/packages/p4est/package.py @@ -7,10 +7,17 @@ class P4est(Package): version('1.1', '37ba7f4410958cfb38a2140339dbf64f') - # disable by default to make it work on frontend of clusters - variant('tests', default=False, description='Run small tests') + variant('tests', default=True, description='Run small tests') + # build dependencies + depends_on('automake') + depends_on('autoconf') + depends_on('libtool@2.4.2:') + + # other dependencies + depends_on('lua') # Needed for the submodule sc depends_on('mpi') + depends_on('zlib') def install(self, spec, prefix): options = ['--enable-mpi', @@ -19,16 +26,20 @@ class P4est(Package): '--without-blas', 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL', 'CFLAGS=-O2', - 'CC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # TODO: use ENV variables or MPI class wrappers - 'CXX=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'), - 'FC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), - 'F77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'), + 'CC=%s' % self.spec['mpi'].mpicc, + 'CXX=%s' % self.spec['mpi'].mpicxx, + 'FC=%s' % self.spec['mpi'].mpifc, + 'F77=%s' % self.spec['mpi'].mpif77 ] configure('--prefix=%s' % prefix, *options) make() + # Make tests optional as sometimes mpiexec can't be run with an error: + # mpiexec has detected 
an attempt to run as root. + # Running at root is *strongly* discouraged as any mistake (e.g., in + # defining TMPDIR) or bug can result in catastrophic damage to the OS + # file system, leaving your system in an unusable state. if '+tests' in self.spec: - make("check") - + make("check") make("install") diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py index 53d69e28d9..74b3ea9ef9 100644 --- a/var/spack/repos/builtin/packages/papi/package.py +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -1,5 +1,8 @@ from spack import * +import glob import os +import sys +from llnl.util.filesystem import fix_darwin_install_name class Papi(Package): """PAPI provides the tool designer and application engineer with a @@ -18,17 +21,27 @@ class Papi(Package): version('5.3.0', '367961dd0ab426e5ae367c2713924ffb') def install(self, spec, prefix): - os.chdir("src/") + with working_dir("src"): - configure_args=["--prefix=%s" % prefix] + configure_args=["--prefix=%s" % prefix] - # PAPI uses MPI if MPI is present; since we don't require an - # MPI package, we ensure that all attempts to use MPI fail, so - # that PAPI does not get confused - configure_args.append('MPICC=:') + # PAPI uses MPI if MPI is present; since we don't require + # an MPI package, we ensure that all attempts to use MPI + # fail, so that PAPI does not get confused + configure_args.append('MPICC=:') - configure(*configure_args) + configure(*configure_args) - make() - make("install") + # Don't use <malloc.h> + for level in [".", "*", "*/*"]: + files = glob.iglob(join_path(level, "*.[ch]")) + filter_file(r"\<malloc\.h\>", "<stdlib.h>", *files) + make() + make("install") + + # The shared library is not installed correctly on Darwin + if sys.platform == 'darwin': + os.rename(join_path(prefix.lib, 'libpapi.so'), + join_path(prefix.lib, 'libpapi.dylib')) + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/paradiseo/enable_eoserial.patch b/var/spack/repos/builtin/packages/paradiseo/enable_eoserial.patch new file mode 100644 index 0000000000..8b3ccfeb84 --- /dev/null +++ b/var/spack/repos/builtin/packages/paradiseo/enable_eoserial.patch @@ -0,0 +1,14 @@ +diff --git a/eo/src/CMakeLists.txt b/eo/src/CMakeLists.txt +index b2b445a..d45ddc7 100644 +--- a/eo/src/CMakeLists.txt ++++ b/eo/src/CMakeLists.txt +@@ -47,7 +47,7 @@ install(DIRECTORY do es ga gp other utils + add_subdirectory(es) + add_subdirectory(ga) + add_subdirectory(utils) +-#add_subdirectory(serial) ++add_subdirectory(serial) # Required when including <paradiseo/eo/utils/eoTimer.h> , which is need by <paradiseo/eo/mpi/eoMpi.h> + + if(ENABLE_PYEO) + add_subdirectory(pyeo) +
\ No newline at end of file diff --git a/var/spack/repos/builtin/packages/paradiseo/fix_osx_detection.patch b/var/spack/repos/builtin/packages/paradiseo/fix_osx_detection.patch new file mode 100644 index 0000000000..27b240f673 --- /dev/null +++ b/var/spack/repos/builtin/packages/paradiseo/fix_osx_detection.patch @@ -0,0 +1,13 @@ +diff --git a/cmake/Config.cmake b/cmake/Config.cmake +index 02593ba..d198ca9 100644 +--- a/cmake/Config.cmake ++++ b/cmake/Config.cmake +@@ -6,7 +6,7 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + + # detect OS X version. (use '/usr/bin/sw_vers -productVersion' to extract V from '10.V.x'.) + execute_process (COMMAND /usr/bin/sw_vers -productVersion OUTPUT_VARIABLE MACOSX_VERSION_RAW) +- string(REGEX REPLACE "10\\.([0-9]).*" "\\1" MACOSX_VERSION "${MACOSX_VERSION_RAW}") ++ string(REGEX REPLACE "10\\.([0-9]+).*" "\\1" MACOSX_VERSION "${MACOSX_VERSION_RAW}") + if(${MACOSX_VERSION} LESS 5) + message(FATAL_ERROR "Unsupported version of OS X : ${MACOSX_VERSION_RAW}") + return() diff --git a/var/spack/repos/builtin/packages/paradiseo/fix_tests.patch b/var/spack/repos/builtin/packages/paradiseo/fix_tests.patch new file mode 100644 index 0000000000..607c5d5262 --- /dev/null +++ b/var/spack/repos/builtin/packages/paradiseo/fix_tests.patch @@ -0,0 +1,13 @@ +diff --git a/moeo/test/t-moeo2DMinHypervolumeArchive.cpp b/moeo/test/t-moeo2DMinHypervolumeArchive.cpp +index 994a9a4..c4ba77b 100644 +--- a/moeo/test/t-moeo2DMinHypervolumeArchive.cpp ++++ b/moeo/test/t-moeo2DMinHypervolumeArchive.cpp +@@ -41,7 +41,7 @@ + #include <moeo> + #include <cassert> + +-#include<archive/moeo2DMinHyperVolumeArchive.h> ++#include<archive/moeo2DMinHypervolumeArchive.h> + + //----------------------------------------------------------------------------- + diff --git a/var/spack/repos/builtin/packages/paradiseo/fix_tutorials.patch b/var/spack/repos/builtin/packages/paradiseo/fix_tutorials.patch new file mode 100644 index 0000000000..14cb5fed74 --- /dev/null +++ b/var/spack/repos/builtin/packages/paradiseo/fix_tutorials.patch @@ -0,0 +1,13 @@ +diff --git a/eo/tutorial/Lesson3/exercise3.1.cpp b/eo/tutorial/Lesson3/exercise3.1.cpp +index dc37479..d178941 100644 +--- a/eo/tutorial/Lesson3/exercise3.1.cpp ++++ b/eo/tutorial/Lesson3/exercise3.1.cpp +@@ -289,7 +289,7 @@ void main_function(int argc, char **argv) + checkpoint.add(fdcStat); + + // The Stdout monitor will print parameters to the screen ... +- eoStdoutMonitor monitor(false); ++ eoStdoutMonitor monitor; + + // when called by the checkpoint (i.e. at every generation) + checkpoint.add(monitor); diff --git a/var/spack/repos/builtin/packages/paradiseo/package.py b/var/spack/repos/builtin/packages/paradiseo/package.py new file mode 100644 index 0000000000..34766099da --- /dev/null +++ b/var/spack/repos/builtin/packages/paradiseo/package.py @@ -0,0 +1,66 @@ +from spack import * +import sys + +class Paradiseo(Package): + """A C++ white-box object-oriented framework dedicated to the reusable design of metaheuristics.""" + homepage = "http://paradiseo.gforge.inria.fr/" + + # Installing from the development version is a better option at this + # point than using the very old supplied packages + version('head', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git') + # This is a version that the package formula author has tested successfully. + # However, the clone is very large (~1Gb git history). The history in the + # head version has been trimmed significantly. 
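The pinned version() entry just below deserves a short aside: a git branch reference is re-resolved every time the package is fetched, while a commit hash always yields the same sources. A stripped-down sketch of the two forms, with the package name, URL and hash invented purely for illustration:

    from spack import *

    class DemoGitPackage(Package):
        """Hypothetical recipe contrasting git-based version styles."""
        homepage = "https://example.org/demo"

        # Follows the tip of 'master' at fetch time; convenient but not reproducible.
        version('develop', git='https://example.org/demo.git', branch='master')

        # Always checks out the same commit; reproducible, but must be bumped by hand.
        version('pinned', git='https://example.org/demo.git',
                commit='0123456789abcdef0123456789abcdef01234567')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')
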
+ version('dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git', + commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6') + + variant('mpi', default=True, description='Compile with parallel and distributed metaheuristics module') + variant('smp', default=True, description='Compile with symmetric multi-processing module ') + variant('edo', default=True, description='Compile with (Experimental) EDO module') + #variant('tests', default=False, description='Compile with build tests') + #variant('doc', default=False, description='Compile with documentation') + variant('debug', default=False, description='Builds a debug version of the libraries') + variant('openmp', default=False, description='Enable OpenMP support') + variant('gnuplot', default=False, description='Enable GnuPlot support') + + # Required dependencies + depends_on ("cmake") + + # Optional dependencies + depends_on ("mpi", when="+mpi") + depends_on ("doxygen", when='+doc') + depends_on ("gnuplot", when='+gnuplot') + depends_on ("eigen", when='+edo') + depends_on ("boost~mpi", when='+edo~mpi') + depends_on ("boost+mpi", when='+edo+mpi') + + # Patches + patch('enable_eoserial.patch') + patch('fix_osx_detection.patch') + patch('fix_tests.patch') + patch('fix_tutorials.patch') + + def install(self, spec, prefix): + options = [] + options.extend(std_cmake_args) + + options.extend([ + '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), + '-DINSTALL_TYPE:STRING=MIN', + '-DMPI:BOOL=%s' % ('TRUE' if '+mpi' in spec else 'FALSE'), + '-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'), # Note: This requires a C++11 compatible compiler + '-DEDO:BOOL=%s' % ('TRUE' if '+edo' in spec else 'FALSE'), + '-DENABLE_CMAKE_TESTING:BOOL=%s' % ('TRUE' if '+tests' in spec else 'FALSE'), + '-DENABLE_OPENMP:BOOL=%s' % ('TRUE' if '+openmp' in spec else 'FALSE'), + '-DENABLE_GNUPLOT:BOOL=%s' % ('TRUE' if '+gnuplot' in spec else 'FALSE') + ]) + + with working_dir('spack-build', create=True): + # Configure + cmake('..', *options) + + # Build, test and install + make("VERBOSE=1") + if '+tests' in spec: + make("test") + make("install") diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py index 62a8f7ca0b..e6f8cf026b 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -8,6 +8,7 @@ class ParallelNetcdf(Package): homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf" url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz" + version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0') version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34') depends_on("m4") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index c16054816c..60f8d3c243 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -27,13 +27,14 @@ class Paraview(Package): depends_on('bzip2') depends_on('freetype') - depends_on('hdf5+mpi', when='+mpi') - depends_on('hdf5~mpi', when='~mpi') + #depends_on('hdf5+mpi', when='+mpi') + #depends_on('hdf5~mpi', when='~mpi') depends_on('jpeg') depends_on('libpng') depends_on('libtiff') depends_on('libxml2') - depends_on('netcdf') + #depends_on('netcdf') + #depends_on('netcdf-cxx') #depends_on('protobuf') # version mismatches? 
#depends_on('sqlite') # external version not supported depends_on('zlib') @@ -75,13 +76,13 @@ class Paraview(Package): cmake('..', '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, '-DBUILD_TESTING:BOOL=OFF', - '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON', - '-DVTK_USER_SYSTEM_HDF5:BOOL=ON', - '-DVTK_USER_SYSTEM_JPEG:BOOL=ON', - '-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON', - '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON', - '-DVTK_USER_SYSTEM_TIFF:BOOL=ON', - '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON', + '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON', + '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF', + '-DVTK_USE_SYSTEM_JPEG:BOOL=ON', + '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON', + '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF', + '-DVTK_USE_SYSTEM_TIFF:BOOL=ON', + '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON', *feature_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py index ff4370aa4b..b49f8dae00 100644 --- a/var/spack/repos/builtin/packages/parmetis/package.py +++ b/var/spack/repos/builtin/packages/parmetis/package.py @@ -44,7 +44,7 @@ class Parmetis(Package): depends_on('mpi') patch('enable_external_metis.patch') - depends_on('metis') + depends_on('metis@5:') # bug fixes from PETSc developers # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 5c1fc6cc92..1161dd7d67 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -40,7 +40,7 @@ class Petsc(Package): # Other dependencies depends_on('boost', when='+boost') - depends_on('metis', when='+metis') + depends_on('metis@5:', when='+metis') depends_on('hdf5+mpi', when='+hdf5+mpi') depends_on('parmetis', when='+metis+mpi') diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py index 9964c6ce34..a803bc3f9b 100644 --- a/var/spack/repos/builtin/packages/pkg-config/package.py +++ b/var/spack/repos/builtin/packages/pkg-config/package.py @@ -10,7 +10,12 @@ class PkgConfig(Package): parallel = False def install(self, spec, prefix): - configure("--prefix=%s" %prefix, "--enable-shared") + configure("--prefix=%s" %prefix, + "--enable-shared", + "--with-internal-glib") # There's a bootstrapping problem here; + # glib uses pkg-config as well, so + # break the cycle by using the internal + # glib. 
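    # A note on the bootstrapping problem mentioned above: glib's own build uses
    # pkg-config, so a pkg-config that insists on an external glib could never be
    # the first of the two to be installed.  '--with-internal-glib' makes
    # pkg-config compile the glib snapshot bundled in its own tarball, which
    # breaks the cycle.  The call above could equivalently be written with an
    # explicit argument list (a sketch only, not a change to the recipe):
    #
    #     config_args = ["--prefix=%s" % prefix,
    #                    "--enable-shared",
    #                    "--with-internal-glib"]
    #     configure(*config_args)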
make() make("install") diff --git a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py new file mode 100644 index 0000000000..9aecc95c63 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PySqlalchemy(Package): + """The Python SQL Toolkit and Object Relational Mapper""" + + homepage = 'http://www.sqlalchemy.org/' + url = "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-1.0.12.tar.gz" + + version('1.0.12', '6d19ef29883bbebdcac6613cf391cac4') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py new file mode 100644 index 0000000000..c94ef7238b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyArgcomplete(Package): + """Bash tab completion for argparse.""" + + homepage = "https://pypi.python.org/pypi/argcomplete" + url = "https://pypi.python.org/packages/source/a/argcomplete/argcomplete-1.1.1.tar.gz" + + version('1.1.1', '89a3839096c9f991ad33828e72d21abf') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-bottleneck/package.py b/var/spack/repos/builtin/packages/py-bottleneck/package.py index 0aa4208b4d..d43308543b 100644 --- a/var/spack/repos/builtin/packages/py-bottleneck/package.py +++ b/var/spack/repos/builtin/packages/py-bottleneck/package.py @@ -7,7 +7,7 @@ class PyBottleneck(Package): version('1.0.0', '380fa6f275bd24f27e7cf0e0d752f5d2') - extends('python', ignore=r'bin/f2py$') + extends('python') depends_on('py-numpy') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/py-csvkit/package.py b/var/spack/repos/builtin/packages/py-csvkit/package.py new file mode 100644 index 0000000000..def30457be --- /dev/null +++ b/var/spack/repos/builtin/packages/py-csvkit/package.py @@ -0,0 +1,22 @@ +from spack import * + +class PyCsvkit(Package): + """A library of utilities for working with CSV, the king of tabular file + formats""" + + homepage = 'http://csvkit.rtfd.org/' + url = "https://pypi.python.org/packages/source/c/csvkit/csvkit-0.9.1.tar.gz" + + version('0.9.1', '48d78920019d18846933ee969502fff6') + + extends('python') + + depends_on('py-dateutil') + depends_on('py-dbf') + depends_on('py-xlrd') + depends_on('py-SQLAlchemy') + depends_on('py-six') + depends_on('py-openpyxl') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-dbf/package.py b/var/spack/repos/builtin/packages/py-dbf/package.py new file mode 100644 index 0000000000..698b221903 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dbf/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyDbf(Package): + """Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro + .dbf files (including memos)""" + + homepage = 'https://pypi.python.org/pypi/dbf' + url = "https://pypi.python.org/packages/source/d/dbf/dbf-0.96.005.tar.gz" + + version('0.96.005', 'bce1a1ed8b454a30606e7e18dd2f8277') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-jdcal/package.py 
b/var/spack/repos/builtin/packages/py-jdcal/package.py new file mode 100644 index 0000000000..54169b2c21 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-jdcal/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyJdcal(Package): + """Julian dates from proleptic Gregorian and Julian calendars""" + + homepage = 'http://github.com/phn/jdcal' + url = "https://pypi.python.org/packages/source/j/jdcal/jdcal-1.2.tar.gz" + + version('1.2', 'ab8d5ba300fd1eb01514f363d19b1eb9') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 45e77dd631..1a190cc5f3 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -12,7 +12,7 @@ class PyMatplotlib(Package): variant('gui', default=False, description='Enable GUI') variant('ipython', default=False, description='Enable ipython support') - extends('python', ignore=r'bin/nosetests.*$|bin/pbr$|bin/f2py$') + extends('python', ignore=r'bin/nosetests.*$|bin/pbr$') depends_on('py-pyside', when='+gui') depends_on('py-ipython', when='+ipython') @@ -26,6 +26,7 @@ class PyMatplotlib(Package): depends_on('py-pbr') depends_on('py-funcsigs') + depends_on('pkg-config') depends_on('freetype') depends_on('qt', when='+gui') depends_on('bzip2') diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py index 081a79dec6..0076aa456b 100644 --- a/var/spack/repos/builtin/packages/py-numexpr/package.py +++ b/var/spack/repos/builtin/packages/py-numexpr/package.py @@ -9,7 +9,7 @@ class PyNumexpr(Package): version('2.4.6', '17ac6fafc9ea1ce3eb970b9abccb4fbd') version('2.5', '84f66cced45ba3e30dcf77a937763aaa') - extends('python', ignore=r'bin/f2py$') + extends('python') depends_on('py-numpy') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/py-openpyxl/package.py b/var/spack/repos/builtin/packages/py-openpyxl/package.py new file mode 100644 index 0000000000..87ff9f3521 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-openpyxl/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PyOpenpyxl(Package): + """A Python library to read/write Excel 2007 xlsx/xlsm files""" + + homepage = 'http://openpyxl.readthedocs.org/' + url = "https://pypi.python.org/packages/source/o/openpyxl/openpyxl-2.4.0-a1.tar.gz" + + version('2.4.0-a1', 'e5ca6d23ceccb15115d45cdf26e736fc') + + extends('python') + + depends_on('py-jdcal') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 2320b1f92f..59d515eb5e 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -10,7 +10,7 @@ class PyPandas(Package): version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8') version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6') - extends('python', ignore=r'bin/f2py$') + extends('python') depends_on('py-dateutil') depends_on('py-numpy') depends_on('py-setuptools') diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py index 22ce1f8374..d13339060e 100644 --- 
a/var/spack/repos/builtin/packages/py-scikit-image/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py @@ -7,7 +7,7 @@ class PyScikitImage(Package): version('0.12.3', '04ea833383e0b6ad5f65da21292c25e1') - extends('python', ignore=r'bin/.*\.py$|bin/f2py$') + extends('python', ignore=r'bin/.*\.py$') depends_on('py-dask') depends_on('py-pillow') diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index c6d9be1add..1368711978 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -5,11 +5,13 @@ class PySetuptools(Package): homepage = "https://pypi.python.org/pypi/setuptools" url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" - version('11.3.1', '01f69212e019a2420c1693fb43593930') - version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') - version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') - version('19.2', '78353b1f80375ca5e088f4b4627ffe03') + version('20.7.0', '5d12b39bf3e75e80fdce54e44b255615') + version('20.6.7', '45d6110f3ec14924e44c33411db64fe6') version('20.5', 'fadc1e1123ddbe31006e5e43e927362b') + version('19.2', '78353b1f80375ca5e088f4b4627ffe03') + version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') + version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') + version('11.3.1', '01f69212e019a2420c1693fb43593930') extends('python') diff --git a/var/spack/repos/builtin/packages/py-xlrd/package.py b/var/spack/repos/builtin/packages/py-xlrd/package.py new file mode 100644 index 0000000000..8f25c06aad --- /dev/null +++ b/var/spack/repos/builtin/packages/py-xlrd/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyXlrd(Package): + """Library for developers to extract data from Microsoft Excel (tm) + spreadsheet files""" + + homepage = 'http://www.python-excel.org/' + url = "https://pypi.python.org/packages/source/x/xlrd/xlrd-0.9.4.tar.gz" + + version('0.9.4', '911839f534d29fe04525ef8cd88fe865') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index f5237c3b57..f7e1d94567 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -151,6 +151,8 @@ class Python(Package): patterns.append(r'setuptools\.pth') patterns.append(r'bin/easy_install[^/]*$') patterns.append(r'setuptools.*egg$') + if ext_pkg.name != 'py-numpy': + patterns.append(r'bin/f2py$') return match_predicate(ignore_arg, patterns) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 8cb88e6c85..1e1d6302f3 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -29,7 +29,8 @@ class Qt(Package): depends_on("zlib") depends_on("dbus", when='@4:') depends_on("libtiff") - depends_on("libpng") + depends_on("libpng@1.2.56", when='@3') + depends_on("libpng", when='@4:') depends_on("libmng") depends_on("jpeg") @@ -100,7 +101,7 @@ class Qt(Package): @property def common_config_args(self): - return [ + config_args = [ '-prefix', self.prefix, '-v', '-opensource', @@ -114,19 +115,29 @@ class Qt(Package): '-no-openvg', '-no-pch', # NIS is deprecated in more recent glibc - '-no-nis'] + '-no-nis' + ] + + if '+gtk' in self.spec: + config_args.append('-gtkstyle') + else: + 
config_args.append('-no-gtkstyle') + + return config_args + # Don't disable all the database drivers, but should # really get them into spack at some point. @when('@3') def configure(self): + # An user report that this was necessary to link Qt3 on ubuntu + os.environ['LD_LIBRARY_PATH'] = os.getcwd()+'/lib' configure('-prefix', self.prefix, '-v', '-thread', '-shared', '-release', - '-fast' - ) + '-fast') @when('@4') def configure(self): diff --git a/var/spack/repos/builtin/packages/qthreads/ldflags.patch b/var/spack/repos/builtin/packages/qthreads/ldflags.patch new file mode 100644 index 0000000000..0c15eab386 --- /dev/null +++ b/var/spack/repos/builtin/packages/qthreads/ldflags.patch @@ -0,0 +1,11 @@ +--- a/configure ++++ b/configure +@@ -40456,7 +40456,7 @@ + hwloc_saved_LDFLAGS="$LDFLAGS" + if test "x$with_hwloc" != x; then + CPPFLAGS="-I$with_hwloc/include $CPPFLAGS" +- LDFLAGS="-L$with_hwloc/lib $CPPFLAGS" ++ LDFLAGS="-L$with_hwloc/lib $LDFLAGS" + fi + + diff --git a/var/spack/repos/builtin/packages/qthreads/package.py b/var/spack/repos/builtin/packages/qthreads/package.py index dacdb71524..5da9340927 100644 --- a/var/spack/repos/builtin/packages/qthreads/package.py +++ b/var/spack/repos/builtin/packages/qthreads/package.py @@ -16,7 +16,12 @@ class Qthreads(Package): version('1.10', '5af8c8bbe88c2a6d45361643780d1671') + patch("ldflags.patch") + patch("restrict.patch") + patch("trap.patch") + def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure("--prefix=%s" % prefix, + "--enable-guard-pages") make() make("install") diff --git a/var/spack/repos/builtin/packages/qthreads/restrict.patch b/var/spack/repos/builtin/packages/qthreads/restrict.patch new file mode 100644 index 0000000000..4c95714f6b --- /dev/null +++ b/var/spack/repos/builtin/packages/qthreads/restrict.patch @@ -0,0 +1,12 @@ +--- a/include/qthread/common.h.in ++++ b/include/qthread/common.h.in +@@ -84,7 +84,9 @@ + /* Define to the equivalent of the C99 'restrict' keyword, or to + nothing if this is not supported. Do not define if restrict is + supported directly. 
*/ ++#ifndef restrict + #undef restrict ++#endif + /* Work around a bug in Sun C++: it does not support _Restrict or + __restrict__, even though the corresponding Sun C compiler ends up with + "#define restrict _Restrict" or "#define restrict __restrict__" in the diff --git a/var/spack/repos/builtin/packages/qthreads/trap.patch b/var/spack/repos/builtin/packages/qthreads/trap.patch new file mode 100644 index 0000000000..7aa94d82d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/qthreads/trap.patch @@ -0,0 +1,11 @@ +--- a/include/qthread/qthread.hpp ++++ b/include/qthread/qthread.hpp +@@ -236,7 +236,7 @@ + return qthread_incr64((uint64_t *)operand, incr); + + default: +- *(int *)(0) = 0; ++ __builtin_trap(); + } + return T(0); // never hit - keep compiler happy + } diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py new file mode 100644 index 0000000000..f807ab71af --- /dev/null +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -0,0 +1,12 @@ +from spack import * + +class Raja(Package): + """RAJA Parallel Framework.""" + homepage = "http://software.llnl.gov/RAJA/" + + version('git', git='https://github.com/LLNL/RAJA.git', branch="master") + + def install(self, spec, prefix): + cmake('.',*std_cmake_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py index 1b870e0e7f..0c429ea756 100644 --- a/var/spack/repos/builtin/packages/readline/package.py +++ b/var/spack/repos/builtin/packages/readline/package.py @@ -2,12 +2,12 @@ from spack import * class Readline(Package): """The GNU Readline library provides a set of functions for use by - applications that allow users to edit command li nes as they + applications that allow users to edit command lines as they are typed in. Both Emacs and vi editing modes are available. The Readline library includes additional functions to maintain a list of previously-entered command lines, to recall and perhaps reedit those lines, and perform csh-like - history expansion on previous commands. 
""" + history expansion on previous commands.""" homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" url = "ftp://ftp.cwru.edu/pub/bash/readline-6.3.tar.gz" diff --git a/var/spack/repos/builtin/packages/scons/package.py b/var/spack/repos/builtin/packages/scons/package.py new file mode 100644 index 0000000000..594aeced88 --- /dev/null +++ b/var/spack/repos/builtin/packages/scons/package.py @@ -0,0 +1,13 @@ +from spack import * + +class Scons(Package): + """SCons is a software construction tool""" + homepage = "http://scons.org" + url = "http://downloads.sourceforge.net/project/scons/scons/2.5.0/scons-2.5.0.tar.gz" + + version('2.5.0', '9e00fa0df8f5ca5c5f5975b40e0ed354') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/scotch/Makefile.esmumps b/var/spack/repos/builtin/packages/scotch/Makefile.esmumps new file mode 100644 index 0000000000..4bfc760197 --- /dev/null +++ b/var/spack/repos/builtin/packages/scotch/Makefile.esmumps @@ -0,0 +1,5 @@ +esmumps : scotch + (cd esmumps ; $(MAKE) scotch && $(MAKE) install) + +ptesmumps : ptscotch + (cd esmumps ; $(MAKE) ptscotch && $(MAKE) ptinstall) diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py index 8229ed8686..8fad74b24f 100644 --- a/var/spack/repos/builtin/packages/scotch/package.py +++ b/var/spack/repos/builtin/packages/scotch/package.py @@ -1,88 +1,125 @@ from spack import * -import os +import os, re class Scotch(Package): """Scotch is a software package for graph and mesh/hypergraph partitioning, graph clustering, and sparse matrix ordering.""" + homepage = "http://www.labri.fr/perso/pelegrin/scotch/" - url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz" + url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.3.tar.gz" + base_url = "http://gforge.inria.fr/frs/download.php/latestfile/298" list_url = "http://gforge.inria.fr/frs/?group_id=248" version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc') + version('6.0.0', 'c50d6187462ba801f9a82133ee666e8e') + version('5.1.10b', 'f587201d6cf5cf63527182fbfba70753') variant('mpi', default=False, description='Activate the compilation of PT-Scotch') variant('compression', default=True, description='Activate the posibility to use compressed files') variant('esmumps', default=False, description='Activate the compilation of the lib esmumps needed by mumps') variant('shared', default=True, description='Build shared libraries') - depends_on('mpi', when='+mpi') - depends_on('zlib', when='+compression') depends_on('flex') depends_on('bison') + depends_on('mpi', when='+mpi') + depends_on('zlib', when='+compression') - def compiler_specifics(self, makefile_inc, defines): - if self.compiler.name == 'gcc': - defines.append('-Drestrict=__restrict') - elif self.compiler.name == 'intel': - defines.append('-restrict') + # NOTE: Versions of Scotch up to version 6.0.0 don't include support for + # building with 'esmumps' in their default packages. In order to enable + # support for this feature, we must grab the 'esmumps' enabled archives + # from the Scotch hosting site. These alternative archives include a strict + # superset of the behavior in their default counterparts, so we choose to + # always grab these versions for older Scotch versions for simplicity. 
+ @when('@:6.0.0') + def url_for_version(self, version): + return '%s/scotch_%s_esmumps.tar.gz' % (Scotch.base_url, version) + + @when('@6.0.1:') + def url_for_version(self, version): + return super(Scotch, self).url_for_version(version) + + # NOTE: Several of the 'esmumps' enabled Scotch releases up to version 6.0.0 + # have broken build scripts that don't properly build 'esmumps' as a separate + # target, so we need a patch procedure to remove 'esmumps' from existing targets + # and to add it as a standalone target. + @when('@:6.0.0') + def patch(self): + makefile_path = os.path.join('src', 'Makefile') + with open(makefile_path, 'r') as makefile: + esmumps_enabled = any(re.search(r'^esmumps(\s*):(.*)$', line) for line in makefile.readlines()) - makefile_inc.append('CCS = $(CC)') + if not esmumps_enabled: + mff = FileFilter(makefile_path) + mff.filter(r'^.*((esmumps)|(ptesmumps)).*(install).*$', '') - if '+mpi' in self.spec: + makefile_esmumps_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Makefile.esmumps') + with open(makefile_path, 'a') as makefile: + makefile.write('\ninclude %s\n' % makefile_esmumps_path) + + @when('@6.0.1:') + def patch(self): + pass + + # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc' file + # that contains all of the configuration variables and their desired values + # for the installation. This function writes this file based on the given + # installation variants. + def configure(self): + makefile_inc = [] + cflags = [ + '-O3', + '-DCOMMON_RANDOM_FIXED_SEED', + '-DSCOTCH_DETERMINISTIC', + '-DSCOTCH_RENAME', + '-DIDXSIZE64' + ] + + ## Library Build Type ## + + if '+shared' in self.spec: makefile_inc.extend([ - 'CCP = %s' % os.path.join(self.spec['mpi'].prefix.bin, 'mpicc'), - 'CCD = $(CCP)' - ]) + 'LIB = .so', + 'CLIBFLAGS = -shared -fPIC', + 'RANLIB = echo', + 'AR = $(CC)', + 'ARFLAGS = -shared $(LDFLAGS) -o' + ]) + cflags.append('-fPIC') else: makefile_inc.extend([ - 'CCP = mpicc', # It is set but not used - 'CCD = $(CCS)' - ]) + 'LIB = .a', + 'CLIBFLAGS = ', + 'RANLIB = ranlib', + 'AR = ar', + 'ARFLAGS = -ruv ' + ]) + ## Compiler-Specific Options ## + if self.compiler.name == 'gcc': + cflags.append('-Drestrict=__restrict') + elif self.compiler.name == 'intel': + cflags.append('-restrict') - def library_build_type(self, makefile_inc, defines): - makefile_inc.extend([ - 'LIB = .a', - 'CLIBFLAGS = ', - 'RANLIB = ranlib', - 'AR = ar', - 'ARFLAGS = -ruv ' - ]) + makefile_inc.append('CCS = $(CC)') + makefile_inc.append('CCP = %s' % + (self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc')) + makefile_inc.append('CCD = $(CCS)') - @when('+shared') - def library_build_type(self, makefile_inc, defines): - makefile_inc.extend([ - 'LIB = .so', - 'CLIBFLAGS = -shared -fPIC', - 'RANLIB = echo', - 'AR = $(CC)', - 'ARFLAGS = -shared $(LDFLAGS) -o' - ]) + ## Extra Features ## - def extra_features(self, makefile_inc, defines): ldflags = [] - + if '+compression' in self.spec: - defines.append('-DCOMMON_FILE_COMPRESS_GZ') + cflags.append('-DCOMMON_FILE_COMPRESS_GZ') ldflags.append('-L%s -lz' % (self.spec['zlib'].prefix.lib)) - defines.append('-DCOMMON_PTHREAD') + cflags.append('-DCOMMON_PTHREAD') ldflags.append('-lm -lrt -pthread') - - makefile_inc.append('LDFLAGS = %s' % ' '.join(ldflags)) - def patch(self): - makefile_inc = [] - defines = [ - '-DCOMMON_RANDOM_FIXED_SEED', - '-DSCOTCH_DETERMINISTIC', - '-DSCOTCH_RENAME', - '-DIDXSIZE64' ] + makefile_inc.append('LDFLAGS = %s' % ' '.join(ldflags)) - 
self.library_build_type(makefile_inc, defines) - self.compiler_specifics(makefile_inc, defines) - self.extra_features(makefile_inc, defines) + ## General Features ## makefile_inc.extend([ 'EXE =', @@ -93,18 +130,19 @@ class Scotch(Package): 'MKDIR = mkdir', 'MV = mv', 'CP = cp', - 'CFLAGS = -O3 %s' % (' '.join(defines)), + 'CFLAGS = %s' % ' '.join(cflags), 'LEX = %s -Pscotchyy -olex.yy.c' % os.path.join(self.spec['flex'].prefix.bin , 'flex'), 'YACC = %s -pscotchyy -y -b y' % os.path.join(self.spec['bison'].prefix.bin, 'bison'), - 'prefix = %s' % self.prefix, - '' + 'prefix = %s' % self.prefix ]) with working_dir('src'): with open('Makefile.inc', 'w') as fh: fh.write('\n'.join(makefile_inc)) - + def install(self, spec, prefix): + self.configure() + targets = ['scotch'] if '+mpi' in self.spec: targets.append('ptscotch') @@ -115,12 +153,10 @@ class Scotch(Package): targets.append('ptesmumps') with working_dir('src'): - for app in targets: - make(app, parallel=(not app=='ptesmumps')) + for target in targets: + make(target, parallel=(target!='ptesmumps')) - install_tree('bin', prefix.bin) install_tree('lib', prefix.lib) install_tree('include', prefix.include) install_tree('man/man1', prefix.share_man1) - diff --git a/var/spack/repos/builtin/packages/serf/package.py b/var/spack/repos/builtin/packages/serf/package.py new file mode 100644 index 0000000000..a5c9057b99 --- /dev/null +++ b/var/spack/repos/builtin/packages/serf/package.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Serf(Package): + """Apache Serf - a high performance C-based HTTP client library built upon the Apache Portable Runtime (APR) library""" + homepage = 'https://serf.apache.org/' + url = 'https://archive.apache.org/dist/serf/serf-1.3.8.tar.bz2' + + version('1.3.8', '1d45425ca324336ce2f4ae7d7b4cfbc5567c5446') + + depends_on('apr') + depends_on('apr-util') + depends_on('scons') + depends_on('expat') + depends_on('openssl') + + def install(self, spec, prefix): + scons = which("scons") + + options = ['PREFIX=%s' % prefix] + options.append('APR=%s' % spec['apr'].prefix) + options.append('APU=%s' % spec['apr-util'].prefix) + options.append('OPENSSL=%s' % spec['openssl'].prefix) + options.append('LINKFLAGS=-L%s/lib' % spec['expat'].prefix) + options.append('CPPFLAGS=-I%s/include' % spec['expat'].prefix) + + scons(*options) + scons('install') diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py index 5db1c3eb92..04cde94aad 100644 --- a/var/spack/repos/builtin/packages/subversion/package.py +++ b/var/spack/repos/builtin/packages/subversion/package.py @@ -37,6 +37,7 @@ class Subversion(Package): depends_on('apr-util') depends_on('zlib') depends_on('sqlite') + depends_on('serf') # Optional: We need swig if we want the Perl, Python or Ruby # bindings. @@ -54,6 +55,7 @@ class Subversion(Package): options.append('--with-apr-util=%s' % spec['apr-util'].prefix) options.append('--with-zlib=%s' % spec['zlib'].prefix) options.append('--with-sqlite=%s' % spec['sqlite'].prefix) + options.append('--with-serf=%s' % spec['serf'].prefix) #options.append('--with-swig=%s' % spec['swig'].prefix) configure(*options) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index ddcb7f9225..3420d9b90a 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -6,7 +6,9 @@ class SuperluDist(Package): homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/" url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz" - version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae') + version('5.0.0', '2b53baf1b0ddbd9fcf724992577f0670') + # default to version 4.3 since petsc and trilinos are not tested with 5.0. 
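The preferred=True marker on the 4.3 entry just below biases Spack's concretizer toward that release even though 5.0.0 is listed above it; an explicit request such as superlu-dist@5.0.0 still overrides the preference. Further down, the same package narrows its metis dependency to '@5:', an open-ended version range meaning "5 or newer". A hypothetical recipe (name, URL and checksums invented) showing both idioms together:

    from spack import *

    class DemoSolver(Package):
        """Hypothetical recipe showing a preferred version and a range constraint."""
        homepage = "https://example.org/demo-solver"
        url = "https://example.org/demo-solver-4.3.tar.gz"

        version('5.0.0', '22222222222222222222222222222222')
        # Stays the default until downstream packages are validated against 5.x.
        version('4.3', '33333333333333333333333333333333', preferred=True)

        # '@5:' matches any metis at version 5 or newer; '@:4' would mean
        # "4 or older", and '@5.0:5.1' a closed range.
        depends_on('metis@5:')
        depends_on('mpi')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')
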
+ version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae', preferred=True) version('4.2', 'ae9fafae161f775fbac6eba11e530a65') version('4.1', '4edee38cc29f687bd0c8eb361096a455') version('4.0', 'c0b98b611df227ae050bc1635c6940e0') @@ -15,7 +17,7 @@ class SuperluDist(Package): depends_on ('blas') depends_on ('lapack') depends_on ('parmetis') - depends_on ('metis') + depends_on ('metis@5:') def install(self, spec, prefix): makefile_inc = [] diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py index 8d46c4fe46..78a6c6bbae 100644 --- a/var/spack/repos/builtin/packages/swig/package.py +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -22,6 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## + from spack import * class Swig(Package): @@ -33,14 +34,19 @@ class Swig(Package): code. In addition, SWIG provides a variety of customization features that let you tailor the wrapping process to suit your application.""" + homepage = "http://www.swig.org" - url = "http://prdownloads.sourceforge.net/swig/swig-3.0.2.tar.gz" + url = "http://prdownloads.sourceforge.net/swig/swig-3.0.8.tar.gz" + version('3.0.8', 'c96a1d5ecb13d38604d7e92148c73c97') version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41') + version('2.0.12', 'c3fb0b2d710cc82ed0154b91e43085a4') + version('2.0.2', 'eaf619a4169886923e5f828349504a29') + version('1.3.40', '2df766c9e03e02811b1ab4bba1c7b9cc') depends_on('pcre') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure('--prefix=%s' % prefix) make() - make("install") + make('install') diff --git a/var/spack/repos/builtin/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the_silver_searcher/package.py index e4020b6766..30f06354bf 100644 --- a/var/spack/repos/builtin/packages/the_silver_searcher/package.py +++ b/var/spack/repos/builtin/packages/the_silver_searcher/package.py @@ -9,6 +9,7 @@ class TheSilverSearcher(Package): depends_on('pcre') depends_on('xz') + depends_on('pkg-config') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 6223848c68..0f72055fa7 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -42,7 +42,7 @@ class Trilinos(Package): depends_on('matio') depends_on('glm') depends_on('swig') - depends_on('metis',when='+metis') + depends_on('metis@5:',when='+metis') depends_on('suite-sparse',when='+suite-sparse') # MPI related dependencies diff --git a/var/spack/repos/builtin/packages/turbomole/package.py b/var/spack/repos/builtin/packages/turbomole/package.py new file mode 100644 index 0000000000..acc95e3b10 --- /dev/null +++ b/var/spack/repos/builtin/packages/turbomole/package.py @@ -0,0 +1,124 @@ +from spack import * +import os +import subprocess + +class Turbomole(Package): + """TURBOMOLE: Program Package for ab initio Electronic Structure + Calculations. NB: Requires a license to download.""" + + # NOTE: Turbomole requires purchase of a license to download. Go to the + # NOTE: Turbomole home page, http://www.turbomole-gmbh.com, for details. + # NOTE: Spack will search the current directory for this file. 
It is + # NOTE: probably best to add this file to a Spack mirror so that it can be + # NOTE: found from anywhere. For information on setting up a Spack mirror + # NOTE: see http://software.llnl.gov/spack/mirrors.html + + homepage = "http://www.turbomole-gmbh.com/" + + version('7.0.2', '92b97e1e52e8dcf02a4d9ac0147c09d6', + url="file://%s/turbolinux702.tar.gz" % os.getcwd()) + + variant('mpi', default=False, description='Set up MPI environment') + variant('smp', default=False, description='Set up SMP environment') + + # Turbomole's install is odd. There are three variants + # - serial + # - parallel, MPI + # - parallel, SMP + # + # Only one of these can be active at a time. MPI and SMP are set as + # variants so there could be up to 3 installs per version. Switching + # between them would be accomplished with `module swap` commands. + + def do_fetch(self, mirror_only=True): + if '+mpi' in self.spec and '+smp' in self.spec: + raise InstallError('Can not have both SMP and MPI enabled in the same build.') + super(Turbomole, self).do_fetch(mirror_only) + + def get_tm_arch(self): + # For python-2.7 we could use `tm_arch = subprocess.check_output()` + # Use the following for compatibility with python 2.6 + if 'TURBOMOLE' in os.getcwd(): + tm_arch = subprocess.Popen(['sh', 'scripts/sysname'], + stdout=subprocess.PIPE).communicate()[0] + return tm_arch.rstrip('\n') + else: + return + + def install(self, spec, prefix): + if spec.satisfies('@:7.0.2'): + calculate_version = 'calculate_2.4_linux64' + molecontrol_version = 'MoleControl_2.5' + + tm_arch=self.get_tm_arch() + + tar = which('tar') + dst = join_path(prefix, 'TURBOMOLE') + + tar('-x', '-z', '-f', 'thermocalc.tar.gz') + with working_dir('thermocalc'): + cmd = 'sh install <<<y' + subprocess.call(cmd, shell=True) + + install_tree('basen', join_path(dst, 'basen')) + install_tree('cabasen', join_path(dst, 'cabasen')) + install_tree(calculate_version, join_path(dst, calculate_version)) + install_tree('cbasen', join_path(dst, 'cbasen')) + install_tree('DOC', join_path(dst, 'DOC')) + install_tree('jbasen', join_path(dst, 'jbasen')) + install_tree('jkbasen', join_path(dst, 'jkbasen')) + install_tree(molecontrol_version, join_path(dst, molecontrol_version)) + install_tree('parameter', join_path(dst, 'parameter')) + install_tree('perlmodules', join_path(dst, 'perlmodules')) + install_tree('scripts', join_path(dst, 'scripts')) + install_tree('smprun_scripts', join_path(dst, 'smprun_scripts')) + install_tree('structures', join_path(dst, 'structures')) + install_tree('thermocalc', join_path(dst, 'thermocalc')) + install_tree('TURBOTEST', join_path(dst, 'TURBOTEST')) + install_tree('xbasen', join_path(dst, 'xbasen')) + + install('Config_turbo_env', dst) + install('Config_turbo_env.tcsh', dst) + install('README', dst) + install('README_LICENSES', dst) + install('TURBOMOLE_702_LinuxPC', dst) + + if '+mpi' in spec: + install_tree('bin/%s_mpi' % tm_arch, join_path(dst, 'bin', '%s_mpi' % tm_arch)) + install_tree('libso/%s_mpi' % tm_arch, join_path(dst, 'libso', '%s_mpi' % tm_arch)) + install_tree('mpirun_scripts/%s_mpi' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_mpi' % tm_arch)) + elif '+smp' in spec: + install_tree('bin/%s_smp' % tm_arch, join_path(dst, 'bin', '%s_smp' % tm_arch)) + install_tree('libso/%s_smp' % tm_arch, join_path(dst, 'libso', '%s_smp' % tm_arch)) + install_tree('mpirun_scripts/%s_smp' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_smp' % tm_arch)) + else: + install_tree('bin/%s' % tm_arch, join_path(dst, 'bin', tm_arch)) + if '+mpi' in 
spec or '+smp' in spec: + install('mpirun_scripts/ccsdf12', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/dscf', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/grad', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/mpgrad', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/pnoccsd', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/rdgrad', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/ricc2', join_path(dst, 'mpirun_scripts')) + install('mpirun_scripts/ridft', join_path(dst, 'mpirun_scripts')) + + def setup_environment(self, spack_env, run_env): + if self.spec.satisfies('@:7.0.2'): + molecontrol_version = 'MoleControl_2.5' + + tm_arch=self.get_tm_arch() + + run_env.set('TURBODIR', join_path(self.prefix, 'TURBOMOLE')) + run_env.set('MOLE_CONTROL', join_path(self.prefix, 'TURBOMOLE', molecontrol_version)) + + run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'thermocalc')) + run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'scripts')) + if '+mpi' in self.spec: + run_env.set('PARA_ARCH', 'MPI') + run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_mpi' % tm_arch)) + elif '+smp' in self.spec: + run_env.set('PARA_ARCH', 'SMP') + run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_smp' % tm_arch)) + else: + run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', tm_arch)) diff --git a/var/spack/repos/builtin/packages/unibilium/package.py b/var/spack/repos/builtin/packages/unibilium/package.py new file mode 100644 index 0000000000..ef5de56f79 --- /dev/null +++ b/var/spack/repos/builtin/packages/unibilium/package.py @@ -0,0 +1,12 @@ +from spack import * + +class Unibilium(Package): + """A terminfo parsing library""" + homepage = "https://github.com/mauke/unibilium" + url = "https://github.com/mauke/unibilium/archive/v1.2.0.tar.gz" + + version('1.2.0', '9b1c97839a880a373da6c097443b43c4') + + def install(self, spec, prefix): + make("PREFIX="+prefix) + make("install", "PREFIX="+prefix) diff --git a/var/spack/repos/builtin/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py index 55728b0515..4b92659478 100644 --- a/var/spack/repos/builtin/packages/wget/package.py +++ b/var/spack/repos/builtin/packages/wget/package.py @@ -17,6 +17,8 @@ class Wget(Package): def install(self, spec, prefix): configure("--prefix=%s" % prefix, - "--with-ssl=openssl") + "--with-ssl=openssl", + "OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include, + "OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" % spec['openssl'].prefix.lib) make() make("install") diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py index b59ab178ae..bd02ddcd4b 100644 --- a/var/spack/repos/builtin/packages/xerces-c/package.py +++ b/var/spack/repos/builtin/packages/xerces-c/package.py @@ -1,19 +1,3 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install xerces-c -# -# You can always get back here to change things with: -# -# spack edit xerces-c -# -# See the spack documentation for more information on building -# packages. 
-# from spack import * class XercesC(Package): @@ -24,8 +8,8 @@ class XercesC(Package): """ homepage = "https://xerces.apache.org/xerces-c" - url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.2.tar.gz" - version('3.1.2', '9eb1048939e88d6a7232c67569b23985') + url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.3.tar.bz2" + version('3.1.3', '5e333b55cb43e6b025ddf0e5d0f0fb0d') def install(self, spec, prefix): configure("--prefix=%s" % prefix, diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py index e20ae81adb..738dfb508b 100644 --- a/var/spack/repos/builtin/packages/zoltan/package.py +++ b/var/spack/repos/builtin/packages/zoltan/package.py @@ -1,3 +1,4 @@ +import re, os, glob from spack import * class Zoltan(Package): @@ -12,8 +13,13 @@ class Zoltan(Package): base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions" version('3.83', '1ff1bc93f91e12f2c533ddb01f2c095f') + version('3.8', '9d8fba8a990896881b85351d4327c4a9') + version('3.6', '9cce794f7241ecd8dbea36c3d7a880f9') version('3.3', '5eb8f00bda634b25ceefa0122bd18d65') + variant('debug', default=False, description='Builds a debug version of the library') + variant('shared', default=True, description='Builds a shared version of the library') + variant('fortran', default=True, description='Enable Fortran support') variant('mpi', default=False, description='Enable MPI support') @@ -24,28 +30,49 @@ class Zoltan(Package): '--enable-f90interface' if '+fortan' in spec else '--disable-f90interface', '--enable-mpi' if '+mpi' in spec else '--disable-mpi', ] + config_cflags = [ + '-O0' if '+debug' in spec else '-O3', + '-g' if '+debug' in spec else '-g0', + ] + + if '+shared' in spec: + config_args.append('--with-ar=$(CXX) -shared $(LDFLAGS) -o') + config_args.append('RANLIB=echo') + config_cflags.append('-fPIC') if '+mpi' in spec: - config_args.append('--with-mpi=%s' % spec['mpi'].prefix) - config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin) config_args.append('CC=%s/mpicc' % spec['mpi'].prefix.bin) config_args.append('CXX=%s/mpicxx' % spec['mpi'].prefix.bin) + config_args.append('--with-mpi=%s' % spec['mpi'].prefix) + config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin) # NOTE: Early versions of Zoltan come packaged with a few embedded # library packages (e.g. ParMETIS, Scotch), which messes with Spack's # ability to descend directly into the package's source directory. - if spec.satisfies('@:3.3'): + if spec.satisfies('@:3.6'): cd('Zoltan_v%s' % self.version) mkdirp('build') cd('build') config_zoltan = Executable('../configure') - config_zoltan('--prefix=%s' % pwd(), *config_args) + config_zoltan( + '--prefix=%s' % pwd(), + '--with-cflags=%s' % ' '.join(config_cflags), + '--with-cxxflags=%s' % ' '.join(config_cflags), + *config_args) make() make('install') + # NOTE: Unfortunately, Zoltan doesn't provide any configuration options for + # the extension of the output library files, so this script must change these + # extensions as a post-processing step. + if '+shared' in spec: + for libpath in glob.glob('lib/*.a'): + libdir, libname = (os.path.dirname(libpath), os.path.basename(libpath)) + move(libpath, os.path.join(libdir, re.sub(r'\.a$', '.so', libname))) + mkdirp(prefix) move('include', prefix) move('lib', prefix) |
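Because Zoltan's build system exposes no option for the extension of the emitted libraries, the recipe above redefines AR to '$(CXX) -shared $(LDFLAGS) -o' so the archives are in fact shared objects, and then renames lib/*.a to lib/*.so as a post-processing step. A standalone sketch of that rename logic, runnable outside Spack (the directory path is an invented example):

    import glob
    import os
    import re

    def rename_static_to_shared(libdir):
        """Rename every *.a file in libdir to *.so (illustrative helper only)."""
        for libpath in glob.glob(os.path.join(libdir, '*.a')):
            os.rename(libpath, re.sub(r'\.a$', '.so', libpath))

    if __name__ == '__main__':
        # Hypothetical staging directory; the recipe above uses Spack's `move`
        # with the same glob/re pattern inside its staged 'lib' directory.
        rename_static_to_shared('/tmp/zoltan-build/lib')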