From fc576a40d66ad6f83fd512ce053cc83e011a1e30 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 6 Sep 2016 15:07:11 +0200 Subject: modules : ('build',) type dependencies are not accounted when autoloading fixes #1681 --- lib/spack/spack/modules.py | 3 ++- lib/spack/spack/test/modules.py | 22 ++++++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 70c3c35d8c..3db08a6e90 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -120,7 +120,7 @@ def dependencies(spec, request='all'): return [] if request == 'direct': - return spec.dependencies() + return spec.dependencies(deptype=('link', 'run')) # FIXME : during module file creation nodes seem to be visited multiple # FIXME : times even if cover='nodes' is given. This work around permits @@ -133,6 +133,7 @@ def dependencies(spec, request='all'): spec.traverse(order='post', depth=True, cover='nodes', + deptype=('link', 'run'), root=False), reverse=True)] return [xx for ii, xx in l if not (xx in seen or seen_add(xx))] diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 5e280d8e43..55b771fc79 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -229,6 +229,28 @@ class TclTests(MockPackagesTest): self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5) self.assertEqual(len([x for x in content if 'module load ' in x]), 5) + # dtbuild1 has + # - 1 ('run',) dependency + # - 1 ('build','link') dependency + # - 1 ('build',) dependency + # Just make sure the 'build' dependency is not there + spack.modules.CONFIGURATION = configuration_autoload_direct + spec = spack.spec.Spec('dtbuild1') + content = self.get_modulefile_content(spec) + self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2) + self.assertEqual(len([x for x in content if 'module load ' in x]), 2) + + # dtbuild1 has + # - 1 ('run',) dependency + # - 1 ('build','link') dependency + # - 1 ('build',) dependency + # Just make sure the 'build' dependency is not there + spack.modules.CONFIGURATION = configuration_autoload_all + spec = spack.spec.Spec('dtbuild1') + content = self.get_modulefile_content(spec) + self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2) + self.assertEqual(len([x for x in content if 'module load ' in x]), 2) + def test_prerequisites(self): spack.modules.CONFIGURATION = configuration_prerequisites_direct spec = spack.spec.Spec('mpileaks arch=x86-linux') -- cgit v1.2.3-60-g2f50 From f34dd94166ecf92b981908153a02bd583bdff164 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 6 Sep 2016 07:55:54 -0700 Subject: Add debug command for creating tarball of install DB. --- lib/spack/spack/cmd/debug.py | 84 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 lib/spack/spack/cmd/debug.py (limited to 'lib') diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py new file mode 100644 index 0000000000..958eb829b4 --- /dev/null +++ b/lib/spack/spack/cmd/debug.py @@ -0,0 +1,84 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +from datetime import datetime +from glob import glob + +import llnl.util.tty as tty +from llnl.util.filesystem import working_dir + +import spack +from spack.util.executable import which + +description = "Debugging commands for troubleshooting Spack." + + +def setup_parser(subparser): + sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command') + sp.add_parser('create-db-tarball', + help="Create a tarball of Spack's installation metadata.") + + +def _debug_tarball_suffix(): + now = datetime.now() + suffix = now.strftime('%Y-%m-%d-%H%M%S') + + git = which('git') + if not git: + return 'nobranch-nogit-%s' % suffix + + with working_dir(spack.spack_root): + if not os.path.isdir('.git'): + return 'nobranch.nogit.%s' % suffix + + symbolic = git( + 'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip() + commit = git( + 'rev-parse', '--short', 'HEAD', output=str).strip() + + if symbolic == commit: + return "nobranch.%s.%s" % (commit, suffix) + else: + return "%s.%s.%s" % (symbolic, commit, suffix) + + +def create_db_tarball(args): + tar = which('tar') + tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix() + tarball_path = os.path.abspath(tarball_name) + + with working_dir(spack.spack_root): + files = [spack.installed_db._index_path] + files += glob('%s/*/*/*/.spack/spec.yaml' % spack.install_path) + files = [os.path.relpath(f) for f in files] + + tar('-czf', tarball_path, *files) + + tty.msg('Created %s' % tarball_name) + + +def debug(parser, args): + action = {'create-db-tarball': create_db_tarball} + action[args.debug_command](args) -- cgit v1.2.3-60-g2f50 From fd02a140c4b71a948fb478b810dce91ea0ab5ae8 Mon Sep 17 00:00:00 2001 From: scheibelp Date: Wed, 7 Sep 2016 07:02:24 -0700 Subject: Fix fetching non-expanded resources from mirrors (#1310) This closes #1308, where fetching a non-expanded resource from a mirror will cause an error. This also ensures that when a URL resource is fetched from a mirror, that it will be named as though it were retrieved from the original URL. This is particularly useful for non-expanded resources since it ensures that the resource name is consistent for the installation (this is less important for expanded resources because the build takes place inside the expanded resource). 
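A package that declares a non-expanded resource, the case this change addresses, might look roughly like the sketch below. The package name, URLs and checksums are placeholders, and it assumes the `resource` directive forwards `expand=False` to the URL fetch strategy patched here:

    # Hypothetical package.py fragment; names, URLs and checksums are placeholders.
    from spack import *

    class Example(Package):
        homepage = "http://example.com"
        url = "http://example.com/example-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        # Fetched as a single file and never unpacked. Before this change,
        # retrieving it from a mirror raised an error, and the staged file
        # could be named after the mirror path instead of the original URL.
        resource(name='blob',
                 url='http://example.com/blob-1.0.bin',
                 md5='fedcba9876543210fedcba9876543210',
                 expand=False)

With the mirror fetch strategies now constructed with the same `expand` flag as the default fetcher, such a resource keeps the name derived from its original URL whether it is served from a mirror or from upstream.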
--- lib/spack/spack/cmd/test.py | 2 +- lib/spack/spack/fetch_strategy.py | 11 +++++------ lib/spack/spack/stage.py | 34 +++++++++++++++++++--------------- 3 files changed, 25 insertions(+), 22 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index bf7342f606..52c2a06778 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -56,7 +56,7 @@ class MockCache(object): def store(self, copyCmd, relativeDst): pass - def fetcher(self, targetPath, digest): + def fetcher(self, targetPath, digest, **kwargs): return MockCacheFetcher() diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index c69a23033c..4349e32d4f 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -170,12 +170,11 @@ class URLFetchStrategy(FetchStrategy): tty.msg("Already downloaded %s" % self.archive_file) return - possible_files = self.stage.expected_archive_files save_file = None partial_file = None - if possible_files: - save_file = self.stage.expected_archive_files[0] - partial_file = self.stage.expected_archive_files[0] + '.part' + if self.stage.save_filename: + save_file = self.stage.save_filename + partial_file = self.stage.save_filename + '.part' tty.msg("Trying to fetch from %s" % self.url) @@ -858,9 +857,9 @@ class FsCache(object): mkdirp(os.path.dirname(dst)) fetcher.archive(dst) - def fetcher(self, targetPath, digest): + def fetcher(self, targetPath, digest, **kwargs): url = "file://" + join_path(self.root, targetPath) - return CacheURLFetchStrategy(url, digest) + return CacheURLFetchStrategy(url, digest, **kwargs) def destroy(self): shutil.rmtree(self.root, ignore_errors=True) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index c0705a89c8..b659cfb2fb 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -216,9 +216,9 @@ class Stage(object): def expected_archive_files(self): """Possible archive file paths.""" paths = [] - if isinstance(self.fetcher, fs.URLFetchStrategy): + if isinstance(self.default_fetcher, fs.URLFetchStrategy): paths.append(os.path.join( - self.path, os.path.basename(self.fetcher.url))) + self.path, os.path.basename(self.default_fetcher.url))) if self.mirror_path: paths.append(os.path.join( @@ -226,19 +226,19 @@ class Stage(object): return paths + @property + def save_filename(self): + possible_filenames = self.expected_archive_files + if possible_filenames: + # This prefers using the URL associated with the default fetcher if + # available, so that the fetched resource name matches the remote + # name + return possible_filenames[0] + @property def archive_file(self): """Path to the source archive within this stage directory.""" - paths = [] - if isinstance(self.fetcher, fs.URLFetchStrategy): - paths.append(os.path.join( - self.path, os.path.basename(self.fetcher.url))) - - if self.mirror_path: - paths.append(os.path.join( - self.path, os.path.basename(self.mirror_path))) - - for path in paths: + for path in self.expected_archive_files: if os.path.exists(path): return path else: @@ -301,8 +301,10 @@ class Stage(object): # then use the same digest. `spack mirror` ensures that # the checksum will be the same. digest = None + expand = True if isinstance(self.default_fetcher, fs.URLFetchStrategy): digest = self.default_fetcher.digest + expand = self.default_fetcher.expand_archive # Have to skip the checksum for things archived from # repositories. How can this be made safer? 
@@ -310,9 +312,11 @@ class Stage(object): # Add URL strategies for all the mirrors with the digest for url in urls: - fetchers.insert(0, fs.URLFetchStrategy(url, digest)) - fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path, - digest)) + fetchers.insert( + 0, fs.URLFetchStrategy(url, digest, expand=expand)) + fetchers.insert( + 0, spack.fetch_cache.fetcher( + self.mirror_path, digest, expand=expand)) # Look for the archive in list_url package_name = os.path.dirname(self.mirror_path) -- cgit v1.2.3-60-g2f50 From a22f5d8d8691dd12309ebdb613c01341b55da1c3 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 8 Sep 2016 11:02:56 -0500 Subject: Don't overwrite PYTHONPATH in docs Makefile (#1693) --- lib/spack/docs/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile index 95d26041b7..bdbdab7e8b 100644 --- a/lib/spack/docs/Makefile +++ b/lib/spack/docs/Makefile @@ -7,7 +7,7 @@ SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build -export PYTHONPATH = ../../spack +export PYTHONPATH := ../../spack:$(PYTHONPATH) APIDOC_FILES = spack*.rst # Internal variables. -- cgit v1.2.3-60-g2f50 From 6117ef44e40358afe11e629309b344658cbe6cc3 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Wed, 14 Sep 2016 22:42:14 -0400 Subject: Make libstdc++-detection work on Darwin (#1778) --- lib/spack/spack/abi.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index 064abb9782..7a6f9c3f40 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -54,10 +54,10 @@ class ABI(object): output = None if compiler.cxx: rungcc = Executable(compiler.cxx) - libname = "libstdc++.so" + libname = "libstdc++." + dso_suffix elif compiler.cc: rungcc = Executable(compiler.cc) - libname = "libgcc_s.so" + libname = "libgcc_s." + dso_suffix else: return None try: -- cgit v1.2.3-60-g2f50 From 7043fff8078d0b7a62895302dabbe71904220edf Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 15 Sep 2016 04:48:28 +0200 Subject: Fix location -p and -P (#1776) --- lib/spack/spack/cmd/location.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index b9c8b5c330..54f7185707 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -25,7 +25,6 @@ import argparse import llnl.util.tty as tty -from llnl.util.filesystem import join_path import spack import spack.cmd @@ -77,7 +76,7 @@ def location(parser, args): print spack.prefix elif args.packages: - print spack.repo.root + print spack.repo.first_repo().root elif args.stages: print spack.stage_path @@ -99,7 +98,7 @@ def location(parser, args): if args.package_dir: # This one just needs the spec name. - print join_path(spack.repo.root, spec.name) + print spack.repo.dirname_for_package_name(spec.name) else: # These versions need concretized specs. -- cgit v1.2.3-60-g2f50 From efadc0e2997015545d656f587a5478c9fcc1c2ad Mon Sep 17 00:00:00 2001 From: Eric Date: Tue, 20 Sep 2016 11:18:26 +0200 Subject: Allow multi-user installations (#1804) When re-using previously downloaded tarballs, spack copies from `var/spack/stage/PACKAGE-VERSION-HASH/downloaded_file` to `var/spack/cache/PACKAGE/downloaded_file`. This fails if the source is owned by a different user (`shutil.copy` tries to retain all meta data including file ownership). 
Change to a non-meta-data copy function (`shutil.copyfile`). --- lib/spack/spack/fetch_strategy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 4349e32d4f..21802c4556 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -306,7 +306,7 @@ class URLFetchStrategy(FetchStrategy): if not self.archive_file: raise NoArchiveFileError("Cannot call archive() before fetching.") - shutil.copy(self.archive_file, destination) + shutil.copyfile(self.archive_file, destination) @_needs_stage def check(self): -- cgit v1.2.3-60-g2f50 From ea446c0f0ea0dc905ea66ad6a902cbb4713f920a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 20 Sep 2016 11:26:25 +0200 Subject: lmod : added support for the creation of hierarchical lua module files (#1723) Includes : - treatment of a generic hierarchy (i.e. lapack + mpi + compiler) - possibility to specify which compilers are to be considered Core - correct treatment of the 'family' directive - unit tests for most new features --- .gitignore | 1 + lib/spack/spack/cmd/__init__.py | 10 +- lib/spack/spack/cmd/module.py | 2 +- lib/spack/spack/hooks/__init__.py | 14 +- lib/spack/spack/hooks/lmodmodule.py | 35 +++ lib/spack/spack/modules.py | 239 ++++++++++++++- lib/spack/spack/schema/modules.py | 15 +- lib/spack/spack/test/database.py | 14 +- lib/spack/spack/test/modules.py | 372 +++++++++++++++-------- var/spack/repos/builtin/packages/llvm/package.py | 7 +- 10 files changed, 560 insertions(+), 149 deletions(-) create mode 100644 lib/spack/spack/hooks/lmodmodule.py (limited to 'lib') diff --git a/.gitignore b/.gitignore index b1215f0c7e..072bf30c07 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ /etc/spackconfig /share/spack/dotkit /share/spack/modules +/share/spack/lmod /TAGS /htmlcov .coverage diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index f69f434afd..6b1561b7fc 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -69,17 +69,17 @@ def get_cmd_function_name(name): def get_module(name): """Imports the module for a particular command name and returns it.""" module_name = "%s.%s" % (__name__, name) - module = __import__( - module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION], - level=0) + module = __import__(module_name, + fromlist=[name, SETUP_PARSER, DESCRIPTION], + level=0) attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op attr_setdefault(module, DESCRIPTION, "") fn_name = get_cmd_function_name(name) if not hasattr(module, fn_name): - tty.die("Command module %s (%s) must define function '%s'." - % (module.__name__, module.__file__, fn_name)) + tty.die("Command module %s (%s) must define function '%s'." 
% + (module.__name__, module.__file__, fn_name)) return module diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 2d0b83fe00..c6fa84109e 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -29,10 +29,10 @@ import os import shutil import sys +import llnl.util.filesystem as filesystem import llnl.util.tty as tty import spack.cmd import spack.cmd.common.arguments as arguments -import llnl.util.filesystem as filesystem from spack.modules import module_types description = "Manipulate module files" diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index c7c84defa0..ff4ebc2e57 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -24,7 +24,7 @@ ############################################################################## """This package contains modules with hooks for various stages in the Spack install process. You can add modules here and they'll be - executaed by package at various times during the package lifecycle. + executed by package at various times during the package lifecycle. Each hook is just a function that takes a package as a parameter. Hooks are not executed in any particular order. @@ -41,9 +41,10 @@ features. """ import imp -from llnl.util.lang import memoized, list_modules -from llnl.util.filesystem import join_path + import spack +from llnl.util.filesystem import join_path +from llnl.util.lang import memoized, list_modules @memoized @@ -70,12 +71,11 @@ class HookRunner(object): if hasattr(hook, '__call__'): hook(pkg) - # # Define some functions that can be called to fire off hooks. # -pre_install = HookRunner('pre_install') -post_install = HookRunner('post_install') +pre_install = HookRunner('pre_install') +post_install = HookRunner('post_install') -pre_uninstall = HookRunner('pre_uninstall') +pre_uninstall = HookRunner('pre_uninstall') post_uninstall = HookRunner('post_uninstall') diff --git a/lib/spack/spack/hooks/lmodmodule.py b/lib/spack/spack/hooks/lmodmodule.py new file mode 100644 index 0000000000..6b4318b1d0 --- /dev/null +++ b/lib/spack/spack/hooks/lmodmodule.py @@ -0,0 +1,35 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import spack.modules + + +def post_install(pkg): + dk = spack.modules.LmodModule(pkg.spec) + dk.write() + + +def post_uninstall(pkg): + dk = spack.modules.LmodModule(pkg.spec) + dk.remove() diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 3db08a6e90..d75bfbac45 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -40,6 +40,7 @@ module file. """ import copy import datetime +import itertools import os import os.path import re @@ -48,6 +49,7 @@ import textwrap import llnl.util.tty as tty import spack +import spack.compilers # Needed by LmodModules import spack.config from llnl.util.filesystem import join_path, mkdirp from spack.build_environment import parent_class_modules @@ -56,7 +58,8 @@ from spack.environment import * __all__ = ['EnvModule', 'Dotkit', 'TclModule'] -# Registry of all types of modules. Entries created by EnvModule's metaclass +"""Registry of all types of modules. Entries created by EnvModule's + metaclass.""" module_types = {} CONFIGURATION = spack.config.get_config('modules') @@ -633,3 +636,237 @@ class TclModule(EnvModule): raise SystemExit('Module generation aborted.') line = line.format(**naming_tokens) yield line + +# To construct an arbitrary hierarchy of module files: +# 1. Parse the configuration file and check that all the items in +# hierarchical_scheme are indeed virtual packages +# This needs to be done only once at start-up +# 2. Order the stack as `hierarchical_scheme + ['mpi, 'compiler'] +# 3. Check which of the services are provided by the package +# -> may be more than one +# 4. Check which of the services are needed by the package +# -> this determines where to write the module file +# 5. 
For each combination of services in which we have at least one provider +# here add the appropriate conditional MODULEPATH modifications + + +class LmodModule(EnvModule): + name = 'lmod' + path = join_path(spack.share_path, "lmod") + + environment_modifications_formats = { + PrependPath: 'prepend_path("{name}", "{value}")\n', + AppendPath: 'append_path("{name}", "{value}")\n', + RemovePath: 'remove_path("{name}", "{value}")\n', + SetEnv: 'setenv("{name}", "{value}")\n', + UnsetEnv: 'unsetenv("{name}")\n' + } + + autoload_format = ('if not isloaded("{module_file}") then\n' + ' LmodMessage("Autoloading {module_file}")\n' + ' load("{module_file}")\n' + 'end\n\n') + + prerequisite_format = 'prereq("{module_file}")\n' + + family_format = 'family("{family}")\n' + + path_part_with_hash = join_path('{token.name}', '{token.version}-{token.hash}') # NOQA: ignore=E501 + path_part_without_hash = join_path('{token.name}', '{token.version}') + + # TODO : Check that extra tokens specified in configuration file + # TODO : are actually virtual dependencies + configuration = CONFIGURATION.get('lmod', {}) + hierarchy_tokens = configuration.get('hierarchical_scheme', []) + hierarchy_tokens = hierarchy_tokens + ['mpi', 'compiler'] + + def __init__(self, spec=None): + super(LmodModule, self).__init__(spec) + # Sets the root directory for this architecture + self.modules_root = join_path(LmodModule.path, self.spec.architecture) + # Retrieve core compilers + self.core_compilers = self.configuration.get('core_compilers', []) + # Keep track of the requirements that this package has in terms + # of virtual packages + # that participate in the hierarchical structure + self.requires = {'compiler': self.spec.compiler} + # For each virtual dependency in the hierarchy + for x in self.hierarchy_tokens: + if x in self.spec and not self.spec.package.provides( + x): # if I depend on it + self.requires[x] = self.spec[x] # record the actual provider + # Check what are the services I need (this will determine where the + # module file will be written) + self.substitutions = {} + self.substitutions.update(self.requires) + # TODO : complete substitutions + # Check what service I provide to others + self.provides = {} + # If it is in the list of supported compilers family -> compiler + if self.spec.name in spack.compilers.supported_compilers(): + self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec)) + # Special case for llvm + if self.spec.name == 'llvm': + self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec)) + self.provides['compiler'].name = 'clang' + + for x in self.hierarchy_tokens: + if self.spec.package.provides(x): + self.provides[x] = self.spec[x] + + def _hierarchy_token_combinations(self): + """ + Yields all the relevant combinations that could appear in the hierarchy + """ + for ii in range(len(self.hierarchy_tokens) + 1): + for item in itertools.combinations(self.hierarchy_tokens, ii): + if 'compiler' in item: + yield item + + def _hierarchy_to_be_provided(self): + """ + Filters a list of hierarchy tokens and yields only the one that we + need to provide + """ + for item in self._hierarchy_token_combinations(): + if any(x in self.provides for x in item): + yield item + + def token_to_path(self, name, value): + # If we are dealing with a core compiler, return 'Core' + if name == 'compiler' and str(value) in self.core_compilers: + return 'Core' + # CompilerSpec does not have an hash + if name == 'compiler': + return self.path_part_without_hash.format(token=value) + # For virtual providers add 
a small part of the hash + # to distinguish among different variants in a directory hierarchy + value.hash = value.dag_hash(length=6) + return self.path_part_with_hash.format(token=value) + + @property + def file_name(self): + parts = [self.token_to_path(x, self.requires[x]) + for x in self.hierarchy_tokens if x in self.requires] + hierarchy_name = join_path(*parts) + fullname = join_path(self.modules_root, hierarchy_name, + self.use_name + '.lua') + return fullname + + @property + def use_name(self): + return self.token_to_path('', self.spec) + + def modulepath_modifications(self): + # What is available is what we require plus what we provide + entry = '' + available = {} + available.update(self.requires) + available.update(self.provides) + available_parts = [self.token_to_path(x, available[x]) + for x in self.hierarchy_tokens if x in available] + # Missing parts + missing = [x for x in self.hierarchy_tokens if x not in available] + # Direct path we provide on top of compilers + modulepath = join_path(self.modules_root, *available_parts) + env = EnvironmentModifications() + env.prepend_path('MODULEPATH', modulepath) + for line in self.process_environment_command(env): + entry += line + + def local_variable(x): + lower, upper = x.lower(), x.upper() + fmt = 'local {lower}_name = os.getenv("LMOD_{upper}_NAME")\n' + fmt += 'local {lower}_version = os.getenv("LMOD_{upper}_VERSION")\n' # NOQA: ignore=501 + return fmt.format(lower=lower, upper=upper) + + def set_variables_for_service(env, x): + upper = x.upper() + s = self.provides[x] + name, version = os.path.split(self.token_to_path(x, s)) + + env.set('LMOD_{upper}_NAME'.format(upper=upper), name) + env.set('LMOD_{upper}_VERSION'.format(upper=upper), version) + + def conditional_modulepath_modifications(item): + entry = 'if ' + needed = [] + for x in self.hierarchy_tokens: + if x in missing: + needed.append('{x}_name '.format(x=x)) + entry += 'and '.join(needed) + 'then\n' + entry += ' local t = pathJoin("{root}"'.format( + root=self.modules_root) + for x in item: + if x in missing: + entry += ', {lower}_name, {lower}_version'.format( + lower=x.lower()) + else: + entry += ', "{x}"'.format( + x=self.token_to_path(x, available[x])) + entry += ')\n' + entry += ' prepend_path("MODULEPATH", t)\n' + entry += 'end\n\n' + return entry + + if 'compiler' not in self.provides: + # Retrieve variables + entry += '\n' + for x in missing: + entry += local_variable(x) + entry += '\n' + # Conditional modifications + conditionals = [x + for x in self._hierarchy_to_be_provided() + if any(t in missing for t in x)] + for item in conditionals: + entry += conditional_modulepath_modifications(item) + + # Set environment variables for the services we provide + env = EnvironmentModifications() + for x in self.provides: + set_variables_for_service(env, x) + for line in self.process_environment_command(env): + entry += line + + return entry + + @property + def header(self): + timestamp = datetime.datetime.now() + # Header as in + # https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/advanced-user-guide/more-about-writing-module-files + header = "-- -*- lua -*-\n" + header += '-- Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501 + header += '--\n' + header += '-- %s\n' % self.spec.short_spec + header += '--\n' + + # Short description -> whatis() + if self.short_description: + header += "whatis([[Name : {name}]])\n".format(name=self.spec.name) + header += "whatis([[Version : {version}]])\n".format( + 
version=self.spec.version) + + # Long description -> help() + if self.long_description: + doc = re.sub(r'"', '\"', self.long_description) + header += "help([[{documentation}]])\n".format(documentation=doc) + + # Certain things need to be done only if we provide a service + if self.provides: + # Add family directives + header += '\n' + for x in self.provides: + header += self.family_format.format(family=x) + header += '\n' + header += '-- MODULEPATH modifications\n' + header += '\n' + # Modify MODULEPATH + header += self.modulepath_modifications() + # Set environment variables for services we provide + header += '\n' + header += '-- END MODULEPATH modifications\n' + header += '\n' + + return header diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index f8066919f1..bdc70c9ef1 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -139,7 +139,20 @@ schema = { 'default': [], 'items': { 'type': 'string', - 'enum': ['tcl', 'dotkit']}}, + 'enum': ['tcl', 'dotkit', 'lmod']}}, + 'lmod': { + 'allOf': [ + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, + { + 'core_compilers': { + '$ref': '#/definitions/array_of_strings' + }, + 'hierarchical_scheme': { + '$ref': '#/definitions/array_of_strings' + } + } # Specific lmod extensions + ]}, 'tcl': { 'allOf': [ # Base configuration diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 22b1f17890..b114bbacb8 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -26,8 +26,8 @@ These tests check the database is functioning properly, both in memory and in its file """ -import os.path import multiprocessing +import os.path import spack from llnl.util.filesystem import join_path @@ -88,16 +88,16 @@ class DatabaseTest(MockDatabase): # query specs with multiple configurations mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')] callpath_specs = [s for s in all_specs if s.satisfies('callpath')] - mpi_specs = [s for s in all_specs if s.satisfies('mpi')] + mpi_specs = [s for s in all_specs if s.satisfies('mpi')] self.assertEqual(len(mpileaks_specs), 3) self.assertEqual(len(callpath_specs), 3) self.assertEqual(len(mpi_specs), 3) # query specs with single configurations - dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')] + dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')] libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')] - libelf_specs = [s for s in all_specs if s.satisfies('libelf')] + libelf_specs = [s for s in all_specs if s.satisfies('libelf')] self.assertEqual(len(dyninst_specs), 1) self.assertEqual(len(libdwarf_specs), 1) @@ -163,16 +163,16 @@ class DatabaseTest(MockDatabase): # query specs with multiple configurations mpileaks_specs = self.installed_db.query('mpileaks') callpath_specs = self.installed_db.query('callpath') - mpi_specs = self.installed_db.query('mpi') + mpi_specs = self.installed_db.query('mpi') self.assertEqual(len(mpileaks_specs), 3) self.assertEqual(len(callpath_specs), 3) self.assertEqual(len(mpi_specs), 3) # query specs with single configurations - dyninst_specs = self.installed_db.query('dyninst') + dyninst_specs = self.installed_db.query('dyninst') libdwarf_specs = self.installed_db.query('libdwarf') - libelf_specs = self.installed_db.query('libelf') + libelf_specs = self.installed_db.query('libelf') self.assertEqual(len(dyninst_specs), 1) self.assertEqual(len(libdwarf_specs), 1) diff --git 
a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 55b771fc79..88f67090f1 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -49,105 +49,10 @@ def mock_open(filename, mode): handle.close() -configuration_autoload_direct = { - 'enable': ['tcl'], - 'tcl': { - 'all': { - 'autoload': 'direct' - } - } -} - -configuration_autoload_all = { - 'enable': ['tcl'], - 'tcl': { - 'all': { - 'autoload': 'all' - } - } -} - -configuration_prerequisites_direct = { - 'enable': ['tcl'], - 'tcl': { - 'all': { - 'prerequisites': 'direct' - } - } -} - -configuration_prerequisites_all = { - 'enable': ['tcl'], - 'tcl': { - 'all': { - 'prerequisites': 'all' - } - } -} - -configuration_alter_environment = { - 'enable': ['tcl'], - 'tcl': { - 'all': { - 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}, - 'environment': { - 'set': {'{name}_ROOT': '{prefix}'} - } - }, - 'platform=test target=x86_64': { - 'environment': { - 'set': {'FOO': 'foo'}, - 'unset': ['BAR'] - } - }, - 'platform=test target=x86_32': { - 'load': ['foo/bar'] - } - } -} - -configuration_blacklist = { - 'enable': ['tcl'], - 'tcl': { - 'whitelist': ['zmpi'], - 'blacklist': ['callpath', 'mpi'], - 'all': { - 'autoload': 'direct' - } - } -} - -configuration_conflicts = { - 'enable': ['tcl'], - 'tcl': { - 'naming_scheme': '{name}/{version}-{compiler.name}', - 'all': { - 'conflict': ['{name}', 'intel/14.0.1'] - } - } -} - -configuration_wrong_conflicts = { - 'enable': ['tcl'], - 'tcl': { - 'naming_scheme': '{name}/{version}-{compiler.name}', - 'all': { - 'conflict': ['{name}/{compiler.name}'] - } - } -} - -configuration_suffix = { - 'enable': ['tcl'], - 'tcl': { - 'mpileaks': { - 'suffixes': { - '+debug': 'foo', - '~debug': 'bar' - } - } - } -} +# Spec strings that will be used throughout the tests +mpich_spec_string = 'mpich@3.0.4' +mpileaks_spec_string = 'mpileaks' +libdwarf_spec_string = 'libdwarf arch=x64-linux' class HelperFunctionsTests(MockPackagesTest): @@ -187,44 +92,156 @@ class HelperFunctionsTests(MockPackagesTest): self.assertTrue('CPATH' in names) -class TclTests(MockPackagesTest): +class ModuleFileGeneratorTests(MockPackagesTest): + """ + Base class to test module file generators. Relies on child having defined + a 'factory' attribute to create an instance of the generator to be tested. 
+ """ def setUp(self): - super(TclTests, self).setUp() - self.configuration_obj = spack.modules.CONFIGURATION + super(ModuleFileGeneratorTests, self).setUp() + self.configuration_instance = spack.modules.CONFIGURATION + self.module_types_instance = spack.modules.module_types spack.modules.open = mock_open # Make sure that a non-mocked configuration will trigger an error spack.modules.CONFIGURATION = None + spack.modules.module_types = {self.factory.name: self.factory} def tearDown(self): del spack.modules.open - spack.modules.CONFIGURATION = self.configuration_obj - super(TclTests, self).tearDown() + spack.modules.module_types = self.module_types_instance + spack.modules.CONFIGURATION = self.configuration_instance + super(ModuleFileGeneratorTests, self).tearDown() def get_modulefile_content(self, spec): spec.concretize() - generator = spack.modules.TclModule(spec) + generator = self.factory(spec) generator.write() content = FILE_REGISTRY[generator.file_name].split('\n') return content + +class TclTests(ModuleFileGeneratorTests): + + factory = spack.modules.TclModule + + configuration_autoload_direct = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'autoload': 'direct' + } + } + } + + configuration_autoload_all = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'autoload': 'all' + } + } + } + + configuration_prerequisites_direct = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'prerequisites': 'direct' + } + } + } + + configuration_prerequisites_all = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'prerequisites': 'all' + } + } + } + + configuration_alter_environment = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}, + 'environment': { + 'set': {'{name}_ROOT': '{prefix}'} + } + }, + 'platform=test target=x86_64': { + 'environment': { + 'set': {'FOO': 'foo'}, + 'unset': ['BAR'] + } + }, + 'platform=test target=x86_32': { + 'load': ['foo/bar'] + } + } + } + + configuration_blacklist = { + 'enable': ['tcl'], + 'tcl': { + 'whitelist': ['zmpi'], + 'blacklist': ['callpath', 'mpi'], + 'all': { + 'autoload': 'direct' + } + } + } + + configuration_conflicts = { + 'enable': ['tcl'], + 'tcl': { + 'naming_scheme': '{name}/{version}-{compiler.name}', + 'all': { + 'conflict': ['{name}', 'intel/14.0.1'] + } + } + } + + configuration_wrong_conflicts = { + 'enable': ['tcl'], + 'tcl': { + 'naming_scheme': '{name}/{version}-{compiler.name}', + 'all': { + 'conflict': ['{name}/{compiler.name}'] + } + } + } + + configuration_suffix = { + 'enable': ['tcl'], + 'tcl': { + 'mpileaks': { + 'suffixes': { + '+debug': 'foo', + '~debug': 'bar' + } + } + } + } + def test_simple_case(self): - spack.modules.CONFIGURATION = configuration_autoload_direct - spec = spack.spec.Spec('mpich@3.0.4') + spack.modules.CONFIGURATION = self.configuration_autoload_direct + spec = spack.spec.Spec(mpich_spec_string) content = self.get_modulefile_content(spec) self.assertTrue('module-whatis "mpich @3.0.4"' in content) self.assertRaises(TypeError, spack.modules.dependencies, spec, 'non-existing-tag') def test_autoload(self): - spack.modules.CONFIGURATION = configuration_autoload_direct - spec = spack.spec.Spec('mpileaks') + spack.modules.CONFIGURATION = self.configuration_autoload_direct + spec = spack.spec.Spec(mpileaks_spec_string) content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2) self.assertEqual(len([x for x in content if 'module load ' in x]), 2) - spack.modules.CONFIGURATION = configuration_autoload_all - spec = 
spack.spec.Spec('mpileaks') + spack.modules.CONFIGURATION = self.configuration_autoload_all + spec = spack.spec.Spec(mpileaks_spec_string) content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5) self.assertEqual(len([x for x in content if 'module load ' in x]), 5) @@ -252,18 +269,18 @@ class TclTests(MockPackagesTest): self.assertEqual(len([x for x in content if 'module load ' in x]), 2) def test_prerequisites(self): - spack.modules.CONFIGURATION = configuration_prerequisites_direct + spack.modules.CONFIGURATION = self.configuration_prerequisites_direct spec = spack.spec.Spec('mpileaks arch=x86-linux') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'prereq' in x]), 2) - spack.modules.CONFIGURATION = configuration_prerequisites_all + spack.modules.CONFIGURATION = self.configuration_prerequisites_all spec = spack.spec.Spec('mpileaks arch=x86-linux') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'prereq' in x]), 5) def test_alter_environment(self): - spack.modules.CONFIGURATION = configuration_alter_environment + spack.modules.CONFIGURATION = self.configuration_alter_environment spec = spack.spec.Spec('mpileaks platform=test target=x86_64') content = self.get_modulefile_content(spec) self.assertEqual( @@ -293,7 +310,7 @@ class TclTests(MockPackagesTest): len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1) def test_blacklist(self): - spack.modules.CONFIGURATION = configuration_blacklist + spack.modules.CONFIGURATION = self.configuration_blacklist spec = spack.spec.Spec('mpileaks ^zmpi') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1) @@ -307,7 +324,7 @@ class TclTests(MockPackagesTest): self.assertEqual(len([x for x in content if 'module load ' in x]), 1) def test_conflicts(self): - spack.modules.CONFIGURATION = configuration_conflicts + spack.modules.CONFIGURATION = self.configuration_conflicts spec = spack.spec.Spec('mpileaks') content = self.get_modulefile_content(spec) self.assertEqual( @@ -317,11 +334,11 @@ class TclTests(MockPackagesTest): self.assertEqual( len([x for x in content if x == 'conflict intel/14.0.1']), 1) - spack.modules.CONFIGURATION = configuration_wrong_conflicts + spack.modules.CONFIGURATION = self.configuration_wrong_conflicts self.assertRaises(SystemExit, self.get_modulefile_content, spec) def test_suffixes(self): - spack.modules.CONFIGURATION = configuration_suffix + spack.modules.CONFIGURATION = self.configuration_suffix spec = spack.spec.Spec('mpileaks+debug arch=x86-linux') spec.concretize() generator = spack.modules.TclModule(spec) @@ -333,18 +350,123 @@ class TclTests(MockPackagesTest): self.assertTrue('bar' in generator.use_name) -configuration_dotkit = { - 'enable': ['dotkit'], - 'dotkit': { - 'all': { - 'prerequisites': 'direct' +class LmodTests(ModuleFileGeneratorTests): + factory = spack.modules.LmodModule + + configuration_autoload_direct = { + 'enable': ['lmod'], + 'lmod': { + 'all': { + 'autoload': 'direct' + } } } -} + + configuration_autoload_all = { + 'enable': ['lmod'], + 'lmod': { + 'all': { + 'autoload': 'all' + } + } + } + + configuration_alter_environment = { + 'enable': ['lmod'], + 'lmod': { + 'all': { + 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']} + }, + 'platform=test target=x86_64': { + 'environment': { + 'set': {'FOO': 'foo'}, + 'unset': ['BAR'] + } + }, + 'platform=test target=x86_32': { + 'load': ['foo/bar'] + } + } + } 
+ + configuration_blacklist = { + 'enable': ['lmod'], + 'lmod': { + 'blacklist': ['callpath'], + 'all': { + 'autoload': 'direct' + } + } + } + + def test_simple_case(self): + spack.modules.CONFIGURATION = self.configuration_autoload_direct + spec = spack.spec.Spec(mpich_spec_string) + content = self.get_modulefile_content(spec) + self.assertTrue('-- -*- lua -*-' in content) + self.assertTrue('whatis([[Name : mpich]])' in content) + self.assertTrue('whatis([[Version : 3.0.4]])' in content) + + def test_autoload(self): + spack.modules.CONFIGURATION = self.configuration_autoload_direct + spec = spack.spec.Spec(mpileaks_spec_string) + content = self.get_modulefile_content(spec) + self.assertEqual( + len([x for x in content if 'if not isloaded(' in x]), 2) + self.assertEqual(len([x for x in content if 'load(' in x]), 2) + + spack.modules.CONFIGURATION = self.configuration_autoload_all + spec = spack.spec.Spec(mpileaks_spec_string) + content = self.get_modulefile_content(spec) + self.assertEqual( + len([x for x in content if 'if not isloaded(' in x]), 5) + self.assertEqual(len([x for x in content if 'load(' in x]), 5) + + def test_alter_environment(self): + spack.modules.CONFIGURATION = self.configuration_alter_environment + spec = spack.spec.Spec('mpileaks platform=test target=x86_64') + content = self.get_modulefile_content(spec) + self.assertEqual( + len([x + for x in content + if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]), 0) + self.assertEqual( + len([x for x in content if 'setenv("FOO", "foo")' in x]), 1) + self.assertEqual( + len([x for x in content if 'unsetenv("BAR")' in x]), 1) + + spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32') + content = self.get_modulefile_content(spec) + print('\n'.join(content)) + self.assertEqual( + len([x + for x in content + if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]), 0) + self.assertEqual( + len([x for x in content if 'setenv("FOO", "foo")' in x]), 0) + self.assertEqual( + len([x for x in content if 'unsetenv("BAR")' in x]), 0) + + def test_blacklist(self): + spack.modules.CONFIGURATION = self.configuration_blacklist + spec = spack.spec.Spec(mpileaks_spec_string) + content = self.get_modulefile_content(spec) + self.assertEqual( + len([x for x in content if 'if not isloaded(' in x]), 1) + self.assertEqual(len([x for x in content if 'load(' in x]), 1) class DotkitTests(MockPackagesTest): + configuration_dotkit = { + 'enable': ['dotkit'], + 'dotkit': { + 'all': { + 'prerequisites': 'direct' + } + } + } + def setUp(self): super(DotkitTests, self).setUp() self.configuration_obj = spack.modules.CONFIGURATION @@ -365,7 +487,7 @@ class DotkitTests(MockPackagesTest): return content def test_dotkit(self): - spack.modules.CONFIGURATION = configuration_dotkit + spack.modules.CONFIGURATION = self.configuration_dotkit spec = spack.spec.Spec('mpileaks arch=x86-linux') content = self.get_modulefile_content(spec) self.assertTrue('#c spack' in content) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 61ea8daac4..7582faeb92 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -22,9 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -from spack import * import os +from spack import * + class Llvm(Package): """The LLVM Project 
is a collection of modular and reusable compiler and @@ -37,9 +38,11 @@ class Llvm(Package): homepage = 'http://llvm.org/' url = 'http://llvm.org/releases/3.7.1/llvm-3.7.1.src.tar.xz' + family = 'compiler' # Used by lmod + # currently required by mesa package version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', - url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') + url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') # currently required by mesa package variant('debug', default=False, description="Build a debug version of LLVM, this increases " -- cgit v1.2.3-60-g2f50 From c9fe2cd469f3f4e5e6cdb9905401da42ff214223 Mon Sep 17 00:00:00 2001 From: Eric Date: Tue, 20 Sep 2016 11:27:35 +0200 Subject: Fix format string (in class RemoveFailedError) (#1803) --- lib/spack/spack/directory_layout.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 73286483ef..8ef7d3c480 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -423,7 +423,7 @@ class RemoveFailedError(DirectoryLayoutError): def __init__(self, installed_spec, prefix, error): super(RemoveFailedError, self).__init__( 'Could not remove prefix %s for %s : %s' - % prefix, installed_spec.short_spec, error) + % (prefix, installed_spec.short_spec, error)) self.cause = error -- cgit v1.2.3-60-g2f50 From eb8a0ef75ee2d4b66418f39a336c4f0a829b5670 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 20 Sep 2016 16:47:13 +0200 Subject: fix : failing unit tests due to missing `self` (#1806) --- lib/spack/spack/test/modules.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 88f67090f1..c4cf2865bb 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -251,7 +251,7 @@ class TclTests(ModuleFileGeneratorTests): # - 1 ('build','link') dependency # - 1 ('build',) dependency # Just make sure the 'build' dependency is not there - spack.modules.CONFIGURATION = configuration_autoload_direct + spack.modules.CONFIGURATION = self.configuration_autoload_direct spec = spack.spec.Spec('dtbuild1') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2) @@ -262,7 +262,7 @@ class TclTests(ModuleFileGeneratorTests): # - 1 ('build','link') dependency # - 1 ('build',) dependency # Just make sure the 'build' dependency is not there - spack.modules.CONFIGURATION = configuration_autoload_all + spack.modules.CONFIGURATION = self.configuration_autoload_all spec = spack.spec.Spec('dtbuild1') content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2) -- cgit v1.2.3-60-g2f50 From 27801c354be7e05bca1b3fd68468db1d2728376b Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 21 Sep 2016 02:25:23 +0200 Subject: fix MacOs class for Sierra (#1811) --- lib/spack/spack/operating_systems/mac_os.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py index 3e5ab9b2e9..dafb5c1d41 100644 --- a/lib/spack/spack/operating_systems/mac_os.py +++ b/lib/spack/spack/operating_systems/mac_os.py @@ -22,7 +22,7 @@ class MacOs(OperatingSystem): "10.11": "elcapitan", "10.12": "sierra"} - mac_ver = py_platform.mac_ver()[0][:-2] + mac_ver = 
'.'.join(py_platform.mac_ver()[0].split('.')[:2]) name = mac_releases.get(mac_ver, "macos") super(MacOs, self).__init__(name, mac_ver) -- cgit v1.2.3-60-g2f50 From 02307cf7ceabdf16d02f1afd0f38c50bd7501080 Mon Sep 17 00:00:00 2001 From: Alfredo Adolfo Gimenez Date: Wed, 21 Sep 2016 00:52:09 -0700 Subject: Set JAVA_HOME on module load and on dependent installs. (#1716) * Set JAVA_HOME on module load and on dependent installs. * Include environment setup from dependencies in dependee module (#1714) --- lib/spack/spack/modules.py | 1 + var/spack/repos/builtin/packages/jdk/package.py | 6 ++++++ 2 files changed, 7 insertions(+) (limited to 'lib') diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index d75bfbac45..aa3ad5843f 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -392,6 +392,7 @@ class EnvModule(object): for mod in modules: set_module_variables_for_package(package, mod) set_module_variables_for_package(package, package.module) + package.setup_environment(spack_env, env) package.setup_dependent_package(self.pkg.module, self.spec) package.setup_dependent_environment(spack_env, env, self.spec) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 63bf6514cb..bab0920434 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -68,3 +68,9 @@ class Jdk(Package): def install(self, spec, prefix): distutils.dir_util.copy_tree(".", prefix) + + def setup_environment(self, spack_env, run_env): + run_env.set('JAVA_HOME', self.spec.prefix) + + def setup_dependent_environment(self, spack_env, run_env, dependent_spec): + spack_env.set('JAVA_HOME', self.spec.prefix) -- cgit v1.2.3-60-g2f50 From 94b24e88934aae11fd05c77600311ae99c980bc5 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Wed, 21 Sep 2016 04:48:33 -0400 Subject: Adds all available CrayPE CPU targets to platform by default. (#1745) --- lib/spack/spack/platforms/cray.py | 109 +++++++++++++++++++++++--------------- 1 file changed, 67 insertions(+), 42 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 0059b49ff1..9138ad7afe 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -8,37 +8,21 @@ from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl from llnl.util.filesystem import join_path -# Craype- module prefixes that are not valid CPU targets. -NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe') - - -def _target_from_clean_env(name): - '''Return the default back_end target as loaded in a clean login session. - - A bash subshell is launched with a wiped environment and the list of loaded - modules is parsed for the first acceptable CrayPE target. - ''' - # Based on the incantation: - # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')" - targets = [] - if name != 'front_end': - env = which('env') - env.add_default_arg('-') - # CAUTION - $USER is generally needed to initialize the environment. - # There may be other variables needed for general success. - output = env('USER=%s' % os.environ['USER'], - '/bin/bash', '--noprofile', '--norc', '-c', - '. 
/etc/profile; module list -lt', - output=str, error=str) - default_modules = [i for i in output.splitlines() - if len(i.split()) == 1] - tty.debug("Found default modules:", - *[" " + mod for mod in default_modules]) - pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS)) - for mod in default_modules: - if 'craype-' in mod: - targets.extend(re.findall(pattern, mod)) - return targets[0] if targets else None + +def _get_modules_in_modulecmd_output(output): + '''Return list of valid modules parsed from modulecmd output string.''' + return [i for i in output.splitlines() + if len(i.split()) == 1] + + +def _fill_craype_targets_from_modules(targets, modules): + '''Extend CrayPE CPU targets list with those found in list of modules.''' + # Craype- module prefixes that are not valid CPU targets. + non_targets = ('hugepages', 'network', 'target', 'accel', 'xtpe') + pattern = r'craype-(?!{0})(\S*)'.format('|'.join(non_targets)) + for mod in modules: + if 'craype-' in mod: + targets.extend(re.findall(pattern, mod)) class Cray(Platform): @@ -56,7 +40,12 @@ class Cray(Platform): ''' super(Cray, self).__init__('cray') - # Get targets from config or make best guess from environment: + # Make all craype targets available. + for target in self._avail_targets(): + name = target.replace('-', '_') + self.add_target(name, Target(name, 'craype-%s' % target)) + + # Get aliased targets from config or best guess from environment: conf = spack.config.get_config('targets') for name in ('front_end', 'back_end'): _target = getattr(self, name, None) @@ -64,18 +53,16 @@ class Cray(Platform): _target = os.environ.get('SPACK_' + name.upper()) if _target is None: _target = conf.get(name) - if _target is None: - _target = _target_from_clean_env(name) - setattr(self, name, _target) - + if _target is None and name == 'back_end': + _target = self._default_target_from_env() if _target is not None: - self.add_target(name, Target(_target, 'craype-' + _target)) - self.add_target(_target, Target(_target, 'craype-' + _target)) + safe_name = _target.replace('-', '_') + setattr(self, name, safe_name) + self.add_target(name, self.targets[safe_name]) if self.back_end is not None: self.default = self.back_end - self.add_target( - 'default', Target(self.default, 'craype-' + self.default)) + self.add_target('default', self.targets[self.back_end]) else: raise NoPlatformError() @@ -90,7 +77,7 @@ class Cray(Platform): self.add_operating_system(self.front_os, front_distro) @classmethod - def setup_platform_environment(self, pkg, env): + def setup_platform_environment(cls, pkg, env): """ Change the linker to default dynamic to be more similar to linux/standard linker behavior """ @@ -101,5 +88,43 @@ class Cray(Platform): env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names) @classmethod - def detect(self): + def detect(cls): return os.environ.get('CRAYPE_VERSION') is not None + + def _default_target_from_env(self): + '''Set and return the default CrayPE target loaded in a clean login + session. + + A bash subshell is launched with a wiped environment and the list of + loaded modules is parsed for the first acceptable CrayPE target. + ''' + # Based on the incantation: + # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')" + if getattr(self, 'default', None) is None: + env = which('env') + env.add_default_arg('-') + # CAUTION - $USER is generally needed in the sub-environment. + # There may be other variables needed for general success. 
+ output = env('USER=%s' % os.environ['USER'], + 'HOME=%s' % os.environ['HOME'], + '/bin/bash', '--noprofile', '--norc', '-c', + '. /etc/profile; module list -lt', + output=str, error=str) + self._defmods = _get_modules_in_modulecmd_output(output) + targets = [] + _fill_craype_targets_from_modules(targets, self._defmods) + self.default = targets[0] if targets else None + tty.debug("Found default modules:", + *[" %s" % mod for mod in self._defmods]) + return self.default + + def _avail_targets(self): + '''Return a list of available CrayPE CPU targets.''' + if getattr(self, '_craype_targets', None) is None: + module = which('modulecmd', required=True) + module.add_default_arg('python') + output = module('avail', '-t', 'craype-', output=str, error=str) + craype_modules = _get_modules_in_modulecmd_output(output) + self._craype_targets = targets = [] + _fill_craype_targets_from_modules(targets, craype_modules) + return self._craype_targets -- cgit v1.2.3-60-g2f50 From 899f3a7e37a5956255a4012e4690f3f539c8bbee Mon Sep 17 00:00:00 2001 From: Elizabeth Fischer Date: Wed, 21 Sep 2016 06:56:56 -0400 Subject: Fixed bug propagating --dirty flag to sub-installs. (#1625) * Fixed bug propagating --dirty flag to sub-installs. * Fix syntax error * Allow --dirty flag to be set with SPACK_DIRTY env var. * Added dirty flag to `spack diy` and `spack setup`, as is currently in `spack install` * flake8 --- lib/spack/spack/cmd/diy.py | 7 ++++++- lib/spack/spack/cmd/install.py | 6 ++++-- lib/spack/spack/cmd/setup.py | 7 ++++++- lib/spack/spack/package.py | 3 ++- 4 files changed, 18 insertions(+), 5 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 487654d261..1a3e2fd65c 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -52,6 +52,10 @@ def setup_parser(subparser): subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="specs to use for install. Must contain package AND version.") + subparser.add_argument( + '--dirty', action='store_true', dest='dirty', + help="Install a package *without* cleaning the environment. " + + "Or set SPACK_DIRTY environment variable") def diy(self, args): @@ -100,4 +104,5 @@ def diy(self, args): keep_prefix=args.keep_prefix, ignore_deps=args.ignore_deps, verbose=not args.quiet, - keep_stage=True) # don't remove source dir for DIY. + keep_stage=True, # don't remove source dir for DIY. + dirty=args.dirty or ('SPACK_DIRTY' in os.environ)) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 7663a97a28..a77af37ed0 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -28,6 +28,7 @@ import llnl.util.tty as tty import spack import spack.cmd +import os description = "Build and install packages" @@ -56,7 +57,8 @@ def setup_parser(subparser): help="Fake install. Just remove prefix and create a fake file.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', - help="Install a package *without* cleaning the environment.") + help="Install a package *without* cleaning the environment. 
" + + "Or set SPACK_DIRTY environment variable") subparser.add_argument( 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") @@ -88,5 +90,5 @@ def install(parser, args): run_tests=args.run_tests, verbose=args.verbose, fake=args.fake, - dirty=args.dirty, + dirty=args.dirty or ('SPACK_DIRTY' in os.environ), explicit=True) diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py index b55e102c0e..66095ee628 100644 --- a/lib/spack/spack/cmd/setup.py +++ b/lib/spack/spack/cmd/setup.py @@ -46,6 +46,10 @@ def setup_parser(subparser): subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="specs to use for install. Must contain package AND version.") + subparser.add_argument( + '--dirty', action='store_true', dest='dirty', + help="Install a package *without* cleaning the environment. " + + "Or set SPACK_DIRTY environment variable") def setup(self, args): @@ -91,4 +95,5 @@ def setup(self, args): ignore_deps=args.ignore_deps, verbose=args.verbose, keep_stage=True, # don't remove source dir for SETUP. - install_phases=set(['setup', 'provenance'])) + install_phases=set(['setup', 'provenance']), + dirty=args.dirty or ('SPACK_DIRTY' in os.environ)) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 882901d887..38bc6fa3f4 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -916,7 +916,8 @@ class Package(object): skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs, - run_tests=run_tests) + run_tests=run_tests, + dirty=dirty) # Set run_tests flag before starting build. self.run_tests = run_tests -- cgit v1.2.3-60-g2f50 From d848559f70ad67842150b51d2792843f3cce4621 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 21 Sep 2016 21:27:59 +0200 Subject: Reworking of `lapack_shared_libs` and similar properties (#1682) * Turned _libs into an iterable Modifications : - added class LibraryList + unit tests - added convenience functions `find_libraries` and `dedupe` - modifed non Intel blas/lapack providers - modified packages using blas_shared_libs and similar functions * atlas : added pthread variant * intel packages : added lapack_libs and blas_libs * find_library_path : removed unused function * PR review : fixed last issues * LibraryList : added test on __add__ return type * LibraryList : added __radd__ fixed unit tests fix : failing unit tests due to missing `self` * cp2k and dependecies : fixed blas-lapack related statements in package.py --- lib/spack/docs/packaging_guide.rst | 9 +- lib/spack/llnl/util/filesystem.py | 197 +++++++++++++++++---- lib/spack/llnl/util/lang.py | 16 ++ lib/spack/spack/spec.py | 7 + lib/spack/spack/test/__init__.py | 1 + lib/spack/spack/test/library_list.py | 111 ++++++++++++ .../repos/builtin/packages/armadillo/package.py | 14 +- .../repos/builtin/packages/arpack-ng/package.py | 34 ++-- var/spack/repos/builtin/packages/atlas/package.py | 36 ++-- var/spack/repos/builtin/packages/cp2k/package.py | 25 ++- var/spack/repos/builtin/packages/dealii/package.py | 5 +- var/spack/repos/builtin/packages/elk/package.py | 6 +- var/spack/repos/builtin/packages/gmsh/package.py | 6 +- var/spack/repos/builtin/packages/hpl/package.py | 2 +- var/spack/repos/builtin/packages/hypre/package.py | 9 +- .../packages/intel-parallel-studio/package.py | 44 +++-- var/spack/repos/builtin/packages/mkl/package.py | 48 ++--- var/spack/repos/builtin/packages/mumps/package.py | 8 +- .../builtin/packages/netlib-lapack/package.py | 29 ++- .../builtin/packages/netlib-scalapack/package.py | 46 ++--- 
var/spack/repos/builtin/packages/nwchem/package.py | 13 +- .../repos/builtin/packages/octopus/package.py | 8 +- .../repos/builtin/packages/openblas/package.py | 31 ++-- var/spack/repos/builtin/packages/opium/package.py | 10 +- var/spack/repos/builtin/packages/pexsi/package.py | 4 +- var/spack/repos/builtin/packages/psi4/package.py | 11 +- .../repos/builtin/packages/py-scipy/package.py | 4 +- .../repos/builtin/packages/suite-sparse/package.py | 4 +- .../repos/builtin/packages/sundials/package.py | 10 +- .../repos/builtin/packages/superlu-dist/package.py | 5 +- .../repos/builtin/packages/superlu/package.py | 2 +- .../repos/builtin/packages/trilinos/package.py | 8 +- .../repos/builtin/packages/wannier90/package.py | 7 +- 33 files changed, 526 insertions(+), 244 deletions(-) create mode 100644 lib/spack/spack/test/library_list.py (limited to 'lib') diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 6936b5e423..0294f32748 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2090,12 +2090,11 @@ Blas and Lapack libraries Different packages provide implementation of ``Blas`` and ``Lapack`` routines. The names of the resulting static and/or shared libraries differ from package -to package. In order to make the ``install()`` method indifferent to the +to package. In order to make the ``install()`` method independent of the choice of ``Blas`` implementation, each package which provides it -sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to -point to the shared and static ``Blas`` libraries, respectively. The same -applies to packages which provide ``Lapack``. Package developers are advised to -use these variables, for example ``spec['blas'].blas_shared_lib`` instead of +sets up ``self.spec.blas_libs`` to point to the correct ``Blas`` libraries. +The same applies to packages which provide ``Lapack``. Package developers are advised to +use these variables, for example ``spec['blas'].blas_libs.joined()`` instead of hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``. 
^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 22ca85abf9..c3ecfde4f4 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -22,18 +22,22 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os +import collections +import errno +import fileinput +import getpass import glob +import numbers +import os import re import shutil import stat -import errno -import getpass -from contextlib import contextmanager import subprocess -import fileinput +import sys +from contextlib import contextmanager import llnl.util.tty as tty +from llnl.util.lang import dedupe __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', @@ -42,8 +46,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'filter_file', 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'set_executable', 'copy_mode', 'unset_executable_mode', - 'remove_dead_links', 'remove_linked_tree', 'find_library_path', - 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name'] + 'remove_dead_links', 'remove_linked_tree', + 'fix_darwin_install_name', 'find_libraries', 'LibraryList'] def filter_file(regex, repl, *filenames, **kwargs): @@ -326,7 +330,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): follow_links = kwargs.get('follow_link', False) # Yield in pre or post order? - order = kwargs.get('order', 'pre') + order = kwargs.get('order', 'pre') if order not in ('pre', 'post'): raise ValueError("Order must be 'pre' or 'post'.") @@ -338,7 +342,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): return source_path = os.path.join(source_root, rel_path) - dest_path = os.path.join(dest_root, rel_path) + dest_path = os.path.join(dest_root, rel_path) # preorder yields directories before children if order == 'pre': @@ -346,8 +350,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): for f in os.listdir(source_path): source_child = os.path.join(source_path, f) - dest_child = os.path.join(dest_path, f) - rel_child = os.path.join(rel_path, f) + dest_child = os.path.join(dest_path, f) + rel_child = os.path.join(rel_path, f) # Treat as a directory if os.path.isdir(source_child) and ( @@ -440,35 +444,162 @@ def fix_darwin_install_name(path): stdout=subprocess.PIPE).communicate()[0] break +# Utilities for libraries + -def to_lib_name(library): - """Transforms a path to the library /path/to/lib.xyz into +class LibraryList(collections.Sequence): + """Sequence of absolute paths to libraries + + Provides a few convenience methods to manipulate library paths and get + commonly used compiler flags or names """ - # Assume libXYZ.suffix - return os.path.basename(library)[3:].split(".")[0] + def __init__(self, libraries): + self.libraries = list(libraries) -def to_link_flags(library): - """Transforms a path to a into linking flags -L -l. + @property + def directories(self): + """Stable de-duplication of the directories where the libraries + reside - Return: - A string of linking flags. 
- """ - dir = os.path.dirname(library) - name = to_lib_name(library) - res = '-L%s -l%s' % (dir, name) - return res + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a']) + >>> assert l.directories == ['/dir1', '/dir2'] + """ + return list(dedupe( + os.path.dirname(x) for x in self.libraries if os.path.dirname(x) + )) + + @property + def basenames(self): + """Stable de-duplication of the base-names in the list + + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a']) + >>> assert l.basenames == ['liba.a', 'libb.a'] + """ + return list(dedupe(os.path.basename(x) for x in self.libraries)) + + @property + def names(self): + """Stable de-duplication of library names in the list + + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so']) + >>> assert l.names == ['a', 'b'] + """ + return list(dedupe(x.split('.')[0][3:] for x in self.basenames)) + + @property + def search_flags(self): + """Search flags for the libraries + + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so']) + >>> assert l.search_flags == '-L/dir1 -L/dir2' + """ + return ' '.join(['-L' + x for x in self.directories]) + + @property + def link_flags(self): + """Link flags for the libraries + + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so']) + >>> assert l.search_flags == '-la -lb' + """ + return ' '.join(['-l' + name for name in self.names]) + @property + def ld_flags(self): + """Search flags + link flags -def find_library_path(libname, *paths): - """Searches for a file called in each path. + >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so']) + >>> assert l.search_flags == '-L/dir1 -L/dir2 -la -lb' + """ + return self.search_flags + ' ' + self.link_flags - Return: - directory where the library was found, if found. None otherwise. + def __getitem__(self, item): + cls = type(self) + if isinstance(item, numbers.Integral): + return self.libraries[item] + return cls(self.libraries[item]) + def __add__(self, other): + return LibraryList(dedupe(self.libraries + list(other))) + + def __radd__(self, other): + return self.__add__(other) + + def __eq__(self, other): + return self.libraries == other.libraries + + def __len__(self): + return len(self.libraries) + + def joined(self, separator=' '): + return separator.join(self.libraries) + + def __repr__(self): + return self.__class__.__name__ + '(' + repr(self.libraries) + ')' + + def __str__(self): + return self.joined() + + +def find_libraries(args, root, shared=True, recurse=False): + """Returns an iterable object containing a list of full paths to + libraries if found. + + Args: + args: iterable object containing a list of library names to \ + search for (e.g. 
'libhdf5') + root: root folder where to start searching + shared: if True searches for shared libraries, otherwise for static + recurse: if False search only root folder, if True descends top-down \ + from the root + + Returns: + list of full paths to the libraries that have been found """ - for path in paths: - library = join_path(path, libname) - if os.path.exists(library): - return path - return None + if not isinstance(args, collections.Sequence) or isinstance(args, str): + message = '{0} expects a sequence of strings as first argument' + message += ' [got {1} instead]' + raise TypeError(message.format(find_libraries.__name__, type(args))) + + # Construct the right suffix for the library + if shared is True: + suffix = 'dylib' if sys.platform == 'darwin' else 'so' + else: + suffix = 'a' + # List of libraries we are searching with suffixes + libraries = ['{0}.{1}'.format(lib, suffix) for lib in args] + # Search method + if recurse is False: + search_method = _find_libraries_non_recursive + else: + search_method = _find_libraries_recursive + + return search_method(libraries, root) + + +def _find_libraries_recursive(libraries, root): + library_dict = collections.defaultdict(list) + for path, _, files in os.walk(root): + for lib in libraries: + if lib in files: + library_dict[lib].append( + join_path(path, lib) + ) + answer = [] + for lib in libraries: + answer.extend(library_dict[lib]) + return LibraryList(answer) + + +def _find_libraries_non_recursive(libraries, root): + + def lib_or_none(lib): + library = join_path(root, lib) + if not os.path.exists(library): + return None + return library + + return LibraryList( + [lib_or_none(lib) for lib in libraries if lib_or_none(lib) is not None] + ) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index df32012e2d..253334c416 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -374,6 +374,22 @@ def DictWrapper(dictionary): return wrapper() +def dedupe(sequence): + """Yields a stable de-duplication of an hashable sequence + + Args: + sequence: hashable sequence to be de-duplicated + + Returns: + stable de-duplication of the sequence + """ + seen = set() + for x in sequence: + if x not in seen: + yield x + seen.add(x) + + class RequiredAttributeError(ValueError): def __init__(self, message): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 8b0486c4da..158d76e568 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -527,6 +527,13 @@ class Spec(object): # XXX(deptype): default deptypes self._add_dependency(spec, ('build', 'link')) + def __getattr__(self, item): + """Delegate to self.package if the attribute is not in the spec""" + # This line is to avoid infinite recursion in case package is + # not present among self attributes + package = super(Spec, self).__getattribute__('package') + return getattr(package, item) + def get_dependency(self, name): dep = self._dependencies.get(name) if dep is not None: diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index db683917b5..0a946ff2ff 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -53,6 +53,7 @@ test_names = [ 'git_fetch', 'hg_fetch', 'install', + 'library_list', 'link_tree', 'lock', 'make_executable', diff --git a/lib/spack/spack/test/library_list.py b/lib/spack/spack/test/library_list.py new file mode 100644 index 0000000000..7fc2fd222f --- /dev/null +++ b/lib/spack/spack/test/library_list.py @@ -0,0 +1,111 @@ 
+############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import unittest + +from llnl.util.filesystem import LibraryList + + +class LibraryListTest(unittest.TestCase): + def setUp(self): + l = [ + '/dir1/liblapack.a', + '/dir2/libfoo.dylib', + '/dir1/libblas.a', + '/dir3/libbar.so', + 'libbaz.so' + ] + self.liblist = LibraryList(l) + + def test_repr(self): + x = eval(repr(self.liblist)) + self.assertEqual(self.liblist, x) + + def test_joined_and_str(self): + s1 = self.liblist.joined() + self.assertEqual( + s1, + '/dir1/liblapack.a /dir2/libfoo.dylib /dir1/libblas.a /dir3/libbar.so libbaz.so' # NOQA: ignore=E501 + ) + s2 = str(self.liblist) + self.assertEqual(s1, s2) + s3 = self.liblist.joined(';') + self.assertEqual( + s3, + '/dir1/liblapack.a;/dir2/libfoo.dylib;/dir1/libblas.a;/dir3/libbar.so;libbaz.so' # NOQA: ignore=E501 + ) + + def test_flags(self): + search_flags = self.liblist.search_flags + self.assertTrue('-L/dir1' in search_flags) + self.assertTrue('-L/dir2' in search_flags) + self.assertTrue('-L/dir3' in search_flags) + self.assertTrue(isinstance(search_flags, str)) + + link_flags = self.liblist.link_flags + self.assertEqual( + link_flags, + '-llapack -lfoo -lblas -lbar -lbaz' + ) + + ld_flags = self.liblist.ld_flags + self.assertEqual(ld_flags, search_flags + ' ' + link_flags) + + def test_paths_manipulation(self): + names = self.liblist.names + self.assertEqual(names, ['lapack', 'foo', 'blas', 'bar', 'baz']) + + directories = self.liblist.directories + self.assertEqual(directories, ['/dir1', '/dir2', '/dir3']) + + def test_get_item(self): + a = self.liblist[0] + self.assertEqual(a, '/dir1/liblapack.a') + + b = self.liblist[:] + self.assertEqual(type(b), type(self.liblist)) + self.assertEqual(self.liblist, b) + self.assertTrue(self.liblist is not b) + + def test_add(self): + pylist = [ + '/dir1/liblapack.a', # removed from the final list + '/dir2/libbaz.so', + '/dir4/libnew.a' + ] + another = LibraryList(pylist) + l = self.liblist + another + self.assertEqual(len(l), 7) + # Invariant : l == l + l + self.assertEqual(l, l + l) + # Always produce an instance of LibraryList + self.assertEqual( + type(self.liblist), + type(self.liblist + pylist) + ) + self.assertEqual( + type(pylist + self.liblist), + type(self.liblist) + ) diff --git a/var/spack/repos/builtin/packages/armadillo/package.py 
b/var/spack/repos/builtin/packages/armadillo/package.py index 4356f60aca..fdd682f5e5 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -46,18 +46,20 @@ class Armadillo(Package): depends_on('hdf5', when='+hdf5') def install(self, spec, prefix): + arpack = find_libraries(['libarpack'], root=spec[ + 'arpack-ng'].prefix.lib, shared=True) + superlu = find_libraries(['libsuperlu'], root=spec[ + 'superlu'].prefix, shared=False, recurse=True) cmake_args = [ # ARPACK support - '-DARPACK_LIBRARY={0}/libarpack.{1}'.format( - spec['arpack-ng'].prefix.lib, dso_suffix), + '-DARPACK_LIBRARY={0}'.format(arpack.joined()), # BLAS support - '-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_shared_lib), + '-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_libs.joined()), # LAPACK support - '-DLAPACK_LIBRARY={0}'.format(spec['lapack'].lapack_shared_lib), + '-DLAPACK_LIBRARY={0}'.format(spec['lapack'].lapack_libs.joined()), # SuperLU support '-DSuperLU_INCLUDE_DIR={0}'.format(spec['superlu'].prefix.include), - '-DSuperLU_LIBRARY={0}/libsuperlu.a'.format( - spec['superlu'].prefix.lib64), + '-DSuperLU_LIBRARY={0}'.format(superlu.joined()), # HDF5 support '-DDETECT_HDF5={0}'.format('ON' if '+hdf5' in spec else 'OFF') ] diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 0e71125d41..7f92bc1950 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -88,17 +88,16 @@ class ArpackNg(Package): options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix) # Make sure we use Spack's blas/lapack: + lapack_libs = spec['lapack'].lapack_libs.joined() + blas_libs = spec['blas'].blas_libs.joined() + options.extend([ '-DLAPACK_FOUND=true', - '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include, - '-DLAPACK_LIBRARIES=%s' % ( - spec['lapack'].lapack_shared_lib if '+shared' in spec else - spec['lapack'].lapack_static_lib), + '-DLAPACK_INCLUDE_DIRS={0}'.format(spec['lapack'].prefix.include), + '-DLAPACK_LIBRARIES={0}'.format(lapack_libs), '-DBLAS_FOUND=true', - '-DBLAS_INCLUDE_DIRS=%s' % spec['blas'].prefix.include, - '-DBLAS_LIBRARIES=%s' % ( - spec['blas'].blas_shared_lib if '+shared' in spec else - spec['blas'].blas_static_lib) + '-DBLAS_INCLUDE_DIRS={0}'.format(spec['blas'].prefix.include), + '-DBLAS_LIBRARIES={0}'.format(blas_libs) ]) if '+mpi' in spec: @@ -129,19 +128,12 @@ class ArpackNg(Package): 'F77=%s' % spec['mpi'].mpif77 ]) - if '+shared' in spec: - options.extend([ - '--with-blas=%s' % to_link_flags( - spec['blas'].blas_shared_lib), - '--with-lapack=%s' % to_link_flags( - spec['lapack'].lapack_shared_lib) - ]) - else: - options.extend([ - '--with-blas=%s' % spec['blas'].blas_static_lib, - '--with-lapack=%s' % spec['lapack'].lapack_static_lib, - '--enable-shared=no' - ]) + options.extend([ + '--with-blas={0}'.format(spec['blas'].blas_libs.ld_flags), + '--with-lapack={0}'.format(spec['lapack'].lapack_libs.ld_flags) + ]) + if '+shared' not in spec: + options.append('--enable-shared=no') bootstrap() configure(*options) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index f9d5da6166..e1914aac98 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -51,6 +51,7 @@ class Atlas(Package): 
url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2') variant('shared', default=True, description='Builds shared library') + variant('pthread', default=False, description='Use multithreaded libraries') provides('blas') provides('lapack') @@ -107,18 +108,32 @@ class Atlas(Package): make("install") self.install_test() - def setup_dependent_package(self, module, dspec): + @property + def blas_libs(self): # libsatlas.[so,dylib,dll ] contains all serial APIs (serial lapack, # serial BLAS), and all ATLAS symbols needed to support them. Whereas # libtatlas.[so,dylib,dll ] is parallel (multithreaded) version. - name = 'libsatlas.%s' % dso_suffix - libdir = find_library_path(name, - self.prefix.lib64, - self.prefix.lib) - + is_threaded = '+pthread' in self.spec if '+shared' in self.spec: - self.spec.blas_shared_lib = join_path(libdir, name) - self.spec.lapack_shared_lib = self.spec.blas_shared_lib + to_find = ['libtatlas'] if is_threaded else ['libsatlas'] + shared = True + else: + interfaces = [ + 'libptcblas', + 'libptf77blas' + ] if is_threaded else [ + 'libcblas', + 'libf77blas' + ] + to_find = ['liblapack'] + interfaces + ['libatlas'] + shared = False + return find_libraries( + to_find, root=self.prefix, shared=shared, recurse=True + ) + + @property + def lapack_libs(self): + return self.blas_libs def install_test(self): source_file = join_path(os.path.dirname(self.module.__file__), @@ -126,9 +141,8 @@ class Atlas(Package): blessed_file = join_path(os.path.dirname(self.module.__file__), 'test_cblas_dgemm.output') - include_flags = ["-I%s" % join_path(self.spec.prefix, "include")] - link_flags = ["-L%s" % join_path(self.spec.prefix, "lib"), - "-lsatlas"] + include_flags = ["-I%s" % self.spec.prefix.include] + link_flags = self.lapack_libs.ld_flags output = compile_c_and_execute(source_file, include_flags, link_flags) compare_output_file(output, blessed_file) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 785de08d34..15606cd80a 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -95,7 +95,8 @@ class Cp2k(Package): fcflags.extend([ '-I' + spec['fftw'].prefix.include ]) - ldflags = ['-L' + spec['fftw'].prefix.lib] + fftw = find_libraries(['libfftw3'], root=spec['fftw'].prefix.lib) + ldflags = [fftw.search_flags] libs = [] if '+plumed' in self.spec: # Include Plumed.inc in the Makefile @@ -157,9 +158,8 @@ class Cp2k(Package): ), '-I' + join_path(spec['pexsi'].prefix, 'fortran') ]) - ldflags.extend([ - '-L' + spec['scalapack'].prefix.lib - ]) + scalapack = spec['scalapack'].scalapack_libs + ldflags.append(scalapack.search_flags) libs.extend([ join_path(spec['elpa'].prefix.lib, 'libelpa.{0}'.format(dso_suffix)), @@ -176,20 +176,15 @@ class Cp2k(Package): 'libmetis.{0}'.format(dso_suffix) ), ]) - libs.extend(spec['scalapack'].scalapack_shared_libs) + libs.extend(scalapack) libs.extend(self.spec['mpi'].mpicxx_shared_libs) libs.extend(self.compiler.stdcxx_libs) # LAPACK / BLAS - ldflags.extend([ - '-L' + spec['lapack'].prefix.lib, - '-L' + spec['blas'].prefix.lib - ]) - libs.extend([ - join_path(spec['fftw'].prefix.lib, - 'libfftw3.{0}'.format(dso_suffix)), - spec['lapack'].lapack_shared_lib, - spec['blas'].blas_shared_lib - ]) + lapack = spec['lapack'].lapack_libs + blas = spec['blas'].blas_libs + + ldflags.append((lapack + blas).search_flags) + libs.extend([str(x) for x in (fftw, lapack, blas)]) # Write 
compiler flags to file mkf.write('CPPFLAGS = {0}\n'.format(' '.join(cppflags))) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 9e8c639128..89732f98b8 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -123,6 +123,7 @@ class Dealii(Package): options.remove(word) dsuf = 'dylib' if sys.platform == 'darwin' else 'so' + lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs options.extend([ '-DCMAKE_BUILD_TYPE=DebugRelease', '-DDEAL_II_COMPONENT_EXAMPLES=ON', @@ -135,9 +136,7 @@ class Dealii(Package): '-DLAPACK_FOUND=true', '-DLAPACK_INCLUDE_DIRS=%s;%s' % ( spec['lapack'].prefix.include, spec['blas'].prefix.include), - '-DLAPACK_LIBRARIES=%s;%s' % ( - spec['lapack'].lapack_shared_lib, - spec['blas'].blas_shared_lib), + '-DLAPACK_LIBRARIES=%s' % lapack_blas.joined(';'), '-DMUPARSER_DIR=%s' % spec['muparser'].prefix, '-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix, '-DTBB_DIR=%s' % spec['tbb'].prefix, diff --git a/var/spack/repos/builtin/packages/elk/package.py b/var/spack/repos/builtin/packages/elk/package.py index b089e585dd..acaf863935 100644 --- a/var/spack/repos/builtin/packages/elk/package.py +++ b/var/spack/repos/builtin/packages/elk/package.py @@ -87,12 +87,12 @@ class Elk(Package): # BLAS/LAPACK support # Note: BLAS/LAPACK must be compiled with OpenMP support # if the +openmp variant is chosen - blas = 'blas.a' + blas = 'blas.a' lapack = 'lapack.a' if '+blas' in spec: - blas = spec['blas'].blas_shared_lib + blas = spec['blas'].blas_libs.joined() if '+lapack' in spec: - lapack = spec['lapack'].lapack_shared_lib + lapack = spec['lapack'].lapack_libs.joined() # lapack must come before blas config['LIB_LPK'] = ' '.join([lapack, blas]) diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py index 1d375f2186..dd142866e5 100644 --- a/var/spack/repos/builtin/packages/gmsh/package.py +++ b/var/spack/repos/builtin/packages/gmsh/package.py @@ -87,9 +87,9 @@ class Gmsh(Package): options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF') # Make sure GMSH picks up correct BlasLapack by providing linker flags - options.append('-DBLAS_LAPACK_LIBRARIES=%s %s' % - (to_link_flags(spec['lapack'].lapack_shared_lib), - to_link_flags(spec['blas'].blas_shared_lib))) + blas_lapack = spec['lapack'].lapack_libs + spec['blas'].blas_libs + options.append( + '-DBLAS_LAPACK_LIBRARIES={0}'.format(blas_lapack.ld_flags)) # Gmsh does not have an option to compile against external metis. 
# Its own Metis, however, fails to build diff --git a/var/spack/repos/builtin/packages/hpl/package.py b/var/spack/repos/builtin/packages/hpl/package.py index efd5c8bb1d..fa0013de17 100644 --- a/var/spack/repos/builtin/packages/hpl/package.py +++ b/var/spack/repos/builtin/packages/hpl/package.py @@ -78,7 +78,7 @@ class Hpl(Package): 'MPlib = -L{0}'.format(spec['mpi'].prefix.lib), # Linear Algebra library (BLAS or VSIPL) 'LAinc = {0}'.format(spec['blas'].prefix.include), - 'LAlib = {0}'.format(spec['blas'].blas_shared_lib), + 'LAlib = {0}'.format(spec['blas'].blas_libs.joined()), # F77 / C interface 'F2CDEFS = -DAdd_ -DF77_INTEGER=int -DStringSunStyle', # HPL includes / libraries / specifics diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index fdc236dcf4..96ed8411dd 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -60,13 +60,14 @@ class Hypre(Package): # to command the linker to include whole static libs' content into the # shared lib # Note: --with-(lapack|blas)_libs= needs space separated list of names + lapack = spec['lapack'].lapack_libs + blas = spec['blas'].blas_libs + configure_args = [ '--prefix=%s' % prefix, - '--with-lapack-libs=%s' % to_lib_name( - spec['lapack'].lapack_shared_lib), + '--with-lapack-libs=%s' % lapack.names, '--with-lapack-lib-dirs=%s' % spec['lapack'].prefix.lib, - '--with-blas-libs=%s' % to_lib_name( - spec['blas'].blas_shared_lib), + '--with-blas-libs=%s' % blas.names, '--with-blas-lib-dirs=%s' % spec['blas'].prefix.lib ] diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 65db3351a1..74bdba455f 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -45,6 +45,10 @@ class IntelParallelStudio(IntelInstaller): variant('tools', default=True, description="Install the Intel Advisor, " "VTune Amplifier, and Inspector tools") + variant('shared', default=True, description='Builds shared library') + variant('ilp64', default=False, description='64 bit integers') + variant('openmp', default=False, description='OpenMP multithreading layer') + provides('mpi', when='@cluster:+mpi') provides('mkl', when='+mkl') provides('daal', when='+daal') @@ -55,6 +59,28 @@ class IntelParallelStudio(IntelInstaller): provides('lapack', when='+mkl') # TODO: MKL also provides implementation of Scalapack. 
+ @property + def blas_libs(self): + shared = True if '+shared' in self.spec else False + suffix = dso_suffix if '+shared' in self.spec else 'a' + mkl_integer = ['libmkl_intel_ilp64'] if '+ilp64' in self.spec else ['libmkl_intel_lp64'] # NOQA: ignore=E501 + mkl_threading = ['libmkl_intel_thread'] if '+openmp' in self.spec else ['libmkl_sequential'] # NOQA: ignore=E501 + mkl_libs = find_libraries( + mkl_integer + ['libmkl_core'] + mkl_threading, + root=join_path(self.prefix.lib, 'intel64'), + shared=shared + ) + system_libs = [ + 'libpthread.{0}'.format(suffix), + 'libm.{0}'.format(suffix), + 'libdl.{0}'.format(suffix) + ] + return mkl_libs + system_libs + + @property + def lapack_libs(self): + return self.blas_libs + def check_variants(self, spec): error_message = '\t{variant} can not be turned off if "+all" is set' @@ -171,24 +197,6 @@ class IntelParallelStudio(IntelInstaller): os.symlink(os.path.join(self.prefix.man, "common", "man1"), os.path.join(self.prefix.man, "man1")) - def setup_dependent_package(self, module, dspec): - # For now use Single Dynamic Library: - # To set the threading layer at run time, use the - # mkl_set_threading_layer function or set MKL_THREADING_LAYER - # variable to one of the following values: INTEL, SEQUENTIAL, PGI. - # To set interface layer at run time, use the mkl_set_interface_layer - # function or set the MKL_INTERFACE_LAYER variable to LP64 or ILP64. - - # Otherwise one would need to specify several libraries - # (e.g. mkl_intel_lp64;mkl_sequential;mkl_core), which reflect - # different interface and threading layers. - - name = 'libmkl_rt.%s' % dso_suffix - libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib) - - self.spec.blas_shared_lib = join_path(libdir, name) - self.spec.lapack_shared_lib = self.spec.blas_shared_lib - def setup_environment(self, spack_env, run_env): # TODO: Determine variables needed for the professional edition. diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py index 71a233ff3e..a7353d57c4 100644 --- a/var/spack/repos/builtin/packages/mkl/package.py +++ b/var/spack/repos/builtin/packages/mkl/package.py @@ -24,13 +24,40 @@ class Mkl(IntelInstaller): version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34', url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd()) + variant('shared', default=True, description='Builds shared library') + variant('ilp64', default=False, description='64 bit integers') + variant('openmp', default=False, description='OpenMP multithreading layer') + # virtual dependency provides('blas') provides('lapack') # TODO: MKL also provides implementation of Scalapack. 
- def install(self, spec, prefix): + @property + def blas_libs(self): + shared = True if '+shared' in self.spec else False + suffix = dso_suffix if '+shared' in self.spec else 'a' + mkl_integer = ['libmkl_intel_ilp64'] if '+ilp64' in self.spec else ['libmkl_intel_lp64'] # NOQA: ignore=E501 + mkl_threading = ['libmkl_sequential'] + if '+openmp' in spec: + mkl_threading = ['libmkl_intel_thread'] if '%intel' in self.spec else ['libmkl_gnu_thread'] # NOQA: ignore=E501 + mkl_libs = find_libraries( + mkl_integer + ['libmkl_core'] + mkl_threading, + root=join_path(self.prefix.lib, 'intel64'), + shared=shared + ) + system_libs = [ + 'libpthread.{0}'.format(suffix), + 'libm.{0}'.format(suffix), + 'libdl.{0}'.format(suffix) + ] + return mkl_libs + system_libs + + @property + def lapack_libs(self): + return self.blas_libs + def install(self, spec, prefix): self.intel_prefix = os.path.join(prefix, "pkg") IntelInstaller.install(self, spec, prefix) @@ -45,25 +72,6 @@ class Mkl(IntelInstaller): os.symlink(os.path.join(mkl_lib_dir, f), os.path.join(self.prefix, "lib", f)) - def setup_dependent_package(self, module, dspec): - # For now use Single Dynamic Library: - # To set the threading layer at run time, use the - # mkl_set_threading_layer function or set MKL_THREADING_LAYER - # variable to one of the following values: INTEL, SEQUENTIAL, PGI. - # To set interface layer at run time, use the mkl_set_interface_layer - # function or set the MKL_INTERFACE_LAYER variable to LP64 or ILP64. - - # Otherwise one would need to specify several libraries - # (e.g. mkl_intel_lp64;mkl_sequential;mkl_core), which reflect - # different interface and threading layers. - - name = 'libmkl_rt.%s' % dso_suffix - libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib) - - # Now set blas/lapack libs: - self.spec.blas_shared_lib = join_path(libdir, name) - self.spec.lapack_shared_lib = self.spec.blas_shared_lib - def setup_dependent_environment(self, spack_env, run_env, dependent_spec): # set up MKLROOT for everyone using MKL package spack_env.set('MKLROOT', self.prefix) diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 32bc42a9c3..3466f091a0 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -75,9 +75,8 @@ class Mumps(Package): raise RuntimeError( 'You cannot use the variants parmetis or ptscotch without mpi') - makefile_conf = ["LIBBLAS = %s" % to_link_flags( - self.spec['blas'].blas_shared_lib) - ] + blas = self.spec['blas'].blas_libs + makefile_conf = ["LIBBLAS = %s" % blas.ld_flags] orderings = ['-Dpord'] @@ -136,11 +135,12 @@ class Mumps(Package): 'OPTC = %s -O ' % fpic]) if '+mpi' in self.spec: + scalapack = self.spec['scalapack'].scalapack_libs makefile_conf.extend( ["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), "FC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), "FL = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), - "SCALAP = %s" % self.spec['scalapack'].fc_link, + "SCALAP = %s" % scalapack.ld_flags, "MUMPS_TYPE = par"]) else: makefile_conf.extend( diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py index 93d64fb466..b73fe850d7 100644 --- a/var/spack/repos/builtin/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -67,6 +67,20 @@ class NetlibLapack(Package): '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 
'CBLAS/CMakeLists.txt', string=True) + @property + def blas_libs(self): + shared = True if '+shared' in self.spec else False + return find_libraries( + ['libblas'], root=self.prefix, shared=shared, recurse=True + ) + + @property + def lapack_libs(self): + shared = True if '+shared' in self.spec else False + return find_libraries( + ['liblapack'], root=self.prefix, shared=shared, recurse=True + ) + def install_one(self, spec, prefix, shared): cmake_args = [ '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'), @@ -100,18 +114,3 @@ class NetlibLapack(Package): # Build shared libraries if requested. if '+shared' in spec: self.install_one(spec, prefix, True) - - def setup_dependent_package(self, module, dspec): - # This is WIP for a prototype interface for virtual packages. - # We can update this as more builds start depending on BLAS/LAPACK. - libdir = find_library_path( - 'libblas.a', self.prefix.lib64, self.prefix.lib) - - self.spec.blas_static_lib = join_path(libdir, 'libblas.a') - self.spec.lapack_static_lib = join_path(libdir, 'liblapack.a') - - if '+shared' in self.spec: - self.spec.blas_shared_lib = join_path( - libdir, 'libblas.%s' % dso_suffix) - self.spec.lapack_shared_lib = join_path( - libdir, 'liblapack.%s' % dso_suffix) diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py index 13e932e176..fe9f1a3023 100644 --- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py @@ -28,10 +28,11 @@ import sys class NetlibScalapack(Package): """ScaLAPACK is a library of high-performance linear algebra routines for - parallel distributed memory machines""" + parallel distributed memory machines + """ homepage = "http://www.netlib.org/scalapack/" - url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz" + url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz" version('2.0.2', '2f75e600a2ba155ed9ce974a1c4b536f') version('2.0.1', '17b8cde589ea0423afe1ec43e7499161') @@ -39,10 +40,16 @@ class NetlibScalapack(Package): # versions before 2.0.0 are not using cmake and requires blacs as # a separated package - variant('shared', default=True, - description='Build the shared library version') - variant('fpic', default=False, - description="Build with -fpic compiler option") + variant( + 'shared', + default=True, + description='Build the shared library version' + ) + variant( + 'fpic', + default=False, + description='Build with -fpic compiler option' + ) provides('scalapack') @@ -51,6 +58,13 @@ class NetlibScalapack(Package): depends_on('blas') depends_on('cmake', when='@2.0.0:', type='build') + @property + def scalapack_libs(self): + shared = True if '+shared' in self.spec else False + return find_libraries( + ['libscalapack'], root=self.prefix, shared=shared, recurse=True + ) + def install(self, spec, prefix): options = [ "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else @@ -60,14 +74,13 @@ class NetlibScalapack(Package): ] # Make sure we use Spack's Lapack: + blas = spec['blas'].blas_libs + lapack = spec['lapack'].lapack_libs options.extend([ '-DLAPACK_FOUND=true', - '-DLAPACK_LIBRARIES=%s' % ( - spec['lapack'].lapack_shared_lib if '+shared' in spec else - spec['lapack'].lapack_static_lib), - '-DBLAS_LIBRARIES=%s' % ( - spec['blas'].blas_shared_lib if '+shared' in spec else - spec['blas'].blas_static_lib) + '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include, + '-DLAPACK_LIBRARIES=%s' % (lapack.joined()), + 
'-DBLAS_LIBRARIES=%s' % (blas.joined()) ]) if '+fpic' in spec: @@ -86,12 +99,3 @@ class NetlibScalapack(Package): # The shared libraries are not installed correctly on Darwin: if (sys.platform == 'darwin') and ('+shared' in spec): fix_darwin_install_name(prefix.lib) - - def setup_dependent_package(self, module, dependent_spec): - spec = self.spec - lib_suffix = dso_suffix if '+shared' in spec else 'a' - - spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib - spec.cc_link = spec.fc_link - spec.libraries = [join_path(spec.prefix.lib, - 'libscalapack.%s' % lib_suffix)] diff --git a/var/spack/repos/builtin/packages/nwchem/package.py b/var/spack/repos/builtin/packages/nwchem/package.py index 13c710a35a..b15c1c02fd 100644 --- a/var/spack/repos/builtin/packages/nwchem/package.py +++ b/var/spack/repos/builtin/packages/nwchem/package.py @@ -66,6 +66,9 @@ class Nwchem(Package): patch('Gcc6_macs_optfix.patch', when='@6.6', level=0) def install(self, spec, prefix): + scalapack = spec['scalapack'].scalapack_libs + lapack = spec['lapack'].lapack_libs + blas = spec['blas'].blas_libs # see http://www.nwchem-sw.org/index.php/Compiling_NWChem args = [] args.extend([ @@ -79,13 +82,11 @@ class Nwchem(Package): 'USE_PYTHONCONFIG=y', 'PYTHONVERSION=%s' % spec['python'].version.up_to(2), 'PYTHONHOME=%s' % spec['python'].prefix, - 'BLASOPT=%s %s' % ( - to_link_flags(spec['lapack'].lapack_shared_lib), - to_link_flags(spec['blas'].blas_shared_lib)), - 'BLAS_LIB=%s' % to_link_flags(spec['blas'].blas_shared_lib), - 'LAPACK_LIB=%s' % to_link_flags(spec['lapack'].lapack_shared_lib), + 'BLASOPT=%s' % ((lapack + blas).ld_flags), + 'BLAS_LIB=%s' % blas.ld_flags, + 'LAPACK_LIB=%s' % lapack.ld_flags, 'USE_SCALAPACK=y', - 'SCALAPACK=%s' % spec['scalapack'].fc_link, + 'SCALAPACK=%s' % scalapack.ld_flags, 'NWCHEM_MODULES=all python', 'NWCHEM_LONG_PATHS=Y' # by default NWCHEM_TOP is 64 char max ]) diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index ff4a106940..adb760f06d 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -46,13 +46,13 @@ class Octopus(Package): # FEAST, Libfm, PFFT, ISF, PNFFT def install(self, spec, prefix): + lapack = spec['lapack'].lapack_libs + blas = spec['blas'].blas_libs args = [] args.extend([ '--prefix=%s' % prefix, - '--with-blas=%s' % to_link_flags( - spec['blas'].blas_shared_lib), - '--with-lapack=%s' % to_link_flags( - spec['lapack'].lapack_shared_lib), + '--with-blas=%s' % blas.ld_flags, + '--with-lapack=%s' % lapack.ld_flags, '--with-gsl-prefix=%s' % spec['gsl'].prefix, '--with-libxc-prefix=%s' % spec['libxc'].prefix, 'CC=%s' % spec['mpi'].mpicc, diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 23c7b02daf..89c35fb991 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -30,7 +30,7 @@ import os class Openblas(Package): """OpenBLAS: An optimized BLAS library""" homepage = "http://www.openblas.net" - url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" version('0.2.19', '28c998054fd377279741c6f0b9ea7941') version('0.2.18', '805e7f660877d588ea7e3792cda2ee65') @@ -51,6 +51,17 @@ class Openblas(Package): patch('make.patch') + @property + def blas_libs(self): + shared = True if '+shared' in self.spec else False + return 
find_libraries( + ['libopenblas'], root=self.prefix, shared=shared, recurse=True + ) + + @property + def lapack_libs(self): + return self.blas_libs + def install(self, spec, prefix): # As of 06/2016 there is no mechanism to specify that packages which # depends on Blas/Lapack need C or/and Fortran symbols. For now @@ -100,6 +111,9 @@ class Openblas(Package): # no quotes around prefix (spack doesn't use a shell) make('install', "PREFIX=%s" % prefix, *make_defs) + # TODO : the links below are mainly there because client + # TODO : packages are wrongly written. Check if they can be removed + # Blas virtual package should provide blas.a and libblas.a with working_dir(prefix.lib): symlink('libopenblas.a', 'blas.a') @@ -120,21 +134,6 @@ class Openblas(Package): # test. self.check_install(spec) - def setup_dependent_package(self, module, dspec): - # This is WIP for a prototype interface for virtual packages. - # We can update this as more builds start depending on BLAS/LAPACK. - libdir = find_library_path('libopenblas.a', - self.prefix.lib64, - self.prefix.lib) - - self.spec.blas_static_lib = join_path(libdir, 'libopenblas.a') - self.spec.lapack_static_lib = self.spec.blas_static_lib - - if '+shared' in self.spec: - self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' % - dso_suffix) - self.spec.lapack_shared_lib = self.spec.blas_shared_lib - def check_install(self, spec): source_file = join_path(os.path.dirname(self.module.__file__), 'test_cblas_dgemm.c') diff --git a/var/spack/repos/builtin/packages/opium/package.py b/var/spack/repos/builtin/packages/opium/package.py index 2c81d92cc0..521f917230 100644 --- a/var/spack/repos/builtin/packages/opium/package.py +++ b/var/spack/repos/builtin/packages/opium/package.py @@ -29,7 +29,7 @@ class Opium(Package): """DFT pseudopotential generation project""" homepage = "https://opium.sourceforge.net/index.html" - url = "https://downloads.sourceforge.net/project/opium/opium/opium-v3.8/opium-v3.8-src.tgz" + url = "https://downloads.sourceforge.net/project/opium/opium/opium-v3.8/opium-v3.8-src.tgz" version('3.8', 'f710c0f869e70352b4a510c31e13bf9f') @@ -37,12 +37,8 @@ class Opium(Package): depends_on('lapack') def install(self, spec, prefix): - options = [ - 'LDFLAGS=%s %s' % ( - to_link_flags(spec['lapack'].lapack_shared_lib), - to_link_flags(spec['blas'].blas_shared_lib) - ) - ] + libs = spec['lapack'].lapack_libs + spec['blas'].blas_libs + options = ['LDFLAGS=%s' % libs.ld_flags] configure(*options) with working_dir("src", create=False): diff --git a/var/spack/repos/builtin/packages/pexsi/package.py b/var/spack/repos/builtin/packages/pexsi/package.py index e74ad6df01..9fc71d4c52 100644 --- a/var/spack/repos/builtin/packages/pexsi/package.py +++ b/var/spack/repos/builtin/packages/pexsi/package.py @@ -67,8 +67,8 @@ class Pexsi(Package): '@PARMETIS_PREFIX': self.spec['parmetis'].prefix, '@LAPACK_PREFIX': self.spec['lapack'].prefix, '@BLAS_PREFIX': self.spec['blas'].prefix, - '@LAPACK_LIBS': self.spec['lapack'].lapack_shared_lib, - '@BLAS_LIBS': self.spec['lapack'].blas_shared_lib, + '@LAPACK_LIBS': self.spec['lapack'].lapack_libs.joined(), + '@BLAS_LIBS': self.spec['lapack'].blas_libs.joined(), '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs) } diff --git a/var/spack/repos/builtin/packages/psi4/package.py b/var/spack/repos/builtin/packages/psi4/package.py index 6296d0cee6..566aa50f44 100644 --- a/var/spack/repos/builtin/packages/psi4/package.py +++ b/var/spack/repos/builtin/packages/psi4/package.py @@ -32,7 +32,7 @@ class Psi4(Package): a variety of 
molecular properties.""" homepage = "http://www.psicode.org/" - url = "https://github.com/psi4/psi4/archive/0.5.tar.gz" + url = "https://github.com/psi4/psi4/archive/0.5.tar.gz" version('0.5', '53041b8a9be3958384171d0d22f9fdd0') @@ -62,9 +62,10 @@ class Psi4(Package): def install(self, spec, prefix): cmake_args = [ '-DBLAS_TYPE={0}'.format(spec['blas'].name.upper()), - '-DBLAS_LIBRARIES={0}'.format(spec['blas'].blas_shared_lib), + '-DBLAS_LIBRARIES={0}'.format(spec['blas'].blas_libs.joined()), '-DLAPACK_TYPE={0}'.format(spec['lapack'].name.upper()), - '-DLAPACK_LIBRARIES={0}'.format(spec['lapack'].lapack_shared_lib), + '-DLAPACK_LIBRARIES={0}'.format( + spec['lapack'].lapack_libs.joined()), '-DBOOST_INCLUDEDIR={0}'.format(spec['boost'].prefix.include), '-DBOOST_LIBRARYDIR={0}'.format(spec['boost'].prefix.lib), '-DENABLE_CHEMPS2=OFF' @@ -90,9 +91,9 @@ class Psi4(Package): kwargs = {'ignore_absent': True, 'backup': False, 'string': True} - cc_files = ['bin/psi4-config'] + cc_files = ['bin/psi4-config'] cxx_files = ['bin/psi4-config', 'include/psi4/psiconfig.h'] - template = 'share/psi4/plugin/Makefile.template' + template = 'share/psi4/plugin/Makefile.template' for filename in cc_files: filter_file(os.environ['CC'], self.compiler.cc, diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 83a69c5682..68b2cb13e0 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -47,7 +47,7 @@ class PyScipy(Package): env['ATLAS'] = join_path( spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix) else: - env['BLAS'] = spec['blas'].blas_shared_lib - env['LAPACK'] = spec['lapack'].lapack_shared_lib + env['BLAS'] = spec['blas'].blas_libs.joined() + env['LAPACK'] = spec['lapack'].lapack_libs.joined() python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index a71bfd8bd4..22f069885d 100644 --- a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -80,8 +80,8 @@ class SuiteSparse(Package): # Make sure Spack's Blas/Lapack is used. Otherwise System's # Blas/Lapack might be picked up. 
- blas = to_link_flags(spec['blas'].blas_shared_lib) - lapack = to_link_flags(spec['lapack'].lapack_shared_lib) + blas = spec['blas'].blas_libs.ld_flags + lapack = spec['lapack'].lapack_libs.ld_flags if '@4.5.1' in spec: # adding -lstdc++ is clearly an ugly way to do this, but it follows # with the TCOV path of SparseSuite 4.5.1's Suitesparse_config.mk diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index 7582954ab1..6ee247b7ea 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -31,7 +31,7 @@ class Sundials(Package): Solvers)""" homepage = "http://computation.llnl.gov/casc/sundials/" - url = "http://computation.llnl.gov/projects/sundials-suite-nonlinear-differential-algebraic-equation-solvers/download/sundials-2.6.2.tar.gz" + url = "http://computation.llnl.gov/projects/sundials-suite-nonlinear-differential-algebraic-equation-solvers/download/sundials-2.6.2.tar.gz" version('2.6.2', '3deeb0ede9f514184c6bd83ecab77d95') @@ -79,9 +79,9 @@ class Sundials(Package): if '+lapack' in spec: cmake_args.extend([ '-DLAPACK_ENABLE=ON', - '-DLAPACK_LIBRARIES={0};{1}'.format( - spec['lapack'].lapack_shared_lib, - spec['blas'].blas_shared_lib + '-DLAPACK_LIBRARIES={0}'.format( + (spec['lapack'].lapack_libs + + spec['blas'].blas_libs).joined(';') ) ]) else: @@ -113,7 +113,7 @@ class Sundials(Package): elif '+pthread' in spec: cmake_args.append('-DSUPERLUMT_THREAD_TYPE=Pthread') else: - msg = 'You must choose either +openmp or +pthread when ' + msg = 'You must choose either +openmp or +pthread when ' msg += 'building with SuperLU_MT' raise RuntimeError(msg) else: diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 85b7f689d0..a29b74bf08 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -46,15 +46,14 @@ class SuperluDist(Package): depends_on('metis@5:') def install(self, spec, prefix): + lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs makefile_inc = [] makefile_inc.extend([ 'PLAT = _mac_x', 'DSuperLUroot = %s' % self.stage.source_path, 'DSUPERLULIB = $(DSuperLUroot)/lib/libsuperlu_dist.a', 'BLASDEF = -DUSE_VENDOR_BLAS', - 'BLASLIB = %s %s' % - (to_link_flags(spec['lapack'].lapack_shared_lib), - to_link_flags(spec['blas'].blas_shared_lib)), + 'BLASLIB = %s' % lapack_blas.ld_flags, 'METISLIB = -L%s -lmetis' % spec['metis'].prefix.lib, 'PARMETISLIB = -L%s -lparmetis' % spec['parmetis'].prefix.lib, 'FLIBS =', diff --git a/var/spack/repos/builtin/packages/superlu/package.py b/var/spack/repos/builtin/packages/superlu/package.py index c634c1d1ba..d9cff650b6 100644 --- a/var/spack/repos/builtin/packages/superlu/package.py +++ b/var/spack/repos/builtin/packages/superlu/package.py @@ -42,7 +42,7 @@ class Superlu(Package): '-DCMAKE_POSITION_INDEPENDENT_CODE=ON', # BLAS support '-Denable_blaslib=OFF', - '-DBLAS_blas_LIBRARY={0}'.format(spec['blas'].blas_shared_lib) + '-DBLAS_blas_LIBRARY={0}'.format(spec['blas'].blas_libs.joined()) ] cmake_args.extend(std_cmake_args) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 3a88f67340..716d8f8eb6 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -132,6 +132,8 @@ class Trilinos(Package): mpi_bin = 
spec['mpi'].prefix.bin # Note: -DXYZ_LIBRARY_NAMES= needs semicolon separated list of names + blas = spec['blas'].blas_libs + lapack = spec['lapack'].lapack_libs options.extend([ '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON', @@ -145,12 +147,10 @@ class Trilinos(Package): '-DTPL_ENABLE_MPI:BOOL=ON', '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix, '-DTPL_ENABLE_BLAS=ON', - '-DBLAS_LIBRARY_NAMES=%s' % to_lib_name( - spec['blas'].blas_shared_lib), + '-DBLAS_LIBRARY_NAMES=%s' % ';'.join(blas.names), '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib, '-DTPL_ENABLE_LAPACK=ON', - '-DLAPACK_LIBRARY_NAMES=%s' % to_lib_name( - spec['lapack'].lapack_shared_lib), + '-DLAPACK_LIBRARY_NAMES=%s' % ';'.join(lapack.names), '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix.lib, '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTrilinos_ENABLE_CXX11:BOOL=ON', diff --git a/var/spack/repos/builtin/packages/wannier90/package.py b/var/spack/repos/builtin/packages/wannier90/package.py index 189e568cdc..119d2cf769 100644 --- a/var/spack/repos/builtin/packages/wannier90/package.py +++ b/var/spack/repos/builtin/packages/wannier90/package.py @@ -47,13 +47,12 @@ class Wannier90(Package): def install(self, spec, prefix): + lapack = self.spec['lapack'].lapack_libs + blas = self.spec['blas'].blas_libs substitutions = { '@F90': spack_fc, '@MPIF90': self.spec['mpi'].mpifc, - '@LIBS': ' '.join([ - self.spec['lapack'].lapack_shared_lib, - self.spec['lapack'].blas_shared_lib - ]) + '@LIBS': (lapack + blas).joined() } ####### # TODO : this part is replicated in PEXSI -- cgit v1.2.3-60-g2f50 From 1db25526445f6d8764708c39315f77be3f2d57c8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 22 Sep 2016 09:31:28 +0200 Subject: fix : stops infinite recursion for python 2.6 (#1823) --- lib/spack/spack/spec.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 158d76e568..c92594da72 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -531,8 +531,9 @@ class Spec(object): """Delegate to self.package if the attribute is not in the spec""" # This line is to avoid infinite recursion in case package is # not present among self attributes - package = super(Spec, self).__getattribute__('package') - return getattr(package, item) + if item.endswith('libs'): + return getattr(self.package, item) + raise AttributeError() def get_dependency(self, name): dep = self._dependencies.get(name) -- cgit v1.2.3-60-g2f50 From 98f9dd266fe53444c77bcb6b810dda788c1fa558 Mon Sep 17 00:00:00 2001 From: Elizabeth Fischer Date: Thu, 22 Sep 2016 03:43:33 -0400 Subject: Remove SPACK_DIRTY env var (#1818) * Removed SPACK_DIRTY env var support. * Finished removing SPACK_DIRTY support. * Minor changes. --- lib/spack/spack/cmd/diy.py | 5 ++--- lib/spack/spack/cmd/install.py | 6 ++---- lib/spack/spack/cmd/setup.py | 5 ++--- 3 files changed, 6 insertions(+), 10 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 1a3e2fd65c..9833e8cdce 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -54,8 +54,7 @@ def setup_parser(subparser): help="specs to use for install. Must contain package AND version.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', - help="Install a package *without* cleaning the environment. 
" + - "Or set SPACK_DIRTY environment variable") + help="Install a package *without* cleaning the environment.") def diy(self, args): @@ -105,4 +104,4 @@ def diy(self, args): ignore_deps=args.ignore_deps, verbose=not args.quiet, keep_stage=True, # don't remove source dir for DIY. - dirty=args.dirty or ('SPACK_DIRTY' in os.environ)) + dirty=args.dirty) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index a77af37ed0..7663a97a28 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -28,7 +28,6 @@ import llnl.util.tty as tty import spack import spack.cmd -import os description = "Build and install packages" @@ -57,8 +56,7 @@ def setup_parser(subparser): help="Fake install. Just remove prefix and create a fake file.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', - help="Install a package *without* cleaning the environment. " + - "Or set SPACK_DIRTY environment variable") + help="Install a package *without* cleaning the environment.") subparser.add_argument( 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") @@ -90,5 +88,5 @@ def install(parser, args): run_tests=args.run_tests, verbose=args.verbose, fake=args.fake, - dirty=args.dirty or ('SPACK_DIRTY' in os.environ), + dirty=args.dirty, explicit=True) diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py index 66095ee628..4dfa13eccf 100644 --- a/lib/spack/spack/cmd/setup.py +++ b/lib/spack/spack/cmd/setup.py @@ -48,8 +48,7 @@ def setup_parser(subparser): help="specs to use for install. Must contain package AND version.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', - help="Install a package *without* cleaning the environment. " + - "Or set SPACK_DIRTY environment variable") + help="Install a package *without* cleaning the environment.") def setup(self, args): @@ -96,4 +95,4 @@ def setup(self, args): verbose=args.verbose, keep_stage=True, # don't remove source dir for SETUP. install_phases=set(['setup', 'provenance']), - dirty=args.dirty or ('SPACK_DIRTY' in os.environ)) + dirty=args.dirty) -- cgit v1.2.3-60-g2f50 From 025b779a30476dd2b6ba9851e4ef1d57812b97c7 Mon Sep 17 00:00:00 2001 From: Eric Date: Thu, 22 Sep 2016 09:43:47 +0200 Subject: Fix sbang for perl (#1802) * Perform shebang fix for all files * Fix sbang for perl scripts Otherwise perl would look at the #! line and call sbang again, resulting in an infinite loop. --- bin/sbang | 8 ++++++-- lib/spack/spack/hooks/sbang.py | 12 +++++++----- 2 files changed, 13 insertions(+), 7 deletions(-) (limited to 'lib') diff --git a/bin/sbang b/bin/sbang index 1ea5f06592..e71074b330 100755 --- a/bin/sbang +++ b/bin/sbang @@ -111,8 +111,12 @@ while read line && ((lines < 2)) ; do done < "$script" # Invoke any interpreter found, or raise an error if none was found. 
-if [ -n "$interpreter" ]; then - exec $interpreter "$@" +if [[ -n "$interpreter" ]]; then + if [[ "${interpreter##*/}" = "perl" ]]; then + exec $interpreter -x "$@" + else + exec $interpreter "$@" + fi else echo "error: sbang found no interpreter in $script" exit 1 diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 02c1ce3816..6f9736a018 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -81,8 +81,10 @@ def filter_shebang(path): tty.warn("Patched overlong shebang in %s" % path) -def filter_shebangs_in_directory(directory): - for file in os.listdir(directory): +def filter_shebangs_in_directory(directory, filenames=None): + if filenames is None: + filenames = os.listdir(directory) + for file in filenames: path = os.path.join(directory, file) # only handle files @@ -104,6 +106,6 @@ def post_install(pkg): """This hook edits scripts so that they call /bin/bash $spack_prefix/bin/sbang instead of something longer than the shebang limit.""" - if not os.path.isdir(pkg.prefix.bin): - return - filter_shebangs_in_directory(pkg.prefix.bin) + + for directory, _, filenames in os.walk(pkg.prefix): + filter_shebangs_in_directory(directory, filenames) -- cgit v1.2.3-60-g2f50 From f1f301ad3b994bd4c3d17db4a4bf5e5be2fa5ed8 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 22 Sep 2016 13:33:29 -0500 Subject: Fix spack checksum output indentation (#1826) --- lib/spack/spack/cmd/checksum.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index aedb0fd99c..2e24d0527e 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -113,6 +113,6 @@ def checksum(parser, args): tty.die("Could not fetch any versions for %s" % pkg.name) version_lines = [ - " version('%s', '%s')" % (v, h) for v, h in version_hashes + " version('%s', '%s')" % (v, h) for v, h in version_hashes ] tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) -- cgit v1.2.3-60-g2f50 From c7860322f5a07450ca7a5a60e97be95eeb33fc23 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Thu, 22 Sep 2016 14:47:25 -0400 Subject: package: fix variable name usage (#1829) --- lib/spack/spack/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 38bc6fa3f4..fe19e5d400 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -722,7 +722,7 @@ class Package(object): if not ignore_checksum: raise FetchError("Will not fetch %s" % - self.spec.format('$_$@'), checksum_msg) + self.spec.format('$_$@'), ck_msg) self.stage.fetch(mirror_only) -- cgit v1.2.3-60-g2f50 From cb36aadaf6cfb0373e28b435cb0a4ea3ea19aac4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 23 Sep 2016 18:01:38 -0700 Subject: Fix doc bugs, widen columns in the package list --- lib/spack/docs/conf.py | 2 +- var/spack/repos/builtin/packages/heppdt/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'lib') diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 43d37f1526..ed3e5811bc 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -65,7 +65,7 @@ spack_version = subprocess.Popen( # Set an environment variable so that colify will print output like it would to # a terminal. 
-os.environ['COLIFY_SIZE'] = '25x80' +os.environ['COLIFY_SIZE'] = '25x120' # # Generate package list using spack command diff --git a/var/spack/repos/builtin/packages/heppdt/package.py b/var/spack/repos/builtin/packages/heppdt/package.py index 5fb7ec86d5..54c846ae33 100644 --- a/var/spack/repos/builtin/packages/heppdt/package.py +++ b/var/spack/repos/builtin/packages/heppdt/package.py @@ -27,7 +27,7 @@ from spack import * class Heppdt(Package): - """ The HepPID library contains translation methods for particle ID's + """The HepPID library contains translation methods for particle ID's to and from various Monte Carlo generators and the PDG standard numbering scheme. We realize that the generators adhere closely to the standard, but there are occasional differences.""" -- cgit v1.2.3-60-g2f50 From cd960caf8dfb5cc80b8dae9493e3a078e45c7713 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Sep 2016 08:52:02 -0400 Subject: Clean up Exceptions in `spec.py` --- lib/spack/spack/spec.py | 63 ++-------------------------------------- lib/spack/spack/test/spec_dag.py | 6 ++-- 2 files changed, 6 insertions(+), 63 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index c92594da72..99fc2d3ea4 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1569,7 +1569,7 @@ class Spec(object): # actually deps of this package. Raise an error. extra = set(spec_deps.keys()).difference(visited) if extra: - raise InvalidDependencyException( + raise InvalidDependencyError( self.name + " does not depend on " + comma_or(extra)) # Mark the spec as normal once done. @@ -2667,17 +2667,11 @@ def parse_anonymous_spec(spec_like, pkg_name): class SpecError(spack.error.SpackError): - """Superclass for all errors that occur while constructing specs.""" - def __init__(self, message): - super(SpecError, self).__init__(message) - class SpecParseError(SpecError): - """Wrapper for ParseError for when we're parsing specs.""" - def __init__(self, parse_error): super(SpecParseError, self).__init__(parse_error.message) self.string = parse_error.string @@ -2685,79 +2679,49 @@ class SpecParseError(SpecError): class DuplicateDependencyError(SpecError): - """Raised when the same dependency occurs in a spec twice.""" - def __init__(self, message): - super(DuplicateDependencyError, self).__init__(message) - class DuplicateVariantError(SpecError): - """Raised when the same variant occurs in a spec twice.""" - def __init__(self, message): - super(DuplicateVariantError, self).__init__(message) - class DuplicateCompilerSpecError(SpecError): - """Raised when the same compiler occurs in a spec twice.""" - def __init__(self, message): - super(DuplicateCompilerSpecError, self).__init__(message) - class UnsupportedCompilerError(SpecError): - """Raised when the user asks for a compiler spack doesn't know about.""" - def __init__(self, compiler_name): super(UnsupportedCompilerError, self).__init__( "The '%s' compiler is not yet supported." % compiler_name) class UnknownVariantError(SpecError): - """Raised when the same variant occurs in a spec twice.""" - def __init__(self, pkg, variant): super(UnknownVariantError, self).__init__( "Package %s has no variant %s!" 
% (pkg, variant)) class DuplicateArchitectureError(SpecError): - """Raised when the same architecture occurs in a spec twice.""" - def __init__(self, message): - super(DuplicateArchitectureError, self).__init__(message) - class InconsistentSpecError(SpecError): - """Raised when two nodes in the same spec DAG have inconsistent constraints.""" - def __init__(self, message): - super(InconsistentSpecError, self).__init__(message) - - -class InvalidDependencyException(SpecError): +class InvalidDependencyError(SpecError): """Raised when a dependency in a spec is not actually a dependency of the package.""" - def __init__(self, message): - super(InvalidDependencyException, self).__init__(message) - class NoProviderError(SpecError): - """Raised when there is no package that provides a particular virtual dependency. """ - def __init__(self, vpkg): super(NoProviderError, self).__init__( "No providers found for virtual package: '%s'" % vpkg) @@ -2765,11 +2729,9 @@ class NoProviderError(SpecError): class MultipleProviderError(SpecError): - """Raised when there is no package that provides a particular virtual dependency. """ - def __init__(self, vpkg, providers): """Takes the name of the vpkg""" super(MultipleProviderError, self).__init__( @@ -2780,10 +2742,8 @@ class MultipleProviderError(SpecError): class UnsatisfiableSpecError(SpecError): - """Raised when a spec conflicts with package constraints. Provide the requirement that was violated when raising.""" - def __init__(self, provided, required, constraint_type): super(UnsatisfiableSpecError, self).__init__( "%s does not satisfy %s" % (provided, required)) @@ -2793,89 +2753,72 @@ class UnsatisfiableSpecError(SpecError): class UnsatisfiableSpecNameError(UnsatisfiableSpecError): - """Raised when two specs aren't even for the same package.""" - def __init__(self, provided, required): super(UnsatisfiableSpecNameError, self).__init__( provided, required, "name") class UnsatisfiableVersionSpecError(UnsatisfiableSpecError): - """Raised when a spec version conflicts with package constraints.""" - def __init__(self, provided, required): super(UnsatisfiableVersionSpecError, self).__init__( provided, required, "version") class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError): - """Raised when a spec comiler conflicts with package constraints.""" - def __init__(self, provided, required): super(UnsatisfiableCompilerSpecError, self).__init__( provided, required, "compiler") class UnsatisfiableVariantSpecError(UnsatisfiableSpecError): - """Raised when a spec variant conflicts with package constraints.""" - def __init__(self, provided, required): super(UnsatisfiableVariantSpecError, self).__init__( provided, required, "variant") class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError): - """Raised when a spec variant conflicts with package constraints.""" - def __init__(self, provided, required): super(UnsatisfiableCompilerFlagSpecError, self).__init__( provided, required, "compiler_flags") class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError): - """Raised when a spec architecture conflicts with package constraints.""" - def __init__(self, provided, required): super(UnsatisfiableArchitectureSpecError, self).__init__( provided, required, "architecture") class UnsatisfiableProviderSpecError(UnsatisfiableSpecError): - """Raised when a provider is supplied but constraints don't match a vpkg requirement""" - def __init__(self, provided, required): super(UnsatisfiableProviderSpecError, self).__init__( provided, required, "provider") + # TODO: get 
rid of this and be more specific about particular incompatible # dep constraints class UnsatisfiableDependencySpecError(UnsatisfiableSpecError): - """Raised when some dependency of constrained specs are incompatible""" - def __init__(self, provided, required): super(UnsatisfiableDependencySpecError, self).__init__( provided, required, "dependency") class SpackYAMLError(spack.error.SpackError): - def __init__(self, msg, yaml_error): super(SpackYAMLError, self).__init__(msg, str(yaml_error)) class AmbiguousHashError(SpecError): - def __init__(self, msg, *specs): super(AmbiguousHashError, self).__init__(msg) for spec in specs: diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 5c2731041c..40cdb02966 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -241,15 +241,15 @@ class SpecDagTest(MockPackagesTest): def test_invalid_dep(self): spec = Spec('libelf ^mpich') - self.assertRaises(spack.spec.InvalidDependencyException, + self.assertRaises(spack.spec.InvalidDependencyError, spec.normalize) spec = Spec('libelf ^libdwarf') - self.assertRaises(spack.spec.InvalidDependencyException, + self.assertRaises(spack.spec.InvalidDependencyError, spec.normalize) spec = Spec('mpich ^dyninst ^libelf') - self.assertRaises(spack.spec.InvalidDependencyException, + self.assertRaises(spack.spec.InvalidDependencyError, spec.normalize) def test_equal(self): -- cgit v1.2.3-60-g2f50 From 43ca805248deb8fe2f87c0009ab44cc30458e39a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Sep 2016 09:28:22 -0400 Subject: Factor out canonical_deptype function, cleanup spec.py --- lib/spack/spack/spec.py | 87 +++++++++++++++++++++++++++++++------ lib/spack/spack/test/spec_syntax.py | 54 +++++++++++------------ 2 files changed, 100 insertions(+), 41 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 99fc2d3ea4..108e771748 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -123,6 +123,39 @@ from spack.util.spack_yaml import syaml_dict from spack.version import * from spack.provider_index import ProviderIndex +__all__ = [ + 'Spec', + 'alldeps', + 'nolink', + 'nobuild', + 'canonical_deptype', + 'validate_deptype', + 'parse', + 'parse_anonymous_spec', + 'SpecError', + 'SpecParseError', + 'DuplicateDependencyError', + 'DuplicateVariantError', + 'DuplicateCompilerSpecError', + 'UnsupportedCompilerError', + 'UnknownVariantError', + 'DuplicateArchitectureError', + 'InconsistentSpecError', + 'InvalidDependencyError', + 'InvalidDependencyTypeError', + 'NoProviderError', + 'MultipleProviderError', + 'UnsatisfiableSpecError', + 'UnsatisfiableSpecNameError', + 'UnsatisfiableVersionSpecError', + 'UnsatisfiableCompilerSpecError', + 'UnsatisfiableVariantSpecError', + 'UnsatisfiableCompilerFlagSpecError', + 'UnsatisfiableArchitectureSpecError', + 'UnsatisfiableProviderSpecError', + 'UnsatisfiableDependencySpecError', + 'SpackYAMLError', + 'AmbiguousHashError'] # Valid pattern for an identifier in Spack identifier_re = r'\w[\w-]*' @@ -156,12 +189,45 @@ _any_version = VersionList([':']) # Special types of dependencies. 
alldeps = ('build', 'link', 'run') -nolink = ('build', 'run') +nolink = ('build', 'run') +nobuild = ('link', 'run') +norun = ('link', 'build') special_types = { 'alldeps': alldeps, 'nolink': nolink, + 'nobuild': nobuild, + 'norun': norun, } +legal_deps = tuple(special_types) + alldeps + + +def validate_deptype(deptype): + if isinstance(deptype, str): + if deptype not in legal_deps: + raise InvalidDependencyTypeError( + "Invalid dependency type: %s" % deptype) + + elif isinstance(deptype, (list, tuple)): + for t in deptype: + validate_deptype(t) + + elif deptype is None: + raise InvalidDependencyTypeError("deptype cannot be None!") + + +def canonical_deptype(deptype): + if deptype is None: + return alldeps + + elif isinstance(deptype, str): + return special_types.get(deptype, (deptype,)) + + elif isinstance(deptype, (tuple, list)): + return (sum((canonical_deptype(d) for d in deptype), ())) + + return deptype + def colorize_spec(spec): """Returns a spec colorized according to the colors specified in @@ -542,17 +608,8 @@ class Spec(object): raise InvalidDependencyException( self.name + " does not depend on " + comma_or(name)) - def _deptype_norm(self, deptype): - if deptype is None: - return alldeps - # Force deptype to be a set object so that we can do set intersections. - if isinstance(deptype, str): - # Support special deptypes. - return special_types.get(deptype, (deptype,)) - return deptype - def _find_deps(self, where, deptype): - deptype = self._deptype_norm(deptype) + deptype = canonical_deptype(deptype) return [dep.spec for dep in where.values() @@ -565,7 +622,7 @@ class Spec(object): return self._find_deps(self._dependents, deptype) def _find_deps_dict(self, where, deptype): - deptype = self._deptype_norm(deptype) + deptype = canonical_deptype(deptype) return dict((dep.spec.name, dep) for dep in where.values() @@ -2718,6 +2775,10 @@ class InvalidDependencyError(SpecError): of the package.""" +class InvalidDependencyTypeError(SpecError): + """Raised when a dependency type is not a legal Spack dep type.""" + + class NoProviderError(SpecError): """Raised when there is no package that provides a particular virtual dependency. @@ -2804,8 +2865,6 @@ class UnsatisfiableProviderSpecError(UnsatisfiableSpecError): # TODO: get rid of this and be more specific about particular incompatible # dep constraints - - class UnsatisfiableDependencySpecError(UnsatisfiableSpecError): """Raised when some dependency of constrained specs are incompatible""" def __init__(self, provided, required): diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 3079288c77..d4eb9e057f 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -24,34 +24,34 @@ ############################################################################## import unittest -import spack.spec +import spack.spec as sp from spack.parse import Token from spack.spec import * # Sample output for a complex lexing. 
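The canonical_deptype and validate_deptype helpers factored out above give every caller one normalized spelling for dependency types. Their behavior, following the definitions in this hunk (a quick interactive sketch, assuming a Spack checkout is importable):

    from spack.spec import canonical_deptype, validate_deptype

    canonical_deptype(None)               # ('build', 'link', 'run'), i.e. alldeps
    canonical_deptype('nolink')           # ('build', 'run')
    canonical_deptype(('run',))           # ('run',)
    canonical_deptype(['build', 'link'])  # ('build', 'link')

    validate_deptype('link')              # fine
    validate_deptype('compile')           # raises InvalidDependencyTypeError
    validate_deptype(None)                # also raises: deptype cannot be None

Validation and canonicalization stay separate on purpose: the graph command later in this series calls validate_deptype on the user-supplied list first, then canonical_deptype on the result.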
-complex_lex = [Token(ID, 'mvapich_foo'), - Token(DEP), - Token(ID, '_openmpi'), - Token(AT), - Token(ID, '1.2'), - Token(COLON), - Token(ID, '1.4'), - Token(COMMA), - Token(ID, '1.6'), - Token(PCT), - Token(ID, 'intel'), - Token(AT), - Token(ID, '12.1'), - Token(COLON), - Token(ID, '12.6'), - Token(ON), - Token(ID, 'debug'), - Token(OFF), - Token(ID, 'qt_4'), - Token(DEP), - Token(ID, 'stackwalker'), - Token(AT), - Token(ID, '8.1_1e')] +complex_lex = [Token(sp.ID, 'mvapich_foo'), + Token(sp.DEP), + Token(sp.ID, '_openmpi'), + Token(sp.AT), + Token(sp.ID, '1.2'), + Token(sp.COLON), + Token(sp.ID, '1.4'), + Token(sp.COMMA), + Token(sp.ID, '1.6'), + Token(sp.PCT), + Token(sp.ID, 'intel'), + Token(sp.AT), + Token(sp.ID, '12.1'), + Token(sp.COLON), + Token(sp.ID, '12.6'), + Token(sp.ON), + Token(sp.ID, 'debug'), + Token(sp.OFF), + Token(sp.ID, 'qt_4'), + Token(sp.DEP), + Token(sp.ID, 'stackwalker'), + Token(sp.AT), + Token(sp.ID, '8.1_1e')] class SpecSyntaxTest(unittest.TestCase): @@ -74,16 +74,16 @@ class SpecSyntaxTest(unittest.TestCase): """ if spec is None: spec = expected - output = spack.spec.parse(spec) + output = sp.parse(spec) parsed = (" ".join(str(spec) for spec in output)) self.assertEqual(expected, parsed) def check_lex(self, tokens, spec): """Check that the provided spec parses to the provided token list.""" - lex_output = SpecLexer().lex(spec) + lex_output = sp.SpecLexer().lex(spec) for tok, spec_tok in zip(tokens, lex_output): - if tok.type == ID: + if tok.type == sp.ID: self.assertEqual(tok, spec_tok) else: # Only check the type for non-identifiers. -- cgit v1.2.3-60-g2f50 From 0d3d74e5c2f0ad76c818449f21f91d6c9ed4e74a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Sep 2016 10:22:46 -0400 Subject: Improvements to the Spack graph command. - Distinguish between static (package) and dynamic (spec) graphs. - static graphs ignore conditions and multiple instances (hashes) and plot raw dependencies among packages. - dynamic graphs include information from particular specs (instances of packages) and can have multiple instances with hashes. - Allow graphing all packages in the install DB. - useful for debugging. --- lib/spack/spack/cmd/graph.py | 40 ++++++++++++++++++++--- lib/spack/spack/graph.py | 78 ++++++++++++++++++++++++++++++-------------- lib/spack/spack/package.py | 14 ++++++++ 3 files changed, 103 insertions(+), 29 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 8faabfbb7b..6de1ed974b 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -24,8 +24,11 @@ ############################################################################## import argparse +import llnl.util.tty as tty + import spack import spack.cmd +from spack.spec import * from spack.graph import * description = "Generate graphs of package dependency relationships." 
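The static/dynamic split described in the commit message corresponds to two ways of calling the graphing code. A sketch of driving both from Python rather than from the command line (assumes a working Spack checkout; graph_dot has the signature introduced in this commit):

    import sys

    from spack.spec import Spec
    from spack.graph import graph_dot, graph_ascii

    spec = Spec('mpileaks')
    spec.concretize()

    # dynamic: nodes are concrete specs, labeled name-<dag hash>
    graph_dot([spec], deptype=('link', 'run'), out=sys.stdout)

    # static: nodes are packages; conditions and hashes are ignored
    graph_dot([Spec('mpileaks')], static=True, out=sys.stdout)

    # plain ascii rendering, what spack graph mpileaks prints
    graph_ascii(spec)

On the command line the same choices appear as spack graph --dot, spack graph --dot --static, and spack graph --dot --installed for every spec in the database.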
@@ -43,8 +46,21 @@ def setup_parser(subparser): help="Generate graph in dot format and print to stdout.") subparser.add_argument( - '--concretize', action='store_true', - help="Concretize specs before graphing.") + '--normalize', action='store_true', + help="Skip concretization; only print normalized spec.") + + subparser.add_argument( + '-s', '--static', action='store_true', + help="Use static information from packages, not dynamic spec info.") + + subparser.add_argument( + '-i', '--installed', action='store_true', + help="Graph all installed specs in dot format (implies --dot).") + + subparser.add_argument( + '-t', '--deptype', action='store', + help="Comma-separated list of deptypes to traverse. default=%s." + % ','.join(alldeps)) subparser.add_argument( 'specs', nargs=argparse.REMAINDER, @@ -52,15 +68,29 @@ def setup_parser(subparser): def graph(parser, args): - specs = spack.cmd.parse_specs( - args.specs, normalize=True, concretize=args.concretize) + concretize = not args.normalize + if args.installed: + if args.specs: + tty.die("Can't specify specs with --installed") + args.dot = True + specs = spack.installed_db.query() + + else: + specs = spack.cmd.parse_specs( + args.specs, normalize=True, concretize=concretize) if not specs: setup_parser.parser.print_help() return 1 + deptype = alldeps + if args.deptype: + deptype = tuple(args.deptype.split(',')) + validate_deptype(deptype) + deptype = canonical_deptype(deptype) + if args.dot: # Dot graph only if asked for. - graph_dot(*specs) + graph_dot(specs, static=args.static, deptype=deptype) elif specs: # ascii is default: user doesn't need to provide it explicitly graph_ascii(specs[0], debug=spack.debug) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index b875e9da99..330e08cc71 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -67,8 +67,7 @@ from heapq import * from llnl.util.lang import * from llnl.util.tty.color import * -import spack -from spack.spec import Spec +from spack.spec import * __all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot'] @@ -501,7 +500,7 @@ def graph_ascii(spec, **kwargs): graph.write(spec, color=color, out=out) -def graph_dot(*specs, **kwargs): +def graph_dot(specs, deptype=None, static=False, out=None): """Generate a graph in dot format of all provided specs. Print out a dot formatted graph of all the dependencies between @@ -510,42 +509,73 @@ def graph_dot(*specs, **kwargs): spack graph --dot qt | dot -Tpdf > spack-graph.pdf """ - out = kwargs.pop('out', sys.stdout) - check_kwargs(kwargs, graph_dot) + if out is None: + out = sys.stdout + + if deptype is None: + deptype = alldeps out.write('digraph G {\n') - out.write(' label = "Spack Dependencies"\n') out.write(' labelloc = "b"\n') out.write(' rankdir = "LR"\n') out.write(' ranksep = "5"\n') + out.write('node[\n') + out.write(' fontname=Monaco,\n') + out.write(' penwidth=2,\n') + out.write(' fontsize=12,\n') + out.write(' margin=.1,\n') + out.write(' shape=box,\n') + out.write(' fillcolor=lightblue,\n') + out.write(' style="rounded,filled"]\n') + out.write('\n') - def quote(string): + def q(string): return '"%s"' % string if not specs: - specs = [p.name for p in spack.repo.all_packages()] - else: - roots = specs - specs = set() - for spec in roots: - specs.update(Spec(s.name) for s in spec.normalized().traverse()) + raise ValueError("Must provide specs ot graph_dot") + + # Static graph includes anything a package COULD depend on. 
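"Could" is the operative word: the static path asks each package for everything reachable through its declared depends_on calls, using the new Package.possible_dependencies method added at the end of this commit, without concretizing and without applying version or variant conditions. Roughly (sketch, assuming the builtin repo is available):

    import spack

    pkg = spack.repo.get('mpileaks')
    names = pkg.possible_dependencies()   # a set of package names,
    print(sorted(names))                  # including 'mpileaks' itself

Because the result is a set of names rather than concrete specs, the static dot graph gets exactly one node per package, which keeps large graphs readable.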
+ if static: + names = set.union(*[s.package.possible_dependencies() for s in specs]) + specs = [Spec(name) for name in names] + + labeled = set() - deps = [] + def label(key, label): + if key not in labeled: + out.write(' "%s" [label="%s"]\n' % (key, label)) + labeled.add(key) + + deps = set() for spec in specs: - out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name)) + if static: + out.write(' "%s" [label="%s"]\n' % (spec.name, spec.name)) + + # Skip virtual specs (we'll find out about them from concrete ones. + if spec.virtual: + continue + + # Add edges for each depends_on in the package. + for dep_name, dep in spec.package.dependencies.iteritems(): + deps.add((spec.name, dep_name)) - # Skip virtual specs (we'll find out about them from concrete ones. - if spec.virtual: - continue + # If the package provides something, add an edge for that. + for provider in set(s.name for s in spec.package.provided): + deps.add((provider, spec.name)) + + else: + def key_label(s): + return s.dag_hash(), "%s-%s" % (s.name, s.dag_hash(7)) - # Add edges for each depends_on in the package. - for dep_name, dep in spec.package.dependencies.iteritems(): - deps.append((spec.name, dep_name)) + for s in spec.traverse(deptype=deptype): + skey, slabel = key_label(s) + out.write(' "%s" [label="%s"]\n' % (skey, slabel)) - # If the package provides something, add an edge for that. - for provider in set(s.name for s in spec.package.provided): - deps.append((provider, spec.name)) + for d in s.dependencies(deptype=deptype): + dkey, _ = key_label(d) + deps.add((skey, dkey)) out.write('\n') diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fe19e5d400..cffc795586 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -411,6 +411,20 @@ class Package(object): if self.is_extension: spack.repo.get(self.extendee_spec)._check_extendable() + def possible_dependencies(self, visited=None): + """Return set of possible transitive dependencies of this package.""" + if visited is None: + visited = set() + + visited.add(self.name) + for name in self.dependencies: + if name not in visited and not spack.spec.Spec(name).virtual: + pkg = spack.repo.get(name) + for name in pkg.possible_dependencies(visited): + visited.add(name) + + return visited + @property def package_dir(self): """Return the directory where the package.py file lives.""" -- cgit v1.2.3-60-g2f50 From f082d26dddcd63af21a6f72cacba54c643cf26ff Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Sep 2016 23:28:51 -0400 Subject: Fixes #1098: spack graph crashes for large graphs. - Fixed logic for collapsing backward edges - Last collapse now depends on whether prior step in left collapse sequence alrady did the collapse. --- lib/spack/spack/graph.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 330e08cc71..acb6ee50de 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -128,7 +128,7 @@ def find(seq, predicate): return -1 -# Names of different graph line states. We Record previous line +# Names of different graph line states. We record previous line # states so that we can easily determine what to do when connecting. 
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge') NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states @@ -161,6 +161,9 @@ class AsciiGraph(object): def _write_edge(self, string, index, sub=0): """Write a colored edge to the output stream.""" + # Ignore empty frontier entries (they're just collapsed) + if not self._frontier[index]: + return name = self._frontier[index][sub] edge = "@%s{%s}" % (self._name_to_color[name], string) self._out.write(edge) @@ -419,20 +422,26 @@ class AsciiGraph(object): if back: back.sort() prev_ends = [] + collapse_l1 = False for j, (b, d) in enumerate(back): self._frontier[i].remove(d) if i - b > 1: - self._back_edge_line(prev_ends, b, i, False, - 'left-1') + collapse_l1 = any(not e for e in self._frontier) + self._back_edge_line( + prev_ends, b, i, collapse_l1, 'left-1') del prev_ends[:] prev_ends.append(b) # Check whether we did ALL the deps as back edges, # in which case we're done. - collapse = not self._frontier[i] - if collapse: + pop = not self._frontier[i] + collapse_l2 = pop + if collapse_l1: + collapse_l2 = False + if pop: self._frontier.pop(i) - self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2') + self._back_edge_line( + prev_ends, -1, -1, collapse_l2, 'left-2') elif len(self._frontier[i]) > 1: # Expand forward after doing all back connections -- cgit v1.2.3-60-g2f50 From 05d52752fff1a6435f9173a2e1a6c6ab45232c3c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Sep 2016 23:32:18 -0400 Subject: Make graph_ascii support deptypes. - fix deptype support - by default, graph command omits build depedencies - update docs to use deptype args --- .gitignore | 1 + lib/spack/docs/configuration.rst | 2 +- lib/spack/docs/packaging_guide.rst | 19 ++++++++++++++++--- lib/spack/spack/cmd/graph.py | 4 ++-- lib/spack/spack/graph.py | 32 ++++++++++++++------------------ lib/spack/spack/spec.py | 6 +++--- 6 files changed, 37 insertions(+), 27 deletions(-) (limited to 'lib') diff --git a/.gitignore b/.gitignore index 072bf30c07..e6200a0676 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +/db /var/spack/stage /var/spack/cache /var/spack/repos/*/index.yaml diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index ba534d1e62..6de823c845 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -207,7 +207,7 @@ supply ``-p`` to Spack on the command line, before any subcommands. ``spack --profile`` output looks like this: -.. command-output:: spack --profile graph dyninst +.. command-output:: spack --profile graph --deptype=nobuild dyninst :ellipsis: 25 The bottom of the output shows the top most time consuming functions, diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 0294f32748..70cd58f6c1 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2888,9 +2888,22 @@ dependency graph. For example: .. command-output:: spack graph mpileaks -At the top is the root package in the DAG, with dependency edges -emerging from it. On a color terminal, the edges are colored by which -dependency they lead to. +At the top is the root package in the DAG, with dependency edges emerging +from it. On a color terminal, the edges are colored by which dependency +they lead to. + +.. command-output:: spack graph --deptype=all mpileaks + +The ``deptype`` argument tells Spack what types of dependencies to graph. +By default it includes link and run dependencies but not build +dependencies. 
Supplying ``--deptype=all`` will show the build +dependencies as well. This is equivalent to +``--deptype=build,link,run``. Options for ``deptype`` include: + +* Any combination of ``build``, ``link``, and ``run`` separated by + commas. +* ``nobuild``, ``nolink``, ``norun`` to omit one type. +* ``all`` or ``alldeps`` for all types of dependencies. You can also use ``spack graph`` to generate graphs in the widely used `Dot `_ format. For diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 6de1ed974b..3f7ab8bc2f 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -83,7 +83,7 @@ def graph(parser, args): setup_parser.parser.print_help() return 1 - deptype = alldeps + deptype = nobuild if args.deptype: deptype = tuple(args.deptype.split(',')) validate_deptype(deptype) @@ -93,7 +93,7 @@ def graph(parser, args): graph_dot(specs, static=args.static, deptype=deptype) elif specs: # ascii is default: user doesn't need to provide it explicitly - graph_ascii(specs[0], debug=spack.debug) + graph_ascii(specs[0], debug=spack.debug, deptype=deptype) for spec in specs[1:]: print # extra line bt/w independent graphs graph_ascii(spec, debug=spack.debug) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index acb6ee50de..f474799275 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -72,16 +72,15 @@ from spack.spec import * __all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot'] -def topological_sort(spec, **kwargs): +def topological_sort(spec, reverse=False, deptype=None): """Topological sort for specs. Return a list of dependency specs sorted topologically. The spec argument is not modified in the process. """ - reverse = kwargs.get('reverse', False) - # XXX(deptype): iterate over a certain kind of dependency. Maybe color - # edges based on the type of dependency? + deptype = canonical_deptype(deptype) + if not reverse: parents = lambda s: s.dependents() children = lambda s: s.dependencies() @@ -90,7 +89,7 @@ def topological_sort(spec, **kwargs): children = lambda s: s.dependents() # Work on a copy so this is nondestructive. - spec = spec.copy() + spec = spec.copy(deps=deptype) nodes = spec.index() topo_order = [] @@ -142,6 +141,7 @@ class AsciiGraph(object): self.node_character = '*' self.debug = False self.indent = 0 + self.deptype = alldeps # These are colors in the order they'll be used for edges. # See llnl.util.tty.color for details on color characters. @@ -388,7 +388,7 @@ class AsciiGraph(object): self._out = ColorStream(sys.stdout, color=color) # We'll traverse the spec in topo order as we graph it. - topo_order = topological_sort(spec, reverse=True) + topo_order = topological_sort(spec, reverse=True, deptype=self.deptype) # Work on a copy to be nondestructive spec = spec.copy() @@ -484,27 +484,23 @@ class AsciiGraph(object): # Replace node with its dependencies self._frontier.pop(i) - if node.dependencies(): - deps = sorted((d.name for d in node.dependencies()), - reverse=True) + deps = node.dependencies(self.deptype) + if deps: + deps = sorted((d.name for d in deps), reverse=True) self._connect_deps(i, deps, "new-deps") # anywhere. 
elif self._frontier: self._collapse_line(i) -def graph_ascii(spec, **kwargs): - node_character = kwargs.get('node', 'o') - out = kwargs.pop('out', None) - debug = kwargs.pop('debug', False) - indent = kwargs.pop('indent', 0) - color = kwargs.pop('color', None) - check_kwargs(kwargs, graph_ascii) - +def graph_ascii(spec, node='o', out=None, debug=False, + indent=0, color=None, deptype=None): graph = AsciiGraph() graph.debug = debug graph.indent = indent - graph.node_character = node_character + graph.node_character = node + if deptype: + graph.deptype = canonical_deptype(deptype) graph.write(spec, color=color, out=out) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 108e771748..ba9cea876d 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -194,6 +194,7 @@ nobuild = ('link', 'run') norun = ('link', 'build') special_types = { 'alldeps': alldeps, + 'all': alldeps, # allow "all" as string but not symbol. 'nolink': nolink, 'nobuild': nobuild, 'norun': norun, @@ -1418,12 +1419,11 @@ class Spec(object): # parser doesn't allow it. Spack must be broken! raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) - def index(self): + def index(self, deptype=None): """Return DependencyMap that points to all the dependencies in this spec.""" dm = DependencyMap() - # XXX(deptype): use a deptype kwarg. - for spec in self.traverse(): + for spec in self.traverse(deptype=deptype): dm[spec.name] = spec return dm -- cgit v1.2.3-60-g2f50 From 6df84a794b27d3c16f5d72fc2e11f7f61b109859 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 28 Sep 2016 11:09:31 -0500 Subject: Add single letter arg flags for spack graph (#1871) --- lib/spack/spack/cmd/graph.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 3f7ab8bc2f..ed3aed1946 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -39,14 +39,14 @@ def setup_parser(subparser): method = subparser.add_mutually_exclusive_group() method.add_argument( - '--ascii', action='store_true', + '-a', '--ascii', action='store_true', help="Draw graph as ascii to stdout (default).") method.add_argument( - '--dot', action='store_true', + '-d', '--dot', action='store_true', help="Generate graph in dot format and print to stdout.") subparser.add_argument( - '--normalize', action='store_true', + '-n', '--normalize', action='store_true', help="Skip concretization; only print normalized spec.") subparser.add_argument( -- cgit v1.2.3-60-g2f50 From cb229f084239a1a8686785d3f453af5572d5f676 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 28 Sep 2016 15:00:26 -0400 Subject: Fixes #1720: spack reindex fails with invalid ref count. (#1867) * Fixes #1720: spack reindex fails with invalid ref count. - Database graph wasn't being built properly; dependencies were set up incorrectly in the nodes that ended up in the graph on reindex. - Reworked _add to increment ref count properly and to always build bottom-up to make the logic simpler to understand. * Add checks to ensure DB is a valid merkle tree. 
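"Valid merkle tree" here means the in-memory DAG rebuilt from the database is deduplicated by hash: any two dependency edges that carry the same DAG hash must point at the very same Spec object, not at equal but distinct copies. The new _check_merkleiness test below checks exactly that; condensed to a few lines (a sketch against the database API):

    import spack

    seen = {}
    for spec in spack.installed_db.query(installed=any):
        for dep in spec.dependencies():
            key = dep.dag_hash()
            assert seen.setdefault(key, id(dep)) == id(dep)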
--- lib/spack/spack/database.py | 99 ++++++++++++++++++++++------------------ lib/spack/spack/package.py | 3 +- lib/spack/spack/test/database.py | 27 +++++++++-- 3 files changed, 80 insertions(+), 49 deletions(-) (limited to 'lib') diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index e450b4d424..1adad5e90b 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -50,7 +50,7 @@ from llnl.util.lock import * import spack.spec from spack.version import Version -from spack.spec import Spec +from spack.spec import * from spack.error import SpackError from spack.repository import UnknownPackageError import spack.util.spack_yaml as syaml @@ -64,6 +64,9 @@ _db_version = Version('0.9.2') # Default timeout for spack database locks is 5 min. _db_lock_timeout = 60 +# Types of dependencies tracked by the database +_tracked_deps = nobuild + def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -232,8 +235,6 @@ class Database(object): spec.format('$_$#'), dname, dhash[:7])) continue - # defensive copy (not sure everything handles extra - # parent links yet) child = data[dhash].spec spec._add_dependency(child, dtypes) @@ -328,7 +329,7 @@ class Database(object): self._data = data def reindex(self, directory_layout): - """Build database index from scratch based from a directory layout. + """Build database index from scratch based on a directory layout. Locks the DB if it isn't locked already. @@ -359,9 +360,6 @@ class Database(object): # Ask the directory layout to traverse the filesystem. for spec in directory_layout.all_specs(): - # Create a spec for each known package and add it. - path = directory_layout.path_for_spec(spec) - # Try to recover explicit value from old DB, but # default it to False if DB was corrupt. explicit = False @@ -370,7 +368,7 @@ class Database(object): if old_info is not None: explicit = old_info.explicit - self._add(spec, path, directory_layout, explicit=explicit) + self._add(spec, directory_layout, explicit=explicit) self._check_ref_counts() @@ -389,10 +387,7 @@ class Database(object): counts = {} for key, rec in self._data.items(): counts.setdefault(key, 0) - # XXX(deptype): This checks all dependencies, but build - # dependencies might be able to be dropped in the - # future. - for dep in rec.spec.dependencies(): + for dep in rec.spec.dependencies(_tracked_deps): dep_key = dep.dag_hash() counts.setdefault(dep_key, 0) counts[dep_key] += 1 @@ -450,52 +445,62 @@ class Database(object): # reindex() takes its own write lock, so no lock here. self.reindex(spack.install_layout) - def _add(self, spec, path, directory_layout=None, explicit=False): - """Add an install record for spec at path to the database. + def _add(self, spec, directory_layout=None, explicit=False): + """Add an install record for this spec to the database. - This assumes that the spec is not already installed. It - updates the ref counts on dependencies of the spec in the DB. + Assumes spec is installed in ``layout.path_for_spec(spec)``. - This operation is in-memory, and does not lock the DB. + Also ensures dependencies are present and updated in the DB as + either intsalled or missing. """ - key = spec.dag_hash() - if key in self._data: - rec = self._data[key] - rec.installed = True + if not spec.concrete: + raise NonConcreteSpecAddError( + "Specs added to DB must be concrete.") - # TODO: this overwrites a previous install path (when path != - # self._data[key].path), and the old path still has a - # dependent in the DB. 
We could consider re-RPATH-ing the - # dependents. This case is probably infrequent and may not be - # worth fixing, but this is where we can discover it. - rec.path = path - - else: - self._data[key] = InstallRecord(spec, path, True, - explicit=explicit) - for dep in spec.dependencies(('link', 'run')): - self._increment_ref_count(dep, directory_layout) + for dep in spec.dependencies(_tracked_deps): + dkey = dep.dag_hash() + if dkey not in self._data: + self._add(dep, directory_layout, explicit=False) - def _increment_ref_count(self, spec, directory_layout=None): - """Recursively examine dependencies and update their DB entries.""" key = spec.dag_hash() if key not in self._data: installed = False path = None if directory_layout: path = directory_layout.path_for_spec(spec) - installed = os.path.isdir(path) + try: + directory_layout.check_installed(spec) + installed = True + except DirectoryLayoutError as e: + tty.warn( + 'Dependency missing due to corrupt install directory:', + path, str(e)) + + # Create a new install record with no deps initially. + new_spec = spec.copy(deps=False) + self._data[key] = InstallRecord( + new_spec, path, installed, ref_count=0, explicit=explicit) + + # Connect dependencies from the DB to the new copy. + for name, dep in spec.dependencies_dict(_tracked_deps).iteritems(): + dkey = dep.spec.dag_hash() + new_spec._add_dependency(self._data[dkey].spec, dep.deptypes) + self._data[dkey].ref_count += 1 + + # Mark concrete once everything is built, and preserve + # the original hash of concrete specs. + new_spec._mark_concrete() + new_spec._hash = key - self._data[key] = InstallRecord(spec.copy(), path, installed) - - for dep in spec.dependencies(('link', 'run')): - self._increment_ref_count(dep) + else: + # If it is already there, mark it as installed. + self._data[key].installed = True - self._data[key].ref_count += 1 + self._data[key].explicit = explicit @_autospec - def add(self, spec, path, explicit=False): + def add(self, spec, directory_layout, explicit=False): """Add spec at path to database, locking and reading DB to sync. ``add()`` will lock and read from the DB on disk. @@ -504,7 +509,7 @@ class Database(object): # TODO: ensure that spec is concrete? # Entire add is transactional. 
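The practical effect of building records bottom-up is that reference counts stay consistent regardless of the order in which specs are added or reindexed: every tracked (link or run) dependency record ends up with a ref_count equal to the number of records that point at it. A quick consistency probe in the spirit of _check_ref_counts above (sketch; it pokes at private state, so illustrative only):

    import spack

    db = spack.installed_db
    with db.read_transaction():
        counts = {}
        for key, rec in db._data.items():
            counts.setdefault(key, 0)
            for dep in rec.spec.dependencies(('link', 'run')):
                counts[dep.dag_hash()] = counts.get(dep.dag_hash(), 0) + 1
        for key, rec in db._data.items():
            assert rec.ref_count == counts.get(key, 0)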
with self.write_transaction(): - self._add(spec, path, explicit=explicit) + self._add(spec, directory_layout, explicit=explicit) def _get_matching_spec_key(self, spec, **kwargs): """Get the exact spec OR get a single spec that matches.""" @@ -534,7 +539,7 @@ class Database(object): if rec.ref_count == 0 and not rec.installed: del self._data[key] - for dep in spec.dependencies('link'): + for dep in spec.dependencies(_tracked_deps): self._decrement_ref_count(dep) def _remove(self, spec): @@ -548,7 +553,7 @@ class Database(object): return rec.spec del self._data[key] - for dep in rec.spec.dependencies('link'): + for dep in rec.spec.dependencies(_tracked_deps): self._decrement_ref_count(dep) # Returns the concrete spec so we know it in the case where a @@ -657,6 +662,10 @@ class CorruptDatabaseError(SpackError): """Raised when errors are found while reading the database.""" +class NonConcreteSpecAddError(SpackError): + """Raised when attemptint to add non-concrete spec to DB.""" + + class InvalidDatabaseVersionError(SpackError): def __init__(self, expected, found): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index cffc795586..cb7f6b3f39 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1053,7 +1053,8 @@ class Package(object): # note: PARENT of the build process adds the new package to # the database, so that we don't need to re-read from file. - spack.installed_db.add(self.spec, self.prefix, explicit=explicit) + spack.installed_db.add( + self.spec, spack.install_layout, explicit=explicit) def sanity_check_prefix(self): """This function checks whether install succeeded.""" diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index b114bbacb8..4395f17f97 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -124,6 +124,19 @@ class DatabaseTest(MockDatabase): self.assertEqual(new_rec.path, rec.path) self.assertEqual(new_rec.installed, rec.installed) + def _check_merkleiness(self): + """Ensure the spack database is a valid merkle graph.""" + all_specs = spack.installed_db.query(installed=any) + + seen = {} + for spec in all_specs: + for dep in spec.dependencies(): + hash_key = dep.dag_hash() + if hash_key not in seen: + seen[hash_key] = id(dep) + else: + self.assertEqual(seen[hash_key], id(dep)) + def _check_db_sanity(self): """Utiilty function to check db against install layout.""" expected = sorted(spack.install_layout.all_specs()) @@ -133,10 +146,17 @@ class DatabaseTest(MockDatabase): for e, a in zip(expected, actual): self.assertEqual(e, a) + self._check_merkleiness() + def test_020_db_sanity(self): """Make sure query() returns what's actually in the db.""" self._check_db_sanity() + def test_025_reindex(self): + """Make sure reindex works and ref counts are valid.""" + spack.installed_db.reindex(spack.install_layout) + self._check_db_sanity() + def test_030_db_sanity_from_another_process(self): def read_and_modify(): self._check_db_sanity() # check that other process can read DB @@ -203,9 +223,10 @@ class DatabaseTest(MockDatabase): self.assertTrue(concrete_spec not in remaining) # add it back and make sure everything is ok. - self.installed_db.add(concrete_spec, "") + self.installed_db.add(concrete_spec, spack.install_layout) installed = self.installed_db.query() - self.assertEqual(len(installed), len(original)) + self.assertTrue(concrete_spec in installed) + self.assertEqual(installed, original) # sanity check against direcory layout and check ref counts. 
self._check_db_sanity() @@ -233,7 +254,7 @@ class DatabaseTest(MockDatabase): self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1) # Put the spec back - self.installed_db.add(rec.spec, rec.path) + self.installed_db.add(rec.spec, spack.install_layout) # record is present again self.assertEqual( -- cgit v1.2.3-60-g2f50 From 295742e2b56b8731ff4651354cb754b2176fb6c2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 28 Sep 2016 15:18:52 -0400 Subject: Fixes #1860. Subversion builds sequentially due to race. --- lib/spack/spack/graph.py | 2 +- var/spack/repos/builtin/packages/subversion/package.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index f474799275..9753741c61 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -522,7 +522,7 @@ def graph_dot(specs, deptype=None, static=False, out=None): out.write('digraph G {\n') out.write(' labelloc = "b"\n') - out.write(' rankdir = "LR"\n') + out.write(' rankdir = "TB"\n') out.write(' ranksep = "5"\n') out.write('node[\n') out.write(' fontname=Monaco,\n') diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py index 26d143e4aa..02b7d96378 100644 --- a/var/spack/repos/builtin/packages/subversion/package.py +++ b/var/spack/repos/builtin/packages/subversion/package.py @@ -46,6 +46,9 @@ class Subversion(Package): # depends_on('perl') # depends_on('ruby') + # Installation has race cases. + parallel = False + def install(self, spec, prefix): # configure, build, install: -- cgit v1.2.3-60-g2f50 From 8d1ec0df3d4af0f6385ccd017aaad41d82cf96ef Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Fri, 30 Sep 2016 18:45:08 +0200 Subject: Fix read locks on read-only file systems (#1857) --- lib/spack/llnl/util/lock.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'lib') diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index aa8272d5fe..b5e3a3a8f8 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -70,7 +70,8 @@ class Lock(object): while (time.time() - start_time) < timeout: try: if self._fd is None: - self._fd = os.open(self._file_path, os.O_RDWR) + mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY + self._fd = os.open(self._file_path, mode) fcntl.lockf(self._fd, op | fcntl.LOCK_NB) if op == fcntl.LOCK_EX: -- cgit v1.2.3-60-g2f50 From 36c79e9df61b8a3dbddc6a456a3a5ce3cc7a073c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 4 Oct 2016 01:49:07 -0700 Subject: Fixes #1893: missing import --- lib/spack/spack/database.py | 1 + 1 file changed, 1 insertion(+) (limited to 'lib') diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 1adad5e90b..f73d3765c8 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -49,6 +49,7 @@ from llnl.util.filesystem import * from llnl.util.lock import * import spack.spec +from spack.directory_layout import DirectoryLayoutError from spack.version import Version from spack.spec import * from spack.error import SpackError -- cgit v1.2.3-60-g2f50 From e9d4780bbc307f37e6385fc80c8fea1abaab5782 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 4 Oct 2016 09:40:28 -0700 Subject: Rework build environment and cc to use smaller RPATHs. 
(#1894) - Fixed up dependency management so that: - build deps go in PATH and -I - link deps go in -L args - only *immediate* link deps are RPATH'd The latter reduces the number of libraries that need to be added to DT_NEEDED / LC_RPATH. This removes redundant RPATHs to transitive dependencies. --- lib/spack/env/cc | 32 ++++++++++++----- lib/spack/spack/build_environment.py | 23 ++++++++++--- lib/spack/spack/test/cc.py | 66 ++++++++++++++++++++++++++++++++++++ 3 files changed, 108 insertions(+), 13 deletions(-) (limited to 'lib') diff --git a/lib/spack/env/cc b/lib/spack/env/cc index c6bb50d261..4b8922178a 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -266,22 +266,38 @@ for dep in "${deps[@]}"; do # Prepend lib and RPATH directories if [[ -d $dep/lib ]]; then if [[ $mode == ccld ]]; then - $add_rpaths && args=("$rpath$dep/lib" "${args[@]}") - args=("-L$dep/lib" "${args[@]}") + if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then + $add_rpaths && args=("$rpath$dep/lib" "${args[@]}") + fi + if [[ $SPACK_LINK_DEPS == *$dep* ]]; then + args=("-L$dep/lib" "${args[@]}") + fi elif [[ $mode == ld ]]; then - $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}") - args=("-L$dep/lib" "${args[@]}") + if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then + $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}") + fi + if [[ $SPACK_LINK_DEPS == *$dep* ]]; then + args=("-L$dep/lib" "${args[@]}") + fi fi fi # Prepend lib64 and RPATH directories if [[ -d $dep/lib64 ]]; then if [[ $mode == ccld ]]; then - $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}") - args=("-L$dep/lib64" "${args[@]}") + if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then + $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}") + fi + if [[ $SPACK_LINK_DEPS == *$dep* ]]; then + args=("-L$dep/lib64" "${args[@]}") + fi elif [[ $mode == ld ]]; then - $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}") - args=("-L$dep/lib64" "${args[@]}") + if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then + $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}") + fi + if [[ $SPACK_LINK_DEPS == *$dep* ]]; then + args=("-L$dep/lib64" "${args[@]}") + fi fi fi done diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 9b5ed367d1..c3fa7418e8 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -77,6 +77,8 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE' # SPACK_ENV_PATH = 'SPACK_ENV_PATH' SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' +SPACK_RPATH_DEPS = 'SPACK_RPATH_DEPS' +SPACK_LINK_DEPS = 'SPACK_LINK_DEPS' SPACK_PREFIX = 'SPACK_PREFIX' SPACK_INSTALL = 'SPACK_INSTALL' SPACK_DEBUG = 'SPACK_DEBUG' @@ -254,9 +256,15 @@ def set_build_environment_variables(pkg, env, dirty=False): env.set_path(SPACK_ENV_PATH, env_paths) # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES - dep_prefixes = [d.prefix - for d in pkg.spec.traverse(root=False, deptype='build')] + dep_prefixes = [d.prefix for d in + pkg.spec.traverse(root=False, deptype=('build', 'link'))] env.set_path(SPACK_DEPENDENCIES, dep_prefixes) + + # These variables control compiler wrapper behavior + env.set_path(SPACK_RPATH_DEPS, [d.prefix for d in get_rpath_deps(pkg)]) + env.set_path(SPACK_LINK_DEPS, [ + d.prefix for d in pkg.spec.traverse(root=False, deptype=('link'))]) + # Add dependencies to CMAKE_PREFIX_PATH env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) @@ -288,8 +296,8 @@ def set_build_environment_variables(pkg, env, dirty=False): env.remove_path('PATH', p) # Add bin directories from dependencies to the PATH for the 
build. - bin_dirs = reversed( - filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes])) + bin_dirs = reversed(filter(os.path.isdir, [ + '%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')])) for item in bin_dirs: env.prepend_path('PATH', item) @@ -382,10 +390,15 @@ def set_module_variables_for_package(pkg, module): m.dso_suffix = dso_suffix +def get_rpath_deps(pkg): + """We only need to RPATH immediate dependencies.""" + return pkg.spec.dependencies(deptype='link') + + def get_rpaths(pkg): """Get a list of all the rpaths for a package.""" rpaths = [pkg.prefix.lib, pkg.prefix.lib64] - deps = pkg.spec.dependencies(deptype='link') + deps = get_rpath_deps(pkg) rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib)) rpaths.extend(d.prefix.lib64 for d in deps diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index f3e4bb31d2..73c711724c 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -223,6 +223,8 @@ class CompilerTest(unittest.TestCase): def test_dep_include(self): """Ensure a single dependency include directory is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep4 + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] self.check_cc('dump-args', test_command, self.realcc + ' ' + '-Wl,-rpath,' + self.prefix + '/lib ' + @@ -233,6 +235,8 @@ class CompilerTest(unittest.TestCase): def test_dep_lib(self): """Ensure a single dependency RPATH is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep2 + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] self.check_cc('dump-args', test_command, self.realcc + ' ' + '-Wl,-rpath,' + self.prefix + '/lib ' + @@ -241,10 +245,34 @@ class CompilerTest(unittest.TestCase): '-Wl,-rpath,' + self.dep2 + '/lib64 ' + ' '.join(test_command)) + def test_dep_lib_no_rpath(self): + """Ensure a single dependency link flag is added with no dep RPATH.""" + os.environ['SPACK_DEPENDENCIES'] = self.dep2 + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-L' + self.dep2 + '/lib64 ' + + ' '.join(test_command)) + + def test_dep_lib_no_lib(self): + """Ensure a single dependency RPATH is added with no -L.""" + os.environ['SPACK_DEPENDENCIES'] = self.dep2 + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-Wl,-rpath,' + self.dep2 + '/lib64 ' + + ' '.join(test_command)) + def test_all_deps(self): """Ensure includes and RPATHs for all deps are added. 
""" os.environ['SPACK_DEPENDENCIES'] = ':'.join([ self.dep1, self.dep2, self.dep3, self.dep4]) + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] # This is probably more constrained than it needs to be; it # checks order within prepended args and doesn't strictly have @@ -273,6 +301,8 @@ class CompilerTest(unittest.TestCase): """Ensure no (extra) -I args or -Wl, are passed in ld mode.""" os.environ['SPACK_DEPENDENCIES'] = ':'.join([ self.dep1, self.dep2, self.dep3, self.dep4]) + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] self.check_ld('dump-args', test_command, 'ld ' + @@ -290,10 +320,46 @@ class CompilerTest(unittest.TestCase): ' '.join(test_command)) + def test_ld_deps_no_rpath(self): + """Ensure SPACK_RPATH_DEPS controls RPATHs for ld.""" + os.environ['SPACK_DEPENDENCIES'] = ':'.join([ + self.dep1, self.dep2, self.dep3, self.dep4]) + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + + self.check_ld('dump-args', test_command, + 'ld ' + + '-rpath ' + self.prefix + '/lib ' + + '-rpath ' + self.prefix + '/lib64 ' + + + '-L' + self.dep3 + '/lib64 ' + + '-L' + self.dep2 + '/lib64 ' + + '-L' + self.dep1 + '/lib ' + + + ' '.join(test_command)) + + def test_ld_deps_no_link(self): + """Ensure SPACK_LINK_DEPS controls -L for ld.""" + os.environ['SPACK_DEPENDENCIES'] = ':'.join([ + self.dep1, self.dep2, self.dep3, self.dep4]) + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + + self.check_ld('dump-args', test_command, + 'ld ' + + '-rpath ' + self.prefix + '/lib ' + + '-rpath ' + self.prefix + '/lib64 ' + + + '-rpath ' + self.dep3 + '/lib64 ' + + '-rpath ' + self.dep2 + '/lib64 ' + + '-rpath ' + self.dep1 + '/lib ' + + + ' '.join(test_command)) + def test_ld_deps_reentrant(self): """Make sure ld -r is handled correctly on OS's where it doesn't support rpaths.""" os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1]) + os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES'] + os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES'] os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64" reentrant_test_command = ['-r'] + test_command -- cgit v1.2.3-60-g2f50 From 544fa3eddb2e0e3830439204888a58a9c0e186e2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 4 Oct 2016 12:37:57 -0700 Subject: Fix some documentation build warnings. (#1902) - Fix issue with package_list.py regeneration confusing Sphinx. - Add -E option to avoid caching and make Sphinx happy. --- lib/spack/docs/Makefile | 2 +- lib/spack/docs/command_index.in | 3 +-- lib/spack/docs/conf.py | 23 +++++++++++------------ 3 files changed, 13 insertions(+), 15 deletions(-) (limited to 'lib') diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile index bdbdab7e8b..1b56959aad 100644 --- a/lib/spack/docs/Makefile +++ b/lib/spack/docs/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -E SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in index 94cdf38109..8c5c97dd9c 100644 --- a/lib/spack/docs/command_index.in +++ b/lib/spack/docs/command_index.in @@ -1,5 +1,4 @@ -.. 
From bff1656a1a842b4de217129cc8cba732a30928a7 Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Tue, 4 Oct 2016 15:36:37 -0700
Subject: Read-only locks should close fd before opening for write. (#1906)

- Fixes bad file descriptor error in lock acquire, #1904
- Fix bug introduced in previous PR #1857
- Backported fix from soon-to-be merged fine-grained DB locking branch.
---
 lib/spack/llnl/util/lock.py  |  8 ++++++++
 lib/spack/spack/test/lock.py | 29 +++++++++++++++++++++++++++++
 2 files changed, 37 insertions(+)

(limited to 'lib')

diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index b5e3a3a8f8..f5f53101ae 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -69,6 +69,14 @@ class Lock(object):
         start_time = time.time()
         while (time.time() - start_time) < timeout:
             try:
+                # If this is already open read-only and we want to
+                # upgrade to an exclusive write lock, close first.
+                if self._fd is not None:
+                    flags = fcntl.fcntl(self._fd, fcntl.F_GETFL)
+                    if op == fcntl.LOCK_EX and flags | os.O_RDONLY:
+                        os.close(self._fd)
+                        self._fd = None
+
                 if self._fd is None:
                     mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
                     self._fd = os.open(self._file_path, mode)
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
index fb96539897..32cbe13ce1 100644
--- a/lib/spack/spack/test/lock.py
+++ b/lib/spack/spack/test/lock.py
@@ -157,6 +157,35 @@ class LockTest(unittest.TestCase):
             self.acquire_read, self.acquire_read, self.acquire_read,
             self.timeout_write, self.timeout_write)
 
+    #
+    # Test that read can be upgraded to write.
+    #
+    def test_upgrade_read_to_write(self):
+        # ensure lock file exists the first time, so we open it read-only
+        # to begin with.
+        touch(self.lock_path)
+
+        lock = Lock(self.lock_path)
+        self.assertTrue(lock._reads == 0)
+        self.assertTrue(lock._writes == 0)
+
+        lock.acquire_read()
+        self.assertTrue(lock._reads == 1)
+        self.assertTrue(lock._writes == 0)
+
+        lock.acquire_write()
+        self.assertTrue(lock._reads == 1)
+        self.assertTrue(lock._writes == 1)
+
+        lock.release_write()
+        self.assertTrue(lock._reads == 1)
+        self.assertTrue(lock._writes == 0)
+
+        lock.release_read()
+        self.assertTrue(lock._reads == 0)
+        self.assertTrue(lock._writes == 0)
+        self.assertTrue(lock._fd is None)
+
     #
     # Longer test case that ensures locks are reusable. Ordering is
     # enforced by barriers throughout -- steps are shown with numbers.
-- 
cgit v1.2.3-60-g2f50
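The lock.py hunk works because POSIX fcntl locks tie exclusive (write) locks to descriptors opened for writing: requesting LOCK_EX on a read-only descriptor typically fails with a bad file descriptor error, which is the symptom reported in #1904. A minimal standalone sketch of the upgrade sequence, using plain fcntl rather than the Spack Lock class ('demo.lock' is a placeholder path):

    # Sketch of the read-to-write upgrade; plain fcntl, not Spack's Lock.
    import fcntl
    import os

    path = 'demo.lock'
    open(path, 'a').close()          # make sure the lock file exists

    fd = os.open(path, os.O_RDONLY)  # read-only is fine for a shared lock
    fcntl.lockf(fd, fcntl.LOCK_SH)

    # Upgrade: an exclusive lock needs a writable descriptor, so release,
    # close, reopen read-write, and lock again, mirroring what the patch
    # does before it reopens the descriptor.
    fcntl.lockf(fd, fcntl.LOCK_UN)
    os.close(fd)
    fd = os.open(path, os.O_RDWR)
    fcntl.lockf(fd, fcntl.LOCK_EX)

    # ... critical section ...

    fcntl.lockf(fd, fcntl.LOCK_UN)
    os.close(fd)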