From 132c32076affebe1dd22b833befebe4e57dbb30e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 28 Oct 2014 16:44:35 -0700 Subject: Add Muster parallel clustering library. --- var/spack/packages/muster/package.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/packages/muster/package.py diff --git a/var/spack/packages/muster/package.py b/var/spack/packages/muster/package.py new file mode 100644 index 0000000000..31d03f4b45 --- /dev/null +++ b/var/spack/packages/muster/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Muster(Package): + """The Muster library provides implementations of sequential and + parallel K-Medoids clustering algorithms. It is intended as a + general framework for parallel cluster analysis, particularly + for performance data analysis on systems with very large + numbers of processes. + """ + homepage = "https://github.com/scalability-llnl/muster" + url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz" + + version('1.0', '2eec6979a4a36d3a65a792d12969be16') + + depends_on("boost") + depends_on("mpi") + + def install(self, spec, prefix): + cmake(".", *std_cmake_args) + make() + make("install") -- cgit v1.2.3-70-g09d2 From f60fd330cb1a9e5bc420e3a628ae964198083e00 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 30 Oct 2014 15:00:02 -0700 Subject: Better error messages for extension() --- lib/spack/spack/util/compression.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index b4db266fd3..fd17785ad0 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -61,13 +61,15 @@ def strip_extension(path): def extension(path): """Get the archive extension for a path.""" + if path is None: + raise ValueError("Can't call extension() on None") # Strip sourceforge suffix. if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path): path = os.path.dirname(path) - for type in ALLOWED_ARCHIVE_TYPES: - suffix = r'\.%s$' % type + for t in ALLOWED_ARCHIVE_TYPES: + suffix = r'\.%s$' % t if re.search(suffix, path): - return type + return t return None -- cgit v1.2.3-70-g09d2 From 85a14b68b75ef4e024cc84ab8ebdc4e9de4a23e1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 31 Oct 2014 10:30:58 -0700 Subject: spack compiler add checks for access before listing directories. 
--- lib/spack/spack/compiler.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 35e3b898ec..8debf518c7 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -169,6 +169,10 @@ class Compiler(object): checks = [] for directory in path: + if not (os.path.isdir(directory) and + os.access(directory, os.R_OK | os.X_OK)): + continue + files = os.listdir(directory) for exe in files: full_path = join_path(directory, exe) -- cgit v1.2.3-70-g09d2 From 3db22a4e3317cb58e711efef71850eac85c4c48a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 1 Nov 2014 14:58:13 -0700 Subject: Sane self.url for packages (reflects current version) --- lib/spack/spack/error.py | 6 ++++++ lib/spack/spack/fetch_strategy.py | 2 +- lib/spack/spack/package.py | 35 +++++++++++++++++++---------------- lib/spack/spack/stage.py | 3 ++- 4 files changed, 28 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 8bcbd83c28..e483ea613b 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -33,6 +33,12 @@ class SpackError(Exception): self.long_message = long_message + def __str__(self): + msg = self.message + if self.long_message: + msg += "\n %s" % self.long_message + return msg + class UnsupportedPlatformError(SpackError): """Raised by packages when a platform is not supported""" def __init__(self, message): diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 98c78c2e08..80e2cdf413 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -580,7 +580,7 @@ def for_package_version(pkg, version): version() in the package description.""" # If it's not a known version, extrapolate one. if not version in pkg.versions: - url = pkg.url_for_verison(version) + url = pkg.url_for_version(version) if not url: raise InvalidArgsError(pkg, version) return URLFetchStrategy(url) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 649e772a10..7cf94ed1ef 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -368,12 +368,19 @@ class Package(object): # stage used to build this package. self._stage = None - # If there's no default URL provided, set this package's url to None - if not hasattr(self, 'url'): - self.url = None - - # Init fetch strategy to None + # Init fetch strategy and url to None self._fetcher = None + self.url = None + + # Fix up self.url if this package fetches with a URLFetchStrategy. + # This makes self.url behave sanely. + if self.spec.versions.concrete: + # TODO: this is a really roundabout way of determining the type of fetch to do. + # TODO: figure out a more sane fetch strategy/package init order + # TODO: (right now it's conflated with stage, package, and the tests make assumptions) + f = fs.for_package_version(self, self.version) + if isinstance(f, fs.URLFetchStrategy): + self.url = self.url_for_version(self.spec.version) # Set a default list URL (place to find available versions) if not hasattr(self, 'list_url'): @@ -410,7 +417,7 @@ class Package(object): *higher* URL, and if that isn't there raises an error. """ version_urls = self.version_urls() - url = self.url + url = getattr(self.__class__, 'url', None) for v in version_urls: if v > version and url: @@ -420,21 +427,15 @@ class Package(object): return url - def has_url(self): - """Returns whether there is a URL available for this package. 
- If there isn't, it's probably fetched some other way (version - control, etc.)""" - return self.url or self.version_urls() - - # TODO: move this out of here and into some URL extrapolation module? def url_for_version(self, version): """Returns a URL that you can download a new version of this package from.""" if not isinstance(version, Version): version = Version(version) - if not self.has_url(): - raise NoURLError(self.__class__) + cls = self.__class__ + if not (hasattr(cls, 'url') or self.version_urls()): + raise NoURLError(cls) # If we have a specific URL for this version, don't extrapolate. version_urls = self.version_urls() @@ -477,7 +478,7 @@ class Package(object): def mirror_path(self): """Get path to this package's archive in a mirror.""" filename = "%s-%s." % (self.name, self.version) - filename += extension(self.url) if self.has_url() else "tar.gz" + filename += extension(self.url) if self.url else "tar.gz" return "%s/%s" % (self.name, filename) @@ -709,6 +710,8 @@ class Package(object): tty.msg("%s is already installed in %s." % (self.name, self.prefix)) return + tty.msg("Installing %s" % self.name) + if not ignore_deps: self.do_install_dependencies() diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index b371761785..f09346ab9b 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -257,7 +257,8 @@ class Stage(object): fetcher.fetch() break except spack.error.SpackError, e: - tty.msg("Fetching %s failed." % fetcher) + tty.msg("Fetching from %s failed." % fetcher) + tty.debug(e) continue -- cgit v1.2.3-70-g09d2 From a5859b0b051e5d48c9dca59502f2133dda8a255c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 1 Nov 2014 15:59:29 -0700 Subject: Add ability to get subparser by name from argparse --- lib/spack/external/argparse.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py index 42b64ee7be..394e5da152 100644 --- a/lib/spack/external/argparse.py +++ b/lib/spack/external/argparse.py @@ -1708,6 +1708,21 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): self._positionals._add_action(action) return action + + def get_subparser(self, name): + """Gets a subparser added with the supplied name. + This is an extension to the standard argparse API. + """ + subpasrsers_actions = [ + action for action in self._actions + if isinstance(action, _SubParsersAction)] + for action in subpasrsers_actions: + for choice, subparser in action.choices.items(): + if choice == name: + return subparser + return None + + def _get_optional_actions(self): return [action for action in self._actions -- cgit v1.2.3-70-g09d2 From 8f9de1786943362eb3d7e719c2fd05dccfebfae0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 1 Nov 2014 16:03:09 -0700 Subject: "spack info -r" is now "spack package-list" - too much going on in this command, and it made subcommand parsing weird. - information printed is the same but info and package-list are really different commands. --- lib/spack/docs/Makefile | 2 +- lib/spack/spack/cmd/info.py | 92 ++---------------------------- lib/spack/spack/cmd/package-list.py | 108 ++++++++++++++++++++++++++++++++++++ 3 files changed, 114 insertions(+), 88 deletions(-) create mode 100644 lib/spack/spack/cmd/package-list.py diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile index e3068ea10c..a660e1255d 100644 --- a/lib/spack/docs/Makefile +++ b/lib/spack/docs/Makefile @@ -25,7 +25,7 @@ all: html # This autogenerates a package list. 
# package_list: - spack info -r > package_list.rst + spack package-list > package_list.rst # # This creates a git repository and commits generated html docs. diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 29568b8c5d..3e4ff627d5 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -22,94 +22,18 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import re -import textwrap -from StringIO import StringIO from llnl.util.tty.colify import * import spack import spack.fetch_strategy as fs description = "Get detailed information on a particular package" -def setup_parser(subparser): - subparser.add_argument('-r', '--rst', action='store_true', - help="List all packages in reStructured text, for docs.") - subparser.add_argument('name', metavar="PACKAGE", nargs='?', help="name of packages to get info on") - - -def format_doc(pkg, **kwargs): - """Wrap doc string at 72 characters and format nicely""" - indent = kwargs.get('indent', 0) - - if not pkg.__doc__: - return "" - - doc = re.sub(r'\s+', ' ', pkg.__doc__) - lines = textwrap.wrap(doc, 72) - results = StringIO() - for line in lines: - results.write((" " * indent) + line + "\n") - return results.getvalue() - - -def github_url(pkg): - """Link to a package file on github.""" - return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" % - pkg.name) - - -def rst_table(elts): - """Print out a RST-style table.""" - cols = StringIO() - ncol, widths = colify(elts, output=cols, tty=True) - header = " ".join("=" * (w-1) for w in widths) - return "%s\n%s%s" % (header, cols.getvalue(), header) - -def info_rst(): - """Print out information on all packages in restructured text.""" - pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower()) - - print "Package List" - print "==================" - - print "This is a list of things you can install using Spack. It is" - print "automatically generated based on the packages in the latest Spack" - print "release." - print - - print "Spack currently has %d mainline packages:" % len(pkgs) - print - print rst_table("`%s`_" % p.name for p in pkgs) - print - print "-----" - - # Output some text for each package. - for pkg in pkgs: - print - print ".. 
_%s:" % pkg.name - print - print pkg.name - print "-" * len(pkg.name) - print "Links" - print " * `Homepage <%s>`__" % pkg.homepage - print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg)) - print - if pkg.versions: - print "Versions:" - print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions))) - if pkg.dependencies: - print "Dependencies" - print " " + ", ".join("`%s`_" % d if d != "mpi" else d - for d in pkg.dependencies) - print - print "Description" - print format_doc(pkg, indent=2) - print - print "-----" +def setup_parser(subparser): + subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.") -def info_text(pkg): +def print_text_info(pkg): """Print out a plain text description of a package.""" print "Package: ", pkg.name print "Homepage: ", pkg.homepage @@ -150,11 +74,5 @@ def info_text(pkg): def info(parser, args): - if args.rst: - info_rst() - - else: - if not args.name: - tty.die("You must supply a package name.") - pkg = spack.db.get(args.name) - info_text(pkg) + pkg = spack.db.get(args.name) + print_text_info(pkg) diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py new file mode 100644 index 0000000000..aa576bddc2 --- /dev/null +++ b/lib/spack/spack/cmd/package-list.py @@ -0,0 +1,108 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import re +import textwrap +from StringIO import StringIO +import llnl.util.tty as tty +from llnl.util.tty.colify import * +import spack + +description = "Print a list of all packages in reStructuredText." 
+ + +def format_doc(pkg, **kwargs): + """Wrap doc string at 72 characters and format nicely""" + indent = kwargs.get('indent', 0) + + if not pkg.__doc__: + return "" + + doc = re.sub(r'\s+', ' ', pkg.__doc__) + lines = textwrap.wrap(doc, 72) + results = StringIO() + for line in lines: + results.write((" " * indent) + line + "\n") + return results.getvalue() + + +def github_url(pkg): + """Link to a package file on github.""" + return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" % + pkg.name) + + +def rst_table(elts): + """Print out a RST-style table.""" + cols = StringIO() + ncol, widths = colify(elts, output=cols, tty=True) + header = " ".join("=" * (w-1) for w in widths) + return "%s\n%s%s" % (header, cols.getvalue(), header) + + +def print_rst_package_list(): + """Print out information on all packages in restructured text.""" + pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower()) + + print "Package List" + print "==================" + + print "This is a list of things you can install using Spack. It is" + print "automatically generated based on the packages in the latest Spack" + print "release." + print + + print "Spack currently has %d mainline packages:" % len(pkgs) + print + print rst_table("`%s`_" % p.name for p in pkgs) + print + print "-----" + + # Output some text for each package. + for pkg in pkgs: + print + print ".. _%s:" % pkg.name + print + print pkg.name + print "-" * len(pkg.name) + print "Links" + print " * `Homepage <%s>`__" % pkg.homepage + print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg)) + print + if pkg.versions: + print "Versions:" + print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions))) + if pkg.dependencies: + print "Dependencies" + print " " + ", ".join("`%s`_" % d if d != "mpi" else d + for d in pkg.dependencies) + print + print "Description" + print format_doc(pkg, indent=2) + print + print "-----" + + +def package_list(parser, args): + print_rst_package_list() -- cgit v1.2.3-70-g09d2 From 8c8fc749be47aeb1aee74f74b7129f233afdbe51 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 30 Oct 2014 15:02:06 -0700 Subject: Initial versions of python and libffi. --- var/spack/packages/libffi/package.py | 16 ++++++++++++++++ var/spack/packages/python/package.py | 16 ++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 var/spack/packages/libffi/package.py create mode 100644 var/spack/packages/python/package.py diff --git a/var/spack/packages/libffi/package.py b/var/spack/packages/libffi/package.py new file mode 100644 index 0000000000..2c1c4eed4d --- /dev/null +++ b/var/spack/packages/libffi/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libffi(Package): + """The libffi library provides a portable, high level programming + interface to various calling conventions. 
This allows a programmer + to call any function specified by a call interface description at + run time.""" + homepage = "https://sourceware.org/libffi/" + url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz" + + version('3.1', 'f5898b29bbfd70502831a212d9249d10') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py new file mode 100644 index 0000000000..db2d954445 --- /dev/null +++ b/var/spack/packages/python/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Python(Package): + """The Python programming language.""" + homepage = "http://www.python.org" + url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz" + + version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') + + depends_on("openssl") + depends_on("sqlite") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 1656f62a125232b30235da01e816a3ade4481b8a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 1 Nov 2014 16:56:19 -0700 Subject: Add bzip2 package and spack pkg add command. --- lib/spack/spack/cmd/pkg.py | 17 ++++++++++++++++- var/spack/packages/bzip2/package.py | 19 +++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 var/spack/packages/bzip2/package.py diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index 82ebd13ff9..da905603fa 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -37,6 +37,10 @@ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='pkg_command') + add_parser = sp.add_parser('add', help=pkg_add.__doc__) + add_parser.add_argument('packages', nargs=argparse.REMAINDER, + help="Names of packages to add to git repo.") + list_parser = sp.add_parser('list', help=pkg_list.__doc__) list_parser.add_argument('rev', default='HEAD', nargs='?', help="Revision to list packages for.") @@ -79,6 +83,16 @@ def list_packages(rev): return sorted(line[len(relpath):] for line in output.split('\n') if line) +def pkg_add(args): + for pkg_name in args.packages: + filename = spack.db.filename_for_package_name(pkg_name) + if not os.path.isfile(filename): + tty.die("No such package: %s. Path does not exist:" % pkg_name, filename) + + git = get_git() + git('-C', spack.packages_path, 'add', filename) + + def pkg_list(args): """List packages associated with a particular spack git revision.""" colify(list_packages(args.rev)) @@ -117,7 +131,8 @@ def pkg_added(args): def pkg(parser, args): - action = { 'diff' : pkg_diff, + action = { 'add' : pkg_add, + 'diff' : pkg_diff, 'list' : pkg_list, 'removed' : pkg_removed, 'added' : pkg_added } diff --git a/var/spack/packages/bzip2/package.py b/var/spack/packages/bzip2/package.py new file mode 100644 index 0000000000..83ae88e564 --- /dev/null +++ b/var/spack/packages/bzip2/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Bzip2(Package): + """bzip2 is a freely available, patent free high-quality data + compressor. 
It typically compresses files to within 10% to 15% + of the best available techniques (the PPM family of statistical + compressors), whilst being around twice as fast at compression + and six times faster at decompression.""" + homepage = "http://www.bzip.org" + url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz" + + version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b') + + def install(self, spec, prefix): + # No configure system -- have to filter the makefile for this package. + filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True) + + make() + make("install", "PREFIX=%s" % prefix) -- cgit v1.2.3-70-g09d2 From 0f3b80cddbfd775c10f68423444aca792525f856 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 2 Nov 2014 23:53:45 -0800 Subject: Fix for SPACK-11: Spack compiler wrapper is now in bash. - Startup is much faster - Added test for compiler wrapper parsing. - Removed old compilation module that had to be imported by old cc. - Removed cc from python version checks now that it's bash. --- lib/spack/env/cc | 466 +++++++++++++++++++++++---------- lib/spack/spack/build_environment.py | 8 +- lib/spack/spack/cmd/compiler.py | 2 +- lib/spack/spack/compilation.py | 117 --------- lib/spack/spack/compiler.py | 2 +- lib/spack/spack/compilers/__init__.py | 2 +- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/cc.py | 130 +++++++++ lib/spack/spack/test/python_version.py | 1 - lib/spack/spack/util/environment.py | 9 + 10 files changed, 472 insertions(+), 268 deletions(-) delete mode 100644 lib/spack/spack/compilation.py create mode 100644 lib/spack/spack/test/cc.py diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 266e41cb48..f68cb0b2cd 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -1,140 +1,326 @@ -#!/usr/bin/env python -import sys -if not sys.version_info[:2] >= (2,6): - sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info) - -import os -import re -import subprocess -from contextlib import closing - -# Import spack parameters through the build environment. -spack_lib = os.environ.get("SPACK_LIB") -if not spack_lib: - print "Spack compiler must be run from spack!" 
- sys.exit(1) - -# Grab a minimal set of spack packages -sys.path.append(spack_lib) -from spack.compilation import * -from external import argparse -import llnl.util.tty as tty - -spack_prefix = get_env_var("SPACK_PREFIX") -spack_debug = get_env_flag("SPACK_DEBUG") -spack_deps = get_path("SPACK_DEPENDENCIES") -spack_env_path = get_path("SPACK_ENV_PATH") -spack_debug_log_dir = get_env_var("SPACK_DEBUG_LOG_DIR") -spack_spec = get_env_var("SPACK_SPEC") - -compiler_spec = get_env_var("SPACK_COMPILER_SPEC") -spack_cc = get_env_var("SPACK_CC", required=False) -spack_cxx = get_env_var("SPACK_CXX", required=False) -spack_f77 = get_env_var("SPACK_F77", required=False) -spack_fc = get_env_var("SPACK_FC", required=False) - -# Figure out what type of operation we're doing -command = os.path.basename(sys.argv[0]) - -cpp, cc, ccld, ld, version_check = range(5) - -if command == 'cpp': - mode = cpp -elif command == 'ld': - mode = ld -elif '-E' in sys.argv: - mode = cpp -elif '-c' in sys.argv: - mode = cc -else: - mode = ccld - - -if command in ('cc', 'gcc', 'c89', 'c99', 'clang'): - command = spack_cc - language = "C" -elif command in ('c++', 'CC', 'g++', 'clang++'): - command = spack_cxx - language = "C++" -elif command in ('f77'): - command = spack_f77 - language = "Fortran 77" -elif command in ('fc', 'f90', 'f95'): - command = spack_fc - language = "Fortran 90" -elif command in ('ld', 'cpp'): - pass # leave it the same. TODO: what's the right thing? -else: - raise Exception("Unknown compiler: %s" % command) - -if command is None: - print "ERROR: Compiler '%s' does not support compiling %s programs." % ( - compiler_spec, language) - sys.exit(1) - -version_args = ['-V', '-v', '--version', '-dumpversion'] -if any(arg in sys.argv for arg in version_args): - mode = version_check - -# Parse out the includes, libs, etc. so we can adjust them if need be. -parser = argparse.ArgumentParser(add_help=False) -parser.add_argument("-I", action='append', default=[], dest='include_path') -parser.add_argument("-L", action='append', default=[], dest='lib_path') -parser.add_argument("-l", action='append', default=[], dest='libs') - -options, other_args = parser.parse_known_args() -rpaths, other_args = parse_rpaths(other_args) - -# Add dependencies' include and lib paths to our compiler flags. -def add_if_dir(path_list, directory, index=None): - if os.path.isdir(directory): - if index is None: - path_list.append(directory) - else: - path_list.insert(index, directory) - -for dep_dir in spack_deps: - add_if_dir(options.include_path, os.path.join(dep_dir, "include")) - add_if_dir(options.lib_path, os.path.join(dep_dir, "lib")) - add_if_dir(options.lib_path, os.path.join(dep_dir, "lib64")) - -# Add our modified arguments to it. -arguments = ['-I%s' % path for path in options.include_path] -arguments += other_args -arguments += ['-L%s' % path for path in options.lib_path] -arguments += ['-l%s' % path for path in options.libs] - -# Add rpaths to install dir and its dependencies. We add both lib and lib64 -# here because we don't know which will be created. 
-rpaths.extend(options.lib_path) -rpaths.append('%s/lib' % spack_prefix) -rpaths.append('%s/lib64' % spack_prefix) -if mode == ccld: - arguments += ['-Wl,-rpath,%s' % p for p in rpaths] -elif mode == ld: - pairs = [('-rpath', '%s' % p) for p in rpaths] - arguments += [item for sublist in pairs for item in sublist] - -# Unset some pesky environment variables -for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]: - if var in os.environ: - os.environ.pop(var) - -# Ensure that the delegated command doesn't just call this script again. -remove_paths = ['.'] + spack_env_path -path = [p for p in get_path("PATH") if p not in remove_paths] -os.environ["PATH"] = ":".join(path) - -full_command = [command] + arguments - -if spack_debug: - input_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.in.log' % spack_spec) - output_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.out.log' % spack_spec) - with closing(open(input_log, 'a')) as log: - args = [os.path.basename(sys.argv[0])] + sys.argv[1:] - log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args)) - with closing(open(output_log, 'a')) as log: - log.write("%s\n" % " ".join(full_command)) - -rcode = subprocess.call(full_command) -sys.exit(rcode) +#!/bin/bash +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +# +# Spack compiler wrapper script. +# +# Compiler commands go through this compiler wrapper in Spack builds. +# The compiler wrapper is a thin layer around the standard compilers. +# It enables several key pieces of functionality: +# +# 1. It allows Spack to swap compilers into and out of builds easily. +# 2. It adds several options to the compile line so that spack +# packages can find their dependencies at build time and run time: +# -I arguments for dependency /include directories. +# -L arguments for dependency /lib directories. +# -Wl,-rpath arguments for dependency /lib directories. +# + +# This is the list of environment variables that need to be set before +# the script runs. They are set by routines in spack.build_environment +# as part of spack.package.Package.do_install(). 
+parameters=" +SPACK_PREFIX +SPACK_ENV_PATH +SPACK_DEBUG_LOG_DIR +SPACK_COMPILER_SPEC +SPACK_SHORT_SPEC" + +# The compiler input variables are checked for sanity later: +# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC +# Debug flag is optional; set to true for debug logging: +# SPACK_DEBUG +# Test command is used to unit test the compiler script. +# SPACK_TEST_COMMAND +# Dependencies can be empty for pkgs with no deps: +# SPACK_DEPENDENCIES + +# die() +# Prints a message and exits with error 1. +function die { + echo "$@" + exit 1 +} + +for param in $parameters; do + if [ -z "${!param}" ]; then + die "Spack compiler must be run from spack! Input $param was missing!" + fi +done + +# +# Figure out the type of compiler, the language, and the mode so that +# the compiler script knows what to do. +# +# Possible languages are C, C++, Fortran 77, and Fortran 90. +# 'command' is set based on the input command to $SPACK_[CC|CXX|F77|F90] +# +# 'mode' is set to one of: +# cc compile +# ld link +# ccld compile & link +# cpp preprocessor +# vcheck version check +# +command=$(basename "$0") +case "$command" in + cc|gcc|c89|c99|clang) + command="$SPACK_CC" + language="C" + ;; + c++|CC|g++|clang++) + command="$SPACK_CXX" + language="C++" + ;; + f77) + command="$SPACK_F77" + language="Fortran 77" + ;; + fc|f90|f95) + command="$SPACK_FC" + language="Fortran 90" + ;; + cpp) + mode=cpp + ;; + ld) + mode=ld + ;; + *) + die "Unkown compiler: $command" + ;; +esac + +# Finish setting up the mode. +if [ -z "$mode" ]; then + mode=ccld + for arg in "$@"; do + if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then + mode=vcheck + break + elif [ "$arg" = -E ]; then + mode=cpp + break + elif [ "$arg" = -c ]; then + mode=cc + break + fi + done +fi + +# Dump the version and exist if we're in testing mode. +if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then + echo "$mode" + exit +fi + +# Check that at least one of the real commands was actually selected, +# otherwise we don't know what to execute. +if [ -z "$command" ]; then + die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs." +fi + +# Save original command for debug logging +input_command="$@" + +# +# Now do real parsing of the command line args, trying hard to keep +# non-rpath linker arguments in the proper order w.r.t. other command +# line arguments. This is important for things like groups. 
+# +includes=() +libraries=() +libs=() +rpaths=() +other_args=() + +while [ -n "$1" ]; do + case "$1" in + -I*) + arg="${1#-I}" + if [ -z "$arg" ]; then shift; arg="$1"; fi + includes+=("$arg") + ;; + -L*) + arg="${1#-L}" + if [ -z "$arg" ]; then shift; arg="$1"; fi + libraries+=("$arg") + ;; + -l*) + arg="${1#-l}" + if [ -z "$arg" ]; then shift; arg="$1"; fi + libs+=("$arg") + ;; + -Wl,*) + arg="${1#-Wl,}" + if [ -z "$arg" ]; then shift; arg="$1"; fi + if [[ "$arg" = -rpath=* ]]; then + rpaths+=("${arg#-rpath=}") + elif [[ "$arg" = -rpath ]]; then + shift; arg="$1" + if [[ "$arg" != -Wl,* ]]; then + die "-Wl,-rpath was not followed by -Wl,*" + fi + rpaths+=("${arg#-Wl,}") + else + other_args+=("-Wl,$arg") + fi + ;; + -Xlinker,*) + arg="${1#-Xlinker,}" + if [ -z "$arg" ]; then shift; arg="$1"; fi + if [[ "$arg" = -rpath=* ]]; then + rpaths+=("${arg#-rpath=}") + elif [[ "$arg" = -rpath ]]; then + shift; arg="$1" + if [[ "$arg" != -Xlinker,* ]]; then + die "-Xlinker,-rpath was not followed by -Xlinker,*" + fi + rpaths+=("${arg#-Xlinker,}") + else + other_args+=("-Xlinker,$arg") + fi + ;; + *) + other_args+=("$1") + ;; + esac + shift +done + +# Dump parsed values for unit testing if asked for +if [ -n "$SPACK_TEST_COMMAND" ]; then + IFS=$'\n' + case "$SPACK_TEST_COMMAND" in + dump-includes) echo "${includes[*]}";; + dump-libraries) echo "${libraries[*]}";; + dump-libs) echo "${libs[*]}";; + dump-rpaths) echo "${rpaths[*]}";; + dump-other-args) echo "${other_args[*]}";; + dump-all) + echo "INCLUDES:" + echo "${includes[*]}" + echo + echo "LIBRARIES:" + echo "${libraries[*]}" + echo + echo "LIBS:" + echo "${libs[*]}" + echo + echo "RPATHS:" + echo "${rpaths[*]}" + echo + echo "ARGS:" + echo "${other_args[*]}" + ;; + *) + echo "ERROR: Unknown test command" + exit 1 ;; + esac + exit +fi + +# Read spack dependencies from the path environment variable +IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES" +for dep in "${deps[@]}"; do + if [ -d "$dep/include" ]; then + includes+=("$dep/include") + fi + + if [ -d "$dep/lib" ]; then + libraries+=("$dep/lib") + rpaths+=("$dep/lib") + fi + + if [ -d "$dep/lib64" ]; then + libraries+=("$dep/lib64") + rpaths+=("$dep/lib64") + fi +done + +# Include all -L's and prefix/whatever dirs in rpath +for dir in "${libraries[@]}"; do + [ "$dir" != "." ] && rpaths+=("$dir") +done +rpaths+=("$SPACK_PREFIX/lib") +rpaths+=("$SPACK_PREFIX/lib64") + +# Put the arguments together +args=() +for dir in "${includes[@]}"; do args+=("-I$dir"); done +args+=("${other_args[@]}") +for dir in "${libraries[@]}"; do args+=("-L$dir"); done +for lib in "${libs[@]}"; do args+=("-l$lib"); done + +if [ "$mode" = ccld ]; then + for dir in "${rpaths[@]}"; do args+=("-Wl,-rpath=$dir"); done +elif [ "$mode" = ld ]; then + for dir in "${rpaths[@]}"; do args+=("-rpath=$dir"); done +fi + +# +# Unset pesky environment variables that could affect build sanity. +# +unset LD_LIBRARY_PATH +unset LD_RUN_PATH +unset DYLD_LIBRARY_PATH + +# +# Filter '.' 
and Spack environment directories out of PATH so that +# this script doesn't just call itself +# +IFS=':' read -ra env_path <<< "$PATH" +IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH" +spack_env_dirs+=(".") +PATH="" +for dir in "${env_path[@]}"; do + remove="" + for rm_dir in "${spack_env_dirs[@]}"; do + if [ "$dir" = "$rm_dir" ]; then remove=True; fi + done + if [ -z "$remove" ]; then + if [ -z "$PATH" ]; then + PATH="$dir" + else + PATH="$PATH:$dir" + fi + fi +done +export PATH + +full_command=("$command") +full_command+=("${args[@]}") + +# +# Write the input and output commands to debug logs if it's asked for. +# +if [ "$SPACK_DEBUG" = "TRUE" ]; then + input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log" + output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log" + echo "$input_command" >> $input_log + echo "$mode ${full_command[@]}" >> $output_log +fi + +exec "${full_command[@]}" diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index b71c543e5d..a2fcff1f10 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -48,12 +48,11 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE' # set_build_environment_variables and used to pass parameters to # Spack's compiler wrappers. # -SPACK_LIB = 'SPACK_LIB' SPACK_ENV_PATH = 'SPACK_ENV_PATH' SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' SPACK_PREFIX = 'SPACK_PREFIX' SPACK_DEBUG = 'SPACK_DEBUG' -SPACK_SPEC = 'SPACK_SPEC' +SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC' SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' @@ -108,9 +107,6 @@ def set_compiler_environment_variables(pkg): def set_build_environment_variables(pkg): """This ensures a clean install environment when we build packages. """ - # This tells the compiler script where to find the Spack installation. - os.environ[SPACK_LIB] = spack.lib_path - # Add spack build environment path with compiler wrappers first in # the path. We handle case sensitivity conflicts like "CC" and # "cc" by putting one in the /case-insensitive @@ -140,7 +136,7 @@ def set_build_environment_variables(pkg): # Working directory for the spack command itself, for debug logs. if spack.debug: os.environ[SPACK_DEBUG] = "TRUE" - os.environ[SPACK_SPEC] = str(pkg.spec) + os.environ[SPACK_SHORT_SPEC] = pkg.spec.short_spec os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir # Add dependencies to CMAKE_PREFIX_PATH diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index ac9c844a4c..5c46a3536d 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -31,7 +31,7 @@ from llnl.util.lang import index_by import spack.compilers import spack.spec import spack.config -from spack.compilation import get_path +from spack.util.environment import get_path from spack.spec import CompilerSpec description = "Manage compilers" diff --git a/lib/spack/spack/compilation.py b/lib/spack/spack/compilation.py deleted file mode 100644 index 3a469376a8..0000000000 --- a/lib/spack/spack/compilation.py +++ /dev/null @@ -1,117 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. 
-# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -"""\ -The ``compilation`` module contains utility functions used by the compiler -wrapper script. - -.. todo:: - - Think about moving this into the script to increase compilation - speed. - -""" -import os -import sys - - -def get_env_var(name, required=True): - value = os.environ.get(name) - if required and value is None: - print "%s must be run from spack." % os.path.abspath(sys.argv[0]) - sys.exit(1) - return value - - -def get_env_flag(name, required=False): - value = get_env_var(name, required) - if value: - return value.lower() == "true" - return False - - -def get_path(name): - path = os.environ.get(name, "").strip() - if path: - return path.split(":") - else: - return [] - - -def parse_rpaths(arguments): - """argparse, for all its features, cannot understand most compilers' - rpath arguments. This handles '-Wl,', '-Xlinker', and '-R'""" - def get_next(arg, args): - """Get an expected next value of an iterator, or die if it's not there""" - try: - return next(args) - except StopIteration: - # quietly ignore -rpath and -Xlinker without args. - return None - - other_args = [] - def linker_args(): - """This generator function allows us to parse the linker args separately - from the compiler args, so that we can handle them more naturally. - """ - args = iter(arguments) - for arg in args: - if arg.startswith('-Wl,'): - sub_args = [sub for sub in arg.replace('-Wl,', '', 1).split(',')] - for arg in sub_args: - yield arg - elif arg == '-Xlinker': - target = get_next(arg, args) - if target is not None: - yield target - else: - other_args.append(arg) - - # Extract all the possible ways rpath can appear in linker args, then - # append non-rpaths to other_args. This happens in-line as the linker - # args are extracted, so we preserve the original order of arguments. - # This is important for args like --whole-archive, --no-whole-archive, - # and others that tell the linker how to handle the next few libraries - # it encounters on the command line. 
- rpaths = [] - largs = linker_args() - for arg in largs: - if arg == '-rpath': - target = get_next(arg, largs) - if target is not None: - rpaths.append(target) - - elif arg.startswith('-R'): - target = arg.replace('-R', '', 1) - if not target: - target = get_next(arg, largs) - if target is None: break - - if os.path.isdir(target): - rpaths.append(target) - else: - other_args.extend(['-Wl,' + arg, '-Wl,' + target]) - else: - other_args.append('-Wl,' + arg) - return rpaths, other_args diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 8debf518c7..646050d267 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -35,8 +35,8 @@ import spack.error import spack.spec from spack.util.multiproc import parmap from spack.util.executable import * +from spack.util.environment import get_path from spack.version import Version -from spack.compilation import get_path __all__ = ['Compiler', 'get_compiler_version'] diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 467472cced..e572cd89b6 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -40,7 +40,7 @@ from spack.util.multiproc import parmap from spack.compiler import Compiler from spack.util.executable import which from spack.util.naming import mod_to_class -from spack.compilation import get_path +from spack.util.environment import get_path _imported_compilers_module = 'spack.compilers' _required_instance_vars = ['cc', 'cxx', 'f77', 'fc'] diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 9eae3261c2..c6a371fd0d 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -53,7 +53,8 @@ test_names = ['versions', 'svn_fetch', 'hg_fetch', 'mirror', - 'url_extrapolate'] + 'url_extrapolate', + 'cc'] def list_tests(): diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py new file mode 100644 index 0000000000..aa16f9b351 --- /dev/null +++ b/lib/spack/spack/test/cc.py @@ -0,0 +1,130 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +This test checks that the Spack cc compiler wrapper is parsing +arguments correctly. 
+""" +import os +import unittest + +from llnl.util.filesystem import * +import spack +from spack.util.executable import * + +# Complicated compiler test command +test_command = [ + '-I/test/include', '-L/test/lib', '-L/other/lib', '-I/other/include', + 'arg1', + '-Wl,--start-group', + 'arg2', + '-Wl,-rpath=/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath', + '-llib1', '-llib2', + 'arg4', + '-Wl,--end-group', + '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath=/fourth/rpath', + '-llib3', '-llib4', + 'arg5', 'arg6'] + + +class CompilerTest(unittest.TestCase): + + def setUp(self): + self.cc = Executable(join_path(spack.build_env_path, "cc")) + self.ld = Executable(join_path(spack.build_env_path, "ld")) + self.cpp = Executable(join_path(spack.build_env_path, "cpp")) + + os.environ['SPACK_CC'] = "/bin/mycc" + os.environ['SPACK_PREFIX'] = "/usr" + os.environ['SPACK_ENV_PATH']="test" + os.environ['SPACK_DEBUG_LOG_DIR'] = "." + os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7" + os.environ['SPACK_SHORT_SPEC'] = "foo@1.2" + + + def check_cc(self, command, args, expected): + os.environ['SPACK_TEST_COMMAND'] = command + self.assertEqual(self.cc(*args, return_output=True).strip(), expected) + + + def check_ld(self, command, args, expected): + os.environ['SPACK_TEST_COMMAND'] = command + self.assertEqual(self.ld(*args, return_output=True).strip(), expected) + + + def check_cpp(self, command, args, expected): + os.environ['SPACK_TEST_COMMAND'] = command + self.assertEqual(self.cpp(*args, return_output=True).strip(), expected) + + + def test_vcheck_mode(self): + self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck") + self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck") + self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck") + self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck") + self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck") + self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck") + + + def test_cpp_mode(self): + self.check_cc('dump-mode', ['-E'], "cpp") + self.check_cpp('dump-mode', [], "cpp") + + + def test_ccld_mode(self): + self.check_cc('dump-mode', [], "ccld") + self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld") + self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath=foo'], "ccld") + self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ccld") + + + def test_ld_mode(self): + self.check_ld('dump-mode', [], "ld") + self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ld") + + + def test_includes(self): + self.check_cc('dump-includes', test_command, + "\n".join(["/test/include", "/other/include"])) + + + def test_libraries(self): + self.check_cc('dump-libraries', test_command, + "\n".join(["/test/lib", "/other/lib"])) + + + def test_libs(self): + self.check_cc('dump-libs', test_command, + "\n".join(["lib1", "lib2", "lib3", "lib4"])) + + + def test_rpaths(self): + self.check_cc('dump-rpaths', test_command, + "\n".join(["/first/rpath", "/second/rpath", "/third/rpath", "/fourth/rpath"])) + + + def test_other_args(self): + self.check_cc('dump-other-args', test_command, + "\n".join(["arg1", "-Wl,--start-group", "arg2", "arg3", "arg4", + "-Wl,--end-group", "arg5", "arg6"])) diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index 04b4eadf34..f814df3226 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -45,7 +45,6 @@ class 
PythonVersionTest(unittest.TestCase): def spack_python_files(self): # first file is the spack script. yield spack.spack_file - yield os.path.join(spack.build_env_path, 'cc') # Next files are all the source files and package files. search_paths = [spack.lib_path, spack.var_path] diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index 435d912185..afdf51c707 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -24,6 +24,15 @@ ############################################################################## import os + +def get_path(name): + path = os.environ.get(name, "").strip() + if path: + return path.split(":") + else: + return [] + + def env_flag(name): if name in os.environ: return os.environ[name].lower() == "true" -- cgit v1.2.3-70-g09d2 From 6c8c41da98a01d595b8ef77175e2c717accf02d7 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 3 Nov 2014 00:15:05 -0800 Subject: Working Python 2.7.8, ncurses, readline --- var/spack/packages/ncurses/package.py | 8 ++++++-- var/spack/packages/python/package.py | 11 ++++++++++- var/spack/packages/readline/package.py | 21 +++++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 var/spack/packages/readline/package.py diff --git a/var/spack/packages/ncurses/package.py b/var/spack/packages/ncurses/package.py index 4885caa628..2556e600f6 100644 --- a/var/spack/packages/ncurses/package.py +++ b/var/spack/packages/ncurses/package.py @@ -10,10 +10,14 @@ class Ncurses(Package): homepage = "http://invisible-island.net/ncurses/ncurses.html" version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', - url='http://invisible-island.net/datafiles/release/ncurses.tar.gz') + url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure("--prefix=%s" % prefix, + "--with-shared", + "--enable-widec", + "--enable-pc-files", + "--without-ada") make() make("install") diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index db2d954445..e6c3e28820 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -8,9 +8,18 @@ class Python(Package): version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') depends_on("openssl") + depends_on("bzip2") + depends_on("readline") + depends_on("ncurses") depends_on("sqlite") def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + # Need this to allow python build to find the Python installation. + env['PYTHONHOME'] = prefix + + # Rest of install is pretty standard. + configure("--prefix=%s" % prefix, + "--with-threads", + "--enable-shared") make() make("install") diff --git a/var/spack/packages/readline/package.py b/var/spack/packages/readline/package.py new file mode 100644 index 0000000000..1b870e0e7f --- /dev/null +++ b/var/spack/packages/readline/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Readline(Package): + """The GNU Readline library provides a set of functions for use by + applications that allow users to edit command li nes as they + are typed in. Both Emacs and vi editing modes are + available. The Readline library includes additional functions + to maintain a list of previously-entered command lines, to + recall and perhaps reedit those lines, and perform csh-like + history expansion on previous commands. 
""" + homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" + url = "ftp://ftp.cwru.edu/pub/bash/readline-6.3.tar.gz" + + version('6.3', '33c8fb279e981274f485fd91da77e94a') + + depends_on("ncurses") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make("SHLIB_LIBS=-lncurses") + make("install") -- cgit v1.2.3-70-g09d2 From 6c4bac2ed8544e6b0e7d8f99aeeb18144a81772f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 3 Nov 2014 14:17:10 -0800 Subject: Update libmonitor URL to point to google code. --- var/spack/packages/libmonitor/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/libmonitor/package.py b/var/spack/packages/libmonitor/package.py index ed619e4cce..3b95b86ddf 100644 --- a/var/spack/packages/libmonitor/package.py +++ b/var/spack/packages/libmonitor/package.py @@ -28,7 +28,7 @@ class Libmonitor(Package): """Libmonitor is a library for process and thread control.""" homepage = "http://hpctoolkit.org" - version('20130218', svn='https://outreach.scidac.gov/svn/libmonitor/trunk', revision=146) + version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146) def install(self, spec, prefix): configure("--prefix=" + prefix) -- cgit v1.2.3-70-g09d2 From 7905b50dcb14c1a71303ce38dc6b0bdb7931d89c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 3 Nov 2014 01:44:30 -0800 Subject: Bump ImageMagick version --- var/spack/packages/ImageMagick/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/ImageMagick/package.py b/var/spack/packages/ImageMagick/package.py index 39c733e0df..ae06368f85 100644 --- a/var/spack/packages/ImageMagick/package.py +++ b/var/spack/packages/ImageMagick/package.py @@ -3,9 +3,10 @@ from spack import * class Imagemagick(Package): """ImageMagick is a image processing library""" homepage = "http://www.imagemagic.org" - url = "http://www.imagemagick.org/download/ImageMagick-6.8.9-9.tar.gz" + url = "http://www.imagemagick.org/download/ImageMagick-6.8.9-10.tar.gz" - version('6.8.9-9', 'e63fed3e3550851328352c708f800676') + version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c') + version('6.8.9-9', 'e63fed3e3550851328352c708f800676') depends_on('libtool') depends_on('jpeg') -- cgit v1.2.3-70-g09d2 From a37828bafba2f594e54e0e7487df909fdeac4c8f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 8 Oct 2014 21:59:47 -0700 Subject: Packages for gcc and its dependencies. --- var/spack/packages/gcc/package.py | 43 ++++++++++++++++++++++++++++++++++++++ var/spack/packages/gmp/package.py | 40 +++++++++++++++++++++++++++++++++++ var/spack/packages/mpc/package.py | 42 +++++++++++++++++++++++++++++++++++++ var/spack/packages/mpfr/package.py | 38 +++++++++++++++++++++++++++++++++ 4 files changed, 163 insertions(+) create mode 100644 var/spack/packages/gcc/package.py create mode 100644 var/spack/packages/gmp/package.py create mode 100644 var/spack/packages/mpc/package.py create mode 100644 var/spack/packages/mpfr/package.py diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py new file mode 100644 index 0000000000..bb5fee8192 --- /dev/null +++ b/var/spack/packages/gcc/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Gcc(Package): + """The GNU Compiler Collection includes front ends for C, C++, + Objective-C, Fortran, and Java.""" + homepage = "https://gcc.gnu.org" + url = "http://www.netgull.com/gcc/releases/gcc-4.9.1/gcc-4.9.1.tar.bz2" + + version('4.9.1', 'fddf71348546af523353bd43d34919c1') + + depends_on("mpc") + depends_on("mpfr") + depends_on("gmp") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--disable-multilib") + make() + make("install") diff --git a/var/spack/packages/gmp/package.py b/var/spack/packages/gmp/package.py new file mode 100644 index 0000000000..d6af821b34 --- /dev/null +++ b/var/spack/packages/gmp/package.py @@ -0,0 +1,40 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Gmp(Package): + """GMP is a free library for arbitrary precision arithmetic, + operating on signed integers, rational numbers, and + floating-point numbers.""" + homepage = "https://gmplib.org" + url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2" + + version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470') + version('6.0.0' , '6ef5869ae735db9995619135bd856b84') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/mpc/package.py b/var/spack/packages/mpc/package.py new file mode 100644 index 0000000000..6fbfca3007 --- /dev/null +++ b/var/spack/packages/mpc/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpc(Package): + """Gnu Mpc is a C library for the arithmetic of complex numbers + with arbitrarily high precision and correct rounding of the + result.""" + homepage = "http://www.multiprecision.org" + url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz" + + version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') + + depends_on("gmp") + depends_on("mpfr") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/mpfr/package.py b/var/spack/packages/mpfr/package.py new file mode 100644 index 0000000000..62dac63206 --- /dev/null +++ b/var/spack/packages/mpfr/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpfr(Package): + """The MPFR library is a C library for multiple-precision + floating-point computations with correct rounding.""" + homepage = "http://www.mpfr.org" + url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.2.tar.bz2" + + version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From b97ee67a4b8ddddf9af8a93c3e8292e34d507467 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 5 Nov 2014 09:54:43 -0800 Subject: Working GCC package. --- lib/spack/llnl/util/filesystem.py | 23 +++++++++++++++-- var/spack/packages/gcc/package.py | 53 ++++++++++++++++++++++++++++++++++++--- 2 files changed, 71 insertions(+), 5 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index dc722297ec..6a04d98a18 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -22,8 +22,9 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp', - 'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter'] +__all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', + 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', + 'can_access', 'filter_file', 'change_sed_delimiter'] import os import sys @@ -127,10 +128,19 @@ def change_sed_delimiter(old_delim, new_delim, *filenames): filter_file(double_quoted, '"%s"' % repl, f) +def set_install_permissions(path): + """Set appropriate permissions on the installed file.""" + if os.path.isdir(path): + os.chmod(path, 0755) + else: + os.chmod(path, 0644) + + def install(src, dest): """Manually install a file to a particular location.""" tty.info("Installing %s to %s" % (src, dest)) shutil.copy(src, dest) + set_install_permissions(dest) def expand_user(path): @@ -152,6 +162,15 @@ def mkdirp(*paths): raise OSError(errno.EEXIST, "File alredy exists", path) +def force_remove(*paths): + """Remove files without printing errors. 
Like rm -f, does NOT + remove directories.""" + for path in paths: + try: + os.remove(path) + except OSError, e: + pass + @contextmanager def working_dir(dirname, **kwargs): if kwargs.get('create', False): diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index bb5fee8192..e1f1084c96 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -24,20 +24,67 @@ ############################################################################## from spack import * +from contextlib import closing +from glob import glob + class Gcc(Package): """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, and Java.""" homepage = "https://gcc.gnu.org" - url = "http://www.netgull.com/gcc/releases/gcc-4.9.1/gcc-4.9.1.tar.bz2" - version('4.9.1', 'fddf71348546af523353bd43d34919c1') + list_url = 'http://mirrors.kernel.org/gnu/gcc/' + list_depth = 2 + + version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43', + url="http://mirrors.kernel.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2") + version('4.9.1', 'fddf71348546af523353bd43d34919c1', + url="http://mirrors.kernel.org/gnu/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2") depends_on("mpc") depends_on("mpfr") depends_on("gmp") + depends_on("libelf") + def install(self, spec, prefix): + # libjava/configure needs a minor fix to install into spack paths. + filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True) + + # Rest of install is straightforward. configure("--prefix=%s" % prefix, - "--disable-multilib") + "--libdir=%s/lib64" % prefix, + "--disable-multilib", + "--enable-languages=c,c++,fortran,java,objc,go", + "--enable-lto", + "--with-quad") make() make("install") + + self.write_rpath_specs() + + + @property + def spec_dir(self): + # e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2 + spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix) + return spec_dir[0] if spec_dir else None + + + def write_rpath_specs(self): + """Generate a spec file so the linker adds a rpath to the libs + the compiler used to build the executable.""" + if not self.spec_dir: + tty.warn("Could not install specs for %s." % self.spec.format('$_$@')) + return + + gcc = Executable(join_path(self.prefix.bin, 'gcc')) + lines = gcc('-dumpspecs', return_output=True).split("\n") + for i, line in enumerate(lines): + if line.startswith("*link:"): + specs_file = join_path(self.spec_dir, 'specs') + with closing(open(specs_file, 'w')) as out: + out.write(lines[i] + "\n") + out.write("-rpath %s/lib:%s/lib64 \\\n" + % (self.prefix, self.prefix)) + out.write(lines[i+1] + "\n") + set_install_permissions(specs_file) -- cgit v1.2.3-70-g09d2 From 193eddda5eea2e91c4f8f33e16224a7bd844ebfa Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 6 Nov 2014 11:46:43 -0800 Subject: Fix for missing format_doc in package-list command. 
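The format_doc helper that this commit moves onto the Package class just collapses whitespace in a package docstring and re-wraps it at 72 columns with a configurable indent. A rough, self-contained sketch of that behavior follows; the docstring and the indent of 4 are invented purely for illustration and are not taken from any real package:

import re
import textwrap

# Invented docstring, used only to make the collapse + wrap + indent visible.
doc = """An   example package description that
         spans several lines and has    irregular spacing,
         so the 72-column wrapping behavior is easy to see."""

flat = re.sub(r'\s+', ' ', doc).strip()   # collapse runs of whitespace
for line in textwrap.wrap(flat, 72):      # wrap at 72 columns
    print (" " * 4) + line                # re-indent each wrapped line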
--- lib/spack/spack/cmd/info.py | 2 +- lib/spack/spack/cmd/package-list.py | 18 +----------------- lib/spack/spack/package.py | 19 +++++++++++++++++++ 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 3e4ff627d5..eafafc601a 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -68,7 +68,7 @@ def print_text_info(pkg): print print "Description:" if pkg.__doc__: - print format_doc(pkg, indent=4) + print pkg.format_doc(indent=4) else: print " None" diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index aa576bddc2..87c528881e 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -23,7 +23,6 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import re -import textwrap from StringIO import StringIO import llnl.util.tty as tty from llnl.util.tty.colify import * @@ -32,21 +31,6 @@ import spack description = "Print a list of all packages in reStructuredText." -def format_doc(pkg, **kwargs): - """Wrap doc string at 72 characters and format nicely""" - indent = kwargs.get('indent', 0) - - if not pkg.__doc__: - return "" - - doc = re.sub(r'\s+', ' ', pkg.__doc__) - lines = textwrap.wrap(doc, 72) - results = StringIO() - for line in lines: - results.write((" " * indent) + line + "\n") - return results.getvalue() - - def github_url(pkg): """Link to a package file on github.""" return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" % @@ -99,7 +83,7 @@ def print_rst_package_list(): for d in pkg.dependencies) print print "Description" - print format_doc(pkg, indent=2) + print pkg.format_doc(indent=2) print print "-----" diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 7cf94ed1ef..e4afb1d027 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -40,6 +40,8 @@ import subprocess import platform as py_platform import multiprocessing from urlparse import urlparse +import textwrap +from StringIO import StringIO import llnl.util.tty as tty from llnl.util.filesystem import * @@ -852,6 +854,23 @@ class Package(object): self.stage.destroy() + def format_doc(self, **kwargs): + """Wrap doc string at 72 characters and format nicely""" + indent = kwargs.get('indent', 0) + + if not self.__doc__: + return "" + + doc = re.sub(r'\s+', ' ', self.__doc__) + lines = textwrap.wrap(doc, 72) + results = StringIO() + for line in lines: + results.write((" " * indent) + line + "\n") + return results.getvalue() + + + + def fetch_available_versions(self): if not hasattr(self, 'url'): raise VersionFetchError(self.__class__) -- cgit v1.2.3-70-g09d2 From d78ece658b0b139604998886a788acc11e661b14 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 7 Nov 2014 00:13:52 -0800 Subject: Change to faster gcc mirror that allows spidering. 
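For context on the GCC package touched here: the write_rpath_specs method added in the "Working GCC package" commit above rewrites the compiler's "*link:" spec so that binaries built by this gcc carry an rpath to its own lib directories. A simplified sketch of that rewrite is below; the -dumpspecs excerpt and the install prefix are made-up placeholders, not real GCC output or a real Spack prefix:

# Made-up excerpt of what `gcc -dumpspecs` might print; only the '*link:'
# stanza matters for this sketch.
sample_specs = """*asm:
%{v}

*link:
%{shared:-shared} %{static:-static}
"""
prefix = "/opt/example/gcc-4.9.2"   # placeholder prefix

def add_rpath_to_link_spec(specs_text, prefix):
    lines = specs_text.split("\n")
    out = []
    for line in lines:
        out.append(line)
        if line.startswith("*link:"):
            # Insert an rpath continuation line right after '*link:' so
            # executables built by this gcc can find its libstdc++/libgcc.
            out.append("-rpath %s/lib:%s/lib64 \\" % (prefix, prefix))
    return "\n".join(out)

print add_rpath_to_link_spec(sample_specs, prefix)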
--- var/spack/packages/gcc/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index e1f1084c96..da0debd5dc 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -32,13 +32,13 @@ class Gcc(Package): Objective-C, Fortran, and Java.""" homepage = "https://gcc.gnu.org" - list_url = 'http://mirrors.kernel.org/gnu/gcc/' + list_url = 'http://open-source-box.org/gcc/' list_depth = 2 version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43', - url="http://mirrors.kernel.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2") + url="http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2") version('4.9.1', 'fddf71348546af523353bd43d34919c1', - url="http://mirrors.kernel.org/gnu/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2") + url="http://open-source-box.org/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2") depends_on("mpc") depends_on("mpfr") -- cgit v1.2.3-70-g09d2 From 55bf243f166e38451026813fa2bdc2c90263d1aa Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 7 Nov 2014 00:17:25 -0800 Subject: Improved website scraping. --- bin/spack | 1 + lib/spack/spack/cmd/checksum.py | 34 ++++++------ lib/spack/spack/cmd/create.py | 7 ++- lib/spack/spack/cmd/versions.py | 20 ++++++- lib/spack/spack/concretize.py | 2 +- lib/spack/spack/package.py | 119 ++++++++++++++++++++-------------------- lib/spack/spack/url.py | 6 +- lib/spack/spack/util/web.py | 40 ++++++++++---- 8 files changed, 134 insertions(+), 95 deletions(-) diff --git a/bin/spack b/bin/spack index 75874ca39e..9fbb65f349 100755 --- a/bin/spack +++ b/bin/spack @@ -113,4 +113,5 @@ except SpackError, e: tty.die(e.message) except KeyboardInterrupt: + sys.stderr.write('\n') tty.die("Keyboard interrupt.") diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index f9218b9df1..3f2a9aa745 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -85,24 +85,24 @@ def checksum(parser, args): pkg = spack.db.get(args.package) # If the user asked for specific versions, use those. - versions = [ver(v) for v in args.versions] - - if not all(type(v) == Version for v in versions): - tty.die("Cannot generate checksums for version lists or " + - "version ranges. Use unambiguous versions.") - - if not versions: - versions = pkg.fetch_available_versions() + if args.versions: + versions = {} + for v in args.versions: + v = ver(v) + if not isinstance(v, Version): + tty.die("Cannot generate checksums for version lists or " + + "version ranges. Use unambiguous versions.") + versions[v] = pkg.url_for_version(v) + else: + versions = pkg.fetch_remote_versions() if not versions: - tty.die("Could not fetch any available versions for %s." % pkg.name) - - versions = list(reversed(sorted(versions))) - urls = [pkg.url_for_version(v) for v in versions] + tty.die("Could not fetch any versions for %s." % pkg.name) + sorted_versions = list(reversed(sorted(versions))) - tty.msg("Found %s versions of %s." % (len(urls), pkg.name), + tty.msg("Found %s versions of %s." 
% (len(versions), pkg.name), *spack.cmd.elide_list( - ["%-10s%s" % (v,u) for v, u in zip(versions, urls)])) + ["%-10s%s" % (v, versions[v]) for v in sorted_versions])) print archives_to_fetch = tty.get_number( "How many would you like to checksum?", default=5, abort='q') @@ -112,10 +112,12 @@ def checksum(parser, args): return version_hashes = get_checksums( - versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage) + sorted_versions[:archives_to_fetch], + [versions[v] for v in sorted_versions[:archives_to_fetch]], + keep_stage=args.keep_stage) if not version_hashes: - tty.die("Could not fetch any available versions for %s." % pkg.name) + tty.die("Could not fetch any versions for %s." % pkg.name) version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes] tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 7ac10285a4..1b9ad524c4 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -159,13 +159,12 @@ def create(parser, args): else: mkdirp(os.path.dirname(pkg_path)) - versions = list(reversed(spack.package.find_versions_of_archive(url))) + versions = spack.package.find_versions_of_archive(url) archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided - versions = [version] - urls = [url] + versions = { version : url } else: urls = [spack.url.substitute_version(url, v) for v in versions] if len(urls) > 1: @@ -181,6 +180,8 @@ def create(parser, args): tty.msg("Aborted.") return + sorted_versions = list(reversed(versions)) + guesser = ConfigureGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( versions[:archives_to_fetch], urls[:archives_to_fetch], diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py index c545035279..ed16728261 100644 --- a/lib/spack/spack/cmd/versions.py +++ b/lib/spack/spack/cmd/versions.py @@ -24,6 +24,7 @@ ############################################################################## import os from llnl.util.tty.colify import colify +import llnl.util.tty as tty import spack description ="List available versions of a package" @@ -34,4 +35,21 @@ def setup_parser(subparser): def versions(parser, args): pkg = spack.db.get(args.package) - colify(reversed(pkg.fetch_available_versions())) + + safe_versions = pkg.versions + fetched_versions = pkg.fetch_remote_versions() + remote_versions = set(fetched_versions).difference(safe_versions) + + tty.msg("Safe versions (already checksummed):") + colify(sorted(safe_versions, reverse=True), indent=2) + + tty.msg("Remote versions (not yet checksummed):") + if not remote_versions: + if not fetched_versions: + print " Found no versions for %s" % pkg.name + tty.debug("Check the list_url and list_depth attribute on the " + "package to help Spack find versions.") + else: + print " Found no unckecksummed versions for %s" % pkg.name + else: + colify(sorted(remote_versions, reverse=True), indent=2) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index eee8cb7fde..805604368e 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -68,7 +68,7 @@ class DefaultConcretizer(object): # If there are known avaialble versions, return the most recent # version that satisfies the spec pkg = spec.package - valid_versions = [v for v in pkg.available_versions + valid_versions = [v for v in pkg.versions if any(v.satisfies(sv) for sv in spec.versions)] if 
valid_versions: diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 7cf94ed1ef..58d62a7deb 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -39,7 +39,7 @@ import inspect import subprocess import platform as py_platform import multiprocessing -from urlparse import urlparse +from urlparse import urlparse, urljoin import llnl.util.tty as tty from llnl.util.filesystem import * @@ -333,9 +333,6 @@ class Package(object): if '.' in self.name: self.name = self.name[self.name.rindex('.') + 1:] - # This is set by scraping a web page. - self._available_versions = None - # Sanity check some required variables that could be # overridden by package authors. def ensure_has_dict(attr_name): @@ -370,14 +367,15 @@ class Package(object): # Init fetch strategy and url to None self._fetcher = None - self.url = None + self.url = getattr(self.__class__, 'url', None) # Fix up self.url if this package fetches with a URLFetchStrategy. # This makes self.url behave sanely. if self.spec.versions.concrete: - # TODO: this is a really roundabout way of determining the type of fetch to do. - # TODO: figure out a more sane fetch strategy/package init order - # TODO: (right now it's conflated with stage, package, and the tests make assumptions) + # TODO: this is a really roundabout way of determining the type + # TODO: of fetch to do. figure out a more sane fetch strategy/package + # TODO: init order (right now it's conflated with stage, package, and + # TODO: the tests make assumptions) f = fs.for_package_version(self, self.version) if isinstance(f, fs.URLFetchStrategy): self.url = self.url_for_version(self.spec.version) @@ -852,71 +850,70 @@ class Package(object): self.stage.destroy() - def fetch_available_versions(self): - if not hasattr(self, 'url'): - raise VersionFetchError(self.__class__) - - # If not, then try to fetch using list_url - if not self._available_versions: - try: - self._available_versions = find_versions_of_archive( - self.url, - list_url=self.list_url, - list_depth=self.list_depth) - - if not self._available_versions: - tty.warn("Found no versions for %s" % self.name, - "Check the list_url and list_depth attribute on the " - + self.name + " package.", - "Use them to tell Spack where to look for versions.") + @property + def all_urls(self): + urls = [] + if self.url: + urls.append(self.url) - except spack.error.NoNetworkConnectionError, e: - tty.die("Package.fetch_available_versions couldn't connect to:", - e.url, e.message) + for args in self.versions.values(): + if 'url' in args: + urls.append(args['url']) + return urls - return self._available_versions + def fetch_remote_versions(self): + """Try to find remote versions of this package using the + list_url and any other URLs described in the package file.""" + if not self.all_urls: + raise VersionFetchError(self.__class__) - @property - def available_versions(self): - # If the package overrode available_versions, then use that. 
- if self.versions is not None: - return VersionList(self.versions.keys()) - else: - vlist = self.fetch_available_versions() - if not vlist: - vlist = ver([self.version]) - return vlist + try: + return find_versions_of_archive( + *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) + except spack.error.NoNetworkConnectionError, e: + tty.die("Package.fetch_versions couldn't connect to:", + e.url, e.message) -def find_versions_of_archive(archive_url, **kwargs): +def find_versions_of_archive(*archive_urls, **kwargs): list_url = kwargs.get('list_url', None) list_depth = kwargs.get('list_depth', 1) - if not list_url: - list_url = url.find_list_url(archive_url) - - # This creates a regex from the URL with a capture group for the - # version part of the URL. The capture group is converted to a - # generic wildcard, so we can use this to extract things on a page - # that look like archive URLs. - url_regex = url.wildcard_version(archive_url) - - # We'll be a bit more liberal and just look for the archive part, - # not the full path. - archive_regex = os.path.basename(url_regex) + # Generate a list of list_urls based on archive urls and any + # explicitly listed list_url in the package + list_urls = set() + if list_url: + list_urls.add(list_url) + for aurl in archive_urls: + list_urls.add(url.find_list_url(aurl)) # Grab some web pages to scrape. - page_map = get_pages(list_url, depth=list_depth) + page_map = {} + for lurl in list_urls: + page_map.update(get_pages(lurl, depth=list_depth)) + + # Scrape them for archive URLs + regexes = [] + for aurl in archive_urls: + # This creates a regex from the URL with a capture group for + # the version part of the URL. The capture group is converted + # to a generic wildcard, so we can use this to extract things + # on a page that look like archive URLs. + url_regex = url.wildcard_version(aurl) + + # We'll be a bit more liberal and just look for the archive + # part, not the full path. + regexes.append(os.path.basename(url_regex)) # Build a version list from all the matches we find - versions = VersionList() - for site, page in page_map.iteritems(): + versions = {} + for page_url, content in page_map.iteritems(): # extract versions from matches. - matches = re.finditer(archive_regex, page) - version_strings = set(m.group(1) for m in matches) - for v in version_strings: - versions.add(Version(v)) + for regex in regexes: + versions.update( + (Version(m.group(1)), urljoin(page_url, m.group(0))) + for m in re.finditer(regex, content)) return versions @@ -979,8 +976,8 @@ class VersionFetchError(PackageError): """Raised when a version URL cannot automatically be determined.""" def __init__(self, cls): super(VersionFetchError, self).__init__( - "Cannot fetch version for package %s " % cls.__name__ + - "because it does not define a default url.") + "Cannot fetch versions for package %s " % cls.__name__ + + "because it does not define any URLs to fetch.") class NoURLError(PackageError): diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index e2fbb19f5d..a0410131b0 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -245,6 +245,10 @@ def wildcard_version(path): # Construct a case-insensitive regular expression for the package name. name_re = '(%s)' % insensitize(name) + # protect extensions like bz2 from wildcarding. 
+ ext = comp.extension(path) + path = comp.strip_extension(path) + # Split the string apart by things that match the name so that if the # name contains numbers or things that look like versions, we don't # catch them with the version wildcard. @@ -261,4 +265,4 @@ def wildcard_version(path): name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts) # Put it all back together with original name matches intact. - return ''.join(name_parts) + return ''.join(name_parts) + '.' + ext diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index ba42cb37b5..1420d62a77 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -25,7 +25,7 @@ import re import sys import subprocess -import urllib2 +import urllib2, cookielib import urlparse from multiprocessing import Pool from HTMLParser import HTMLParser, HTMLParseError @@ -68,7 +68,7 @@ def _spider(args): pool. Firing off all the child links at once makes the fetch MUCH faster for pages with lots of children. """ - url, depth, max_depth, raise_on_error = args + url, visited, root, opener, depth, max_depth, raise_on_error = args pages = {} try: @@ -82,12 +82,12 @@ def _spider(args): resp = urllib2.urlopen(req, timeout=TIMEOUT) if not "Content-type" in resp.headers: - tty.warn("ignoring page " + url) + tty.debug("ignoring page " + url) return pages if not resp.headers["Content-type"].startswith('text/html'): - tty.warn("ignoring page " + url + " with content type " + - resp.headers["Content-type"]) + tty.debug("ignoring page " + url + " with content type " + + resp.headers["Content-type"]) return pages # Do the real GET request when we know it's just HTML. @@ -114,15 +114,30 @@ def _spider(args): # Evaluate the link relative to the page it came from. abs_link = urlparse.urljoin(response_url, raw_link) - subcalls.append((abs_link, depth+1, max_depth, raise_on_error)) + + # Skip things outside the root directory + if not abs_link.startswith(root): + continue + + # Skip already-visited links + if abs_link in visited: + continue + + subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error)) + visited.add(abs_link) if subcalls: - pool = Pool(processes=len(subcalls)) - dicts = pool.map(_spider, subcalls) - for d in dicts: - pages.update(d) + try: + pool = Pool(processes=len(subcalls)) + dicts = pool.map(_spider, subcalls) + for d in dicts: + pages.update(d) + finally: + pool.terminate() + pool.join() except urllib2.URLError, e: + tty.debug(e) if raise_on_error: raise spack.error.NoNetworkConnectionError(str(e), url) @@ -137,7 +152,8 @@ def _spider(args): tty.warn(msg, url, "HTMLParseError: " + str(e)) except Exception, e: - pass # Other types of errors are completely ignored. + # Other types of errors are completely ignored, except in debug mode. + tty.debug("Error in _spider: %s" % e) return pages @@ -151,5 +167,5 @@ def get_pages(root_url, **kwargs): performance over a sequential fetch. """ max_depth = kwargs.setdefault('depth', 1) - pages = _spider((root_url, 1, max_depth, False)) + pages = _spider((root_url, set(), root_url, None, 1, max_depth, False)) return pages -- cgit v1.2.3-70-g09d2 From 9033ae646006bf6c4c1e482c024af9217fcdea99 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 7 Nov 2014 00:20:39 -0800 Subject: Add package for Sandia QThreads. 
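The _spider changes in the previous commit boil down to two extra filters before a child link is queued: stay underneath the URL the crawl started from, and never queue the same link twice. A stripped-down, single-process illustration of just that filtering is below; the URLs are invented for the example:

import urlparse

root = "http://example.com/gcc/"   # invented starting URL for the crawl
visited = set([root])

def want_link(page_url, raw_link):
    abs_link = urlparse.urljoin(page_url, raw_link)
    if not abs_link.startswith(root):   # skip things outside the root directory
        return False
    if abs_link in visited:             # skip already-visited links
        return False
    visited.add(abs_link)
    return True

print want_link(root, "gcc-4.9.2/")          # True: new page under the root
print want_link(root, "gcc-4.9.2/")          # False: already visited
print want_link(root, "http://other.org/")   # False: outside the root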
--- var/spack/packages/qthreads/package.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 var/spack/packages/qthreads/package.py diff --git a/var/spack/packages/qthreads/package.py b/var/spack/packages/qthreads/package.py new file mode 100644 index 0000000000..dacdb71524 --- /dev/null +++ b/var/spack/packages/qthreads/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Qthreads(Package): + """The qthreads API is designed to make using large numbers of + threads convenient and easy, and to allow portable access to + threading constructs used in massively parallel shared memory + environments. The API maps well to both MTA-style threading and + PIM-style threading, and we provide an implementation of this + interface in both a standard SMP context as well as the SST + context. The qthreads API provides access to full/empty-bit + (FEB) semantics, where every word of memory can be marked + either full or empty, and a thread can wait for any word to + attain either state.""" + homepage = "http://www.cs.sandia.gov/qthreads/" + url = "https://qthreads.googlecode.com/files/qthread-1.10.tar.bz2" + + version('1.10', '5af8c8bbe88c2a6d45361643780d1671') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 57076f6ca4ae37cf2d0cc91fa7b4de688df1be1c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 8 Nov 2014 11:42:54 -0800 Subject: URL parsing improvements --- lib/spack/env/cc | 10 ++++- lib/spack/spack/spec.py | 5 ++- lib/spack/spack/test/url_extrapolate.py | 38 ++++++++++++++----- lib/spack/spack/url.py | 67 ++++++++++++++++++--------------- var/spack/packages/jpeg/package.py | 2 +- var/spack/packages/openmpi/package.py | 6 +-- 6 files changed, 81 insertions(+), 47 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index f68cb0b2cd..19ca31cace 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -275,9 +275,15 @@ for dir in "${libraries[@]}"; do args+=("-L$dir"); done for lib in "${libs[@]}"; do args+=("-l$lib"); done if [ "$mode" = ccld ]; then - for dir in "${rpaths[@]}"; do args+=("-Wl,-rpath=$dir"); done + for dir in "${rpaths[@]}"; do + args+=("-Wl,-rpath") + args+=("-Wl,$dir"); + done elif [ "$mode" = ld ]; then - for dir in "${rpaths[@]}"; do args+=("-rpath=$dir"); done + for dir in "${rpaths[@]}"; do + args+=("-rpath") + args+=("$dir"); + done fi # diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index a0ab38c049..570bb1191c 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1096,8 +1096,9 @@ class Spec(object): def __contains__(self, spec): - """True if this spec has any dependency that satisfies the supplied - spec.""" + """True if this spec satisfis the provided spec, or if any dependency + does. If the spec has no name, then we parse this one first. 
+ """ spec = self._autospec(spec) for s in self.traverse(): if s.satisfies(spec): diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index c30ff1f009..71aa249e49 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -34,15 +34,35 @@ from spack.test.mock_packages_test import * class UrlExtrapolateTest(MockPackagesTest): - def test_known_version(self): - d = spack.db.get('dyninst') - - self.assertEqual( - d.url_for_version('8.2'), 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') + def test_libelf_version(self): + base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" + self.assertEqual(url.substitute_version(base, '0.8.13'), base) + self.assertEqual(url.substitute_version(base, '0.8.12'), + "http://www.mr511.de/software/libelf-0.8.12.tar.gz") + self.assertEqual(url.substitute_version(base, '0.3.1'), + "http://www.mr511.de/software/libelf-0.3.1.tar.gz") + self.assertEqual(url.substitute_version(base, '1.3.1b'), + "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") + + + def test_libdwarf_version(self): + base = "http://www.prevanders.net/libdwarf-20130729.tar.gz" + self.assertEqual(url.substitute_version(base, '20130729'), base) + self.assertEqual(url.substitute_version(base, '8.12'), + "http://www.prevanders.net/libdwarf-8.12.tar.gz") + + + def test_dyninst_version(self): + # Dyninst has a version twice in the URL. + base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" + self.assertEqual(url.substitute_version(base, '8.1.2'), base) self.assertEqual( - d.url_for_version('8.1.2'), 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz') + url.substitute_version(base, '8.2'), + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz") + self.assertEqual( - d.url_for_version('8.1.1'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') + url.substitute_version(base, '8.3.1'), + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz") def test_extrapolate_version(self): @@ -59,8 +79,8 @@ class UrlExtrapolateTest(MockPackagesTest): # 8.2 matches both the release8.2 component and the DyninstAPI-8.2 component. # Extrapolation should replace both with the new version. # TODO: figure out a consistent policy for this. - # self.assertEqual( - # d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz') + self.assertEqual( + d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz') def test_with_package(self): diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index a0410131b0..1c0c0d2438 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -57,27 +57,6 @@ from spack.version import Version # "path" seemed like the most generic term. 
# -class UrlParseError(spack.error.SpackError): - """Raised when the URL module can't parse something correctly.""" - def __init__(self, msg, path): - super(UrlParseError, self).__init__(msg) - self.path = path - - -class UndetectableVersionError(UrlParseError): - """Raised when we can't parse a version from a string.""" - def __init__(self, path): - super(UndetectableVersionError, self).__init__( - "Couldn't detect version in: " + path, path) - - -class UndetectableNameError(UrlParseError): - """Raised when we can't parse a package name from a string.""" - def __init__(self, path): - super(UndetectableNameError, self).__init__( - "Couldn't parse package name in: " + path, path) - - def find_list_url(url): """Finds a good list URL for the supplied URL. This depends on the site. By default, just assumes that a good list URL is the @@ -98,7 +77,7 @@ def find_list_url(url): return os.path.dirname(url) -def parse_version_string_with_indices(path): +def parse_version_offset(path): """Try to extract a version string from a filename or URL. This is taken largely from Homebrew's Version class.""" @@ -112,6 +91,7 @@ def parse_version_string_with_indices(path): # Take basename to avoid including parent dirs in version name # Remember the offset of the stem in the full path. stem = os.path.basename(path) + offset = len(path) - len(stem) version_types = [ # GitHub tarballs, e.g. v1.2.3 @@ -172,13 +152,13 @@ def parse_version_string_with_indices(path): # e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz (r'\.v(\d+[a-z]?)', stem)] - for vtype in version_types: + for i, vtype in enumerate(version_types): regex, match_string = vtype[:2] match = re.search(regex, match_string) if match and match.group(1) is not None: version = match.group(1) - start = path.index(version) - return version, start, start+len(version) + start = offset + match.start(1) + return version, start, len(version) raise UndetectableVersionError(path) @@ -187,11 +167,11 @@ def parse_version(path): """Given a URL or archive name, extract a version from it and return a version object. """ - ver, start, end = parse_version_string_with_indices(path) + ver, start, l = parse_version_offset(path) return Version(ver) -def parse_name(path, ver=None): +def parse_name_offset(path, ver=None): if ver is None: ver = parse_version(path) @@ -207,10 +187,16 @@ def parse_name(path, ver=None): for nt in ntypes: match = re.search(nt, path) if match: - return match.group(1) + name = match.group(1) + return name, match.start(1), len(name) raise UndetectableNameError(path) +def parse_name(path, ver=None): + name, start, l = parse_name_offset(path, ver) + return name + + def parse_name_and_version(path): ver = parse_version(path) name = parse_name(path, ver) @@ -231,8 +217,8 @@ def substitute_version(path, new_version): """Given a URL or archive name, find the version in the path and substitute the new version for it. """ - ver, start, end = parse_version_string_with_indices(path) - return path[:start] + str(new_version) + path[end:] + ver, start, l = parse_version_offset(path) + return path[:start] + str(new_version) + path[(start+l):] def wildcard_version(path): @@ -266,3 +252,24 @@ def wildcard_version(path): # Put it all back together with original name matches intact. return ''.join(name_parts) + '.' 
+ ext + + +class UrlParseError(spack.error.SpackError): + """Raised when the URL module can't parse something correctly.""" + def __init__(self, msg, path): + super(UrlParseError, self).__init__(msg) + self.path = path + + +class UndetectableVersionError(UrlParseError): + """Raised when we can't parse a version from a string.""" + def __init__(self, path): + super(UndetectableVersionError, self).__init__( + "Couldn't detect version in: " + path, path) + + +class UndetectableNameError(UrlParseError): + """Raised when we can't parse a package name from a string.""" + def __init__(self, path): + super(UndetectableNameError, self).__init__( + "Couldn't parse package name in: " + path, path) diff --git a/var/spack/packages/jpeg/package.py b/var/spack/packages/jpeg/package.py index b34fd5cb2d..bb5b77e01c 100644 --- a/var/spack/packages/jpeg/package.py +++ b/var/spack/packages/jpeg/package.py @@ -5,7 +5,7 @@ class Jpeg(Package): homepage = "http://www.ijg.org" url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" - version('9', 'b397211ddfd506b92cd5e02a22ac924d') + version('9a', 'b397211ddfd506b92cd5e02a22ac924d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index 1ef8a8f000..7e84cbaf65 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -13,9 +13,9 @@ class Openmpi(Package): version('1.8.2', 'ab538ed8e328079d566fc797792e016e', url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz') - version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475', url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2") + patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5") patch('llnl-platforms.patch', when="@1.6.5") @@ -27,8 +27,8 @@ class Openmpi(Package): # TODO: use variants for this, e.g. +lanl, +llnl, etc. # use this for LANL builds, but for LLNL builds, we need: # "--with-platform=contrib/platform/llnl/optimized" - if self.version == ver("1.6.5"): - confg_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas") + if self.version == ver("1.6.5") and '+lanl' in spec: + config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas") # TODO: Spack should make it so that you can't actually find # these compilers if they're "disabled" for the current -- cgit v1.2.3-70-g09d2 From 1da5d12bdd79e42aafdc3f8dda136fc75bc92257 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 8 Nov 2014 22:08:15 -0800 Subject: 'spack urls' debugging command, more consistent URL extrapolation. - spack urls inspects all URLs in packages, prints out colors to show how they are parased. - URL extrapolation test added. - Extrapolation is more consistent now. - Extrapolation handles more complex URLs. - More test cases for extrapolation. --- lib/spack/spack/cmd/urls.py | 58 +++++++++ lib/spack/spack/test/url_extrapolate.py | 119 ++++++++---------- lib/spack/spack/test/url_parse.py | 27 ++++ lib/spack/spack/url.py | 211 ++++++++++++++++++++++++++++---- 4 files changed, 323 insertions(+), 92 deletions(-) create mode 100644 lib/spack/spack/cmd/urls.py diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py new file mode 100644 index 0000000000..417ce3ab68 --- /dev/null +++ b/lib/spack/spack/cmd/urls.py @@ -0,0 +1,58 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys +import spack +import spack.url + +description = "Inspect urls used by packages in spack." + +def setup_parser(subparser): + subparser.add_argument( + '-c', '--color', action='store_true', + help="Color the parsed version and name in the urls shown. " + "Version will be cyan, name red.") + subparser.add_argument( + '-e', '--extrapolation', action='store_true', + help="Color the versions used for extrapolation as well." + "Additional versions are green, names magenta.") + + +def urls(parser, args): + urls = set() + for pkg in spack.db.all_packages(): + url = getattr(pkg.__class__, 'url', None) + if url: + urls.add(url) + + for params in pkg.versions.values(): + url = params.get('url', None) + if url: + urls.add(url) + + for url in sorted(urls): + if args.color or args.extrapolation: + print spack.url.color_url(url, subs=args.extrapolation, errors=True) + else: + print url diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index 71aa249e49..d381c1a1e4 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -32,80 +32,69 @@ from spack.version import ver from spack.test.mock_packages_test import * -class UrlExtrapolateTest(MockPackagesTest): +class UrlExtrapolateTest(unittest.TestCase): + + def check_url(self, base, version, new_url): + self.assertEqual(url.substitute_version(base, version), new_url) + def test_libelf_version(self): base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" - self.assertEqual(url.substitute_version(base, '0.8.13'), base) - self.assertEqual(url.substitute_version(base, '0.8.12'), - "http://www.mr511.de/software/libelf-0.8.12.tar.gz") - self.assertEqual(url.substitute_version(base, '0.3.1'), - "http://www.mr511.de/software/libelf-0.3.1.tar.gz") - self.assertEqual(url.substitute_version(base, '1.3.1b'), - "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") + self.check_url(base, '0.8.13', base) + self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz") + self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz") + self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") def test_libdwarf_version(self): base = "http://www.prevanders.net/libdwarf-20130729.tar.gz" - self.assertEqual(url.substitute_version(base, '20130729'), base) - self.assertEqual(url.substitute_version(base, '8.12'), - 
"http://www.prevanders.net/libdwarf-8.12.tar.gz") + self.check_url(base, '20130729', base) + self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz") def test_dyninst_version(self): # Dyninst has a version twice in the URL. base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" - self.assertEqual(url.substitute_version(base, '8.1.2'), base) - self.assertEqual( - url.substitute_version(base, '8.2'), - "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz") - - self.assertEqual( - url.substitute_version(base, '8.3.1'), - "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz") - - - def test_extrapolate_version(self): - d = spack.db.get('dyninst') - - # Nearest URL for 8.1.1.5 is 8.1.1, and the URL there is - # release8.1/DyninstAPI-8.1.1.tgz. Only the last part matches - # the version, so only extrapolate the last part. Obviously - # dyninst has ambiguous URL versions, but we want to make sure - # extrapolation works in a well-defined way. - self.assertEqual( - d.url_for_version('8.1.1.5'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.5.tgz') - - # 8.2 matches both the release8.2 component and the DyninstAPI-8.2 component. - # Extrapolation should replace both with the new version. - # TODO: figure out a consistent policy for this. - self.assertEqual( - d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz') - - - def test_with_package(self): - d = spack.db.get('dyninst@8.2') - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') - - d = spack.db.get('dyninst@8.1.2') - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz') - - d = spack.db.get('dyninst@8.1.1') - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') - - - def test_concrete_package(self): - s = Spec('dyninst@8.2') - s.concretize() - d = spack.db.get(s) - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') - - s = Spec('dyninst@8.1.2') - s.concretize() - d = spack.db.get(s) - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz') - - s = Spec('dyninst@8.1.1') - s.concretize() - d = spack.db.get(s) - self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') + self.check_url(base, '8.1.2', base) + self.check_url(base, '8.2', + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz") + self.check_url(base, '8.3.1', + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz") + + + def test_partial_version_prefix(self): + # Test now with a partial prefix earlier in the URL -- this is + # hard to figure out so Spack only substitutes the last + # instance of the version. 
+ base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.2.tgz" + self.check_url(base, '8.1.2', base) + self.check_url(base, '8.1.4', + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.4.tgz") + self.check_url(base, '8.2', + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.2.tgz") + self.check_url(base, '8.3.1', + "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz") + + + def test_scalasca_partial_version(self): + # Note that this probably doesn't actually work, but sites are + # inconsistent about their directory structure, so it's not + # clear what is right. This test is for consistency and to + # document behavior. If you figure out a good way to handle + # this case, fix the tests too. + self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1', + 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz') + self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1', + 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz') + + + def test_mpileaks_version(self): + self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3', + 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz') + + + def test_gcc(self): + self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7', + 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2') + self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7', + 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2') diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index 7a4d201d90..b8cca1e52a 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -295,3 +295,30 @@ class UrlParseTest(unittest.TestCase): self.check( 'hdf5', '1.8.13', 'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2') + + def test_scalasca_version(self): + self.check( + 'cube', '4.2.3', + 'http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz') + self.check( + 'cube', '4.3-TP1', + 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz') + + def test_mpileaks_version(self): + self.check( + 'mpileaks', '1.0', + 'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz') + self.check( + 'mpileaks', '1.0', + 'https://github.com/hpc/mpileaks/releases/download/1.0/mpileaks-1.0.tar.gz') + + def test_gcc_version(self): + self.check( + 'gcc', '4.4.7', + 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2') + + def test_gcc_version_precedence(self): + # prefer the version in the tarball, not in the url prefix. + self.check( + 'gcc', '4.4.7', + 'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2') diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 1c0c0d2438..27ef6f3b29 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -46,6 +46,9 @@ it's never been told about that version before. """ import os import re +from StringIO import StringIO + +from llnl.util.tty.color import * import spack.error import spack.util.compression as comp @@ -112,6 +115,10 @@ def parse_version_offset(path): # e.g. 
https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style) (r'[-_](R\d+[AB]\d*(-\d+)?)', path), + # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz + # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz + (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path), + # e.g. boost_1_39_0 (r'((\d+_)+\d+)$', stem), @@ -126,7 +133,7 @@ def parse_version_offset(path): (r'-((\d+\.)*\d+)$', stem), # e.g. foobar-4.5.1b - (r'-((\d+\.)*\d+([a-z]|rc|RC)\d*)$', stem), + (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem), # e.g. foobar-4.5.0-beta1, or foobar-4.50-beta (r'-((\d+\.)*\d+-beta(\d+)?)$', stem), @@ -153,11 +160,16 @@ def parse_version_offset(path): (r'\.v(\d+[a-z]?)', stem)] for i, vtype in enumerate(version_types): - regex, match_string = vtype[:2] + regex, match_string = vtype match = re.search(regex, match_string) if match and match.group(1) is not None: version = match.group(1) - start = offset + match.start(1) + start = match.start(1) + + # if we matched from the basename, then add offset in. + if match_string is stem: + start += offset + return version, start, len(version) raise UndetectableVersionError(path) @@ -171,24 +183,46 @@ def parse_version(path): return Version(ver) -def parse_name_offset(path, ver=None): - if ver is None: - ver = parse_version(path) +def parse_name_offset(path, v=None): + if v is None: + v = parse_version(path) - ntypes = (r'/sourceforge/([^/]+)/', - r'/([^/]+)/(tarball|zipball)/', - r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % ver, - r'github.com/[^/]+/([^/]+)/archive', - r'/([^/]+)[_.-]v?%s' % ver, - r'/([^/]+)%s' % ver, - r'^([^/]+)[_.-]v?%s' % ver, - r'^([^/]+)%s' % ver) + # Strip archive extension + path = comp.strip_extension(path) - for nt in ntypes: - match = re.search(nt, path) + # Allow matching with either path or stem, as with the version. + stem = os.path.basename(path) + offset = len(path) - len(stem) + + name_types = [ + (r'/sourceforge/([^/]+)/', path), + (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path), + (r'/([^/]+)/(tarball|zipball)/', path), + (r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path), + (r'github.com/[^/]+/([^/]+)/archive', path), + + (r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem + (r'([^/]+)%s' % v, stem), + + (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem. + (r'/([^/]+)%s' % v, path), + + (r'^([^/]+)[_.-]v?%s' % v, path), + (r'^([^/]+)%s' % v, path)] + + for i, name_type in enumerate(name_types): + regex, match_string = name_type + match = re.search(regex, match_string) if match: - name = match.group(1) - return name, match.start(1), len(name) + name = match.group(1) + start = match.start(1) + + # if we matched from the basename, then add offset in. + if match_string is stem: + start += offset + + return name, start, len(name) + raise UndetectableNameError(path) @@ -204,7 +238,7 @@ def parse_name_and_version(path): def insensitize(string): - """Chagne upper and lowercase letters to be case insensitive in + """Change upper and lowercase letters to be case insensitive in the provided string. e.g., 'a' because '[Aa]', 'B' becomes '[bB]', etc. Use for building regexes.""" def to_ins(match): @@ -213,12 +247,53 @@ def insensitize(string): return re.sub(r'([a-zA-Z])', to_ins, string) -def substitute_version(path, new_version): - """Given a URL or archive name, find the version in the path and substitute - the new version for it. 
+def cumsum(elts, init=0, fn=lambda x:x): + """Return cumulative sum of result of fn on each element in elts.""" + sums = [] + s = init + for i, e in enumerate(elts): + sums.append(s) + s += fn(e) + return sums + + +def substitution_offsets(path): + """This returns offsets for substituting versions and names in the provided path. + It is a helper for substitute_version(). """ - ver, start, l = parse_version_offset(path) - return path[:start] + str(new_version) + path[(start+l):] + # Get name and version offsets + try: + ver, vs, vl = parse_version_offset(path) + name, ns, nl = parse_name_offset(path, ver) + except UndetectableNameError, e: + return (None, -1, -1, (), ver, vs, vl, (vs,)) + except UndetectableVersionError, e: + return (None, -1, -1, (), None, -1, -1, ()) + + # protect extensions like bz2 from getting inadvertently + # considered versions. + ext = comp.extension(path) + path = comp.strip_extension(path) + + # Construct a case-insensitive regular expression for the package name. + name_re = '(%s)' % insensitize(name) + + # Split the string apart by things that match the name so that if the + # name contains numbers or things that look like versions, we don't + # accidentally substitute them with a version. + name_parts = re.split(name_re, path) + + offsets = cumsum(name_parts, 0, len) + name_offsets = offsets[1::2] + + ver_offsets = [] + for i in xrange(0, len(name_parts), 2): + vparts = re.split(ver, name_parts[i]) + voffsets = cumsum(vparts, offsets[i], len) + ver_offsets.extend(voffsets[1::2]) + + return (name, ns, nl, tuple(name_offsets), + ver, vs, vl, tuple(ver_offsets)) def wildcard_version(path): @@ -228,13 +303,13 @@ def wildcard_version(path): # Get name and version, so we can treat them specially name, v = parse_name_and_version(path) - # Construct a case-insensitive regular expression for the package name. - name_re = '(%s)' % insensitize(name) - # protect extensions like bz2 from wildcarding. ext = comp.extension(path) path = comp.strip_extension(path) + # Construct a case-insensitive regular expression for the package name. + name_re = '(%s)' % insensitize(name) + # Split the string apart by things that match the name so that if the # name contains numbers or things that look like versions, we don't # catch them with the version wildcard. @@ -254,6 +329,88 @@ def wildcard_version(path): return ''.join(name_parts) + '.' + ext +def substitute_version(path, new_version): + """Given a URL or archive name, find the version in the path and + substitute the new version for it. Replace all occurrences of + the version *if* they don't overlap with the package name. + + Simple example:: + substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3') + ->'http://www.mr511.de/software/libelf-2.9.3.tar.gz' + + Complex examples:: + substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz', 2.1) + -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz' + + # In this string, the "2" in mvapich2 is NOT replaced. 
+ substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.tar.gz', 2.1) + -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz' + + """ + (name, ns, nl, noffs, + ver, vs, vl, voffs) = substitution_offsets(path) + + new_path = '' + last = 0 + for vo in voffs: + new_path += path[last:vo] + new_path += str(new_version) + last = vo + vl + + new_path += path[last:] + return new_path + + +def color_url(path, **kwargs): + """Color the parts of the url according to Spack's parsing. + + Colors are: + Cyan: The version found by parse_version_offset(). + Red: The name found by parse_name_offset(). + + Green: Instances of version string substituted by substitute_version(). + Magenta: Instances of the name (protected from substitution). + + Optional args: + errors=True Append parse errors at end of string. + subs=True Color substitutions as well as parsed name/version. + + """ + errors = kwargs.get('errors', False) + subs = kwargs.get('subs', False) + + (name, ns, nl, noffs, + ver, vs, vl, voffs) = substitution_offsets(path) + + nends = [no + nl - 1 for no in noffs] + vends = [vo + vl - 1 for vo in voffs] + + nerr = verr = 0 + out = StringIO() + for i in range(len(path)): + if i == vs: out.write('@c'); verr += 1 + elif i == ns: out.write('@r'); nerr += 1 + elif subs: + if i in voffs: out.write('@g') + elif i in noffs: out.write('@m') + + out.write(path[i]) + + if i == vs + vl - 1: out.write('@.'); verr += 1 + elif i == ns + nl - 1: out.write('@.'); nerr += 1 + elif subs: + if i in vends or i in nends: + out.write('@.') + + if errors: + if nerr == 0: out.write(" @r{[no name]}") + if verr == 0: out.write(" @r{[no version]}") + if nerr == 1: out.write(" @r{[incomplete name]}") + if verr == 1: out.write(" @r{[incomplete version]}") + + return colorize(out.getvalue()) + + class UrlParseError(spack.error.SpackError): """Raised when the URL module can't parse something correctly.""" def __init__(self, msg, path): -- cgit v1.2.3-70-g09d2 From 1a424c124cf49598cbefc41231ce8fa5e0f33079 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 8 Nov 2014 22:18:08 -0800 Subject: Python 2.6 fix for Mac OS --- lib/spack/spack/architecture.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 9b4c6e576d..0c4b605e91 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -65,7 +65,7 @@ def get_mac_sys_type(): if not mac_ver: return None - return "macosx_{}_{}".format( + return "macosx_%s_%s" % ( Version(mac_ver).up_to(2), py_platform.machine()) -- cgit v1.2.3-70-g09d2 From 0d044cdc1b70f9e522b61b7b5cb8f03d188a686b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 8 Nov 2014 22:18:20 -0800 Subject: Shorter help strings. --- lib/spack/spack/cmd/checksum.py | 2 +- lib/spack/spack/cmd/pkg.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 3f2a9aa745..846817b918 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -38,7 +38,7 @@ import spack.util.crypto from spack.stage import Stage, FailedDownloadError from spack.version import * -description ="Checksum available versions of a package to update a package file." +description ="Checksum available versions of a package." 
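The architecture.py change a few hunks up deserves a one-line explanation: Python 2.6 does not accept auto-numbered '{}' replacement fields, so the old format call raised a ValueError before the Mac-specific logic ever ran. Either spelling below works on 2.6 and later; the values are just placeholders.

    # "macosx_{}_{}".format(ver, machine)   # ValueError on Python 2.6
    sys_type = "macosx_%s_%s" % ("10.9", "x86_64")         # form used by the patch
    sys_type = "macosx_{0}_{1}".format("10.9", "x86_64")   # explicit indices also work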
def setup_parser(subparser): subparser.add_argument( diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index da905603fa..055b7c2062 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -31,7 +31,7 @@ from llnl.util.tty.colify import colify import spack from spack.util.executable import * -description = "Query packages associated with particular git revisions in spack." +description = "Query packages associated with particular git revisions." def setup_parser(subparser): sp = subparser.add_subparsers( -- cgit v1.2.3-70-g09d2 From eba13b865314b4de0b8767acf5232713d6deff75 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 8 Nov 2014 23:20:01 -0800 Subject: Checksum warning now prompts for override. --- lib/spack/llnl/util/tty/__init__.py | 27 +++++++++++++++++++++++++++ lib/spack/spack/package.py | 13 ++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 40cae11200..652d1cc1a8 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -114,6 +114,33 @@ def get_number(prompt, **kwargs): return number +def get_yes_or_no(prompt, **kwargs): + default_value = kwargs.get('default', None) + + if default_value is None: + prompt += ' [y/n] ' + elif default_value is True: + prompt += ' [Y/n] ' + elif default_value is False: + prompt += ' [y/N] ' + else: + raise ValueError("default for get_yes_no() must be True, False, or None.") + + result = None + while result is None: + ans = raw_input(prompt).lower() + if not ans: + result = default_value + if result is None: + print "Please enter yes or no." + else: + if ans == 'y' or ans == 'yes': + result = True + elif ans == 'n' or ans == 'no': + result = False + return result + + def hline(label=None, **kwargs): """Draw an optionally colored or labeled horizontal line. Options: diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b5340fa226..e7905cb410 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -613,11 +613,14 @@ class Package(object): raise ValueError("Can only fetch concrete packages.") if spack.do_checksum and not self.version in self.versions: - raise FetchError( - "Cannot fetch %s safely; there is no checksum on file for version %s." - % (self.name, self.version), - "Add a checksum to the package file, or use --no-checksum to " - "skip this check.") + tty.warn("There is no checksum on file to fetch %s safely." + % self.spec.format('$_$@')) + ignore = tty.get_yes_or_no(" Fetch anyway?", default=False) + msg = "Add a checksum or use --no-checksum to skip this check." + if ignore: + tty.msg("Fetching with no checksum.", msg) + else: + raise FetchError("Will not fetch %s." % self.spec.format('$_$@'), msg) self.stage.fetch() -- cgit v1.2.3-70-g09d2 From 321a3a55c73b4f48a5b51f19736d70484cfd4078 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 16 Nov 2014 15:26:00 -0800 Subject: Prompt the user about checksums only if interactive. --- lib/spack/spack/package.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index e7905cb410..bb6180c521 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -615,12 +615,19 @@ class Package(object): if spack.do_checksum and not self.version in self.versions: tty.warn("There is no checksum on file to fetch %s safely." 
% self.spec.format('$_$@')) - ignore = tty.get_yes_or_no(" Fetch anyway?", default=False) - msg = "Add a checksum or use --no-checksum to skip this check." - if ignore: - tty.msg("Fetching with no checksum.", msg) - else: - raise FetchError("Will not fetch %s." % self.spec.format('$_$@'), msg) + + # Ask the user whether to skip the checksum if we're + # interactive, but just fail if non-interactive. + checksum_msg = "Add a checksum or use --no-checksum to skip this check." + ignore_checksum = False + if sys.stdout.isatty(): + ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False) + if ignore_checksum: + tty.msg("Fetching with no checksum.", checksum_msg) + + if not ignore_checksum: + raise FetchError( + "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg) self.stage.fetch() -- cgit v1.2.3-70-g09d2 From d2fe038cafc2fb2f8835bc1d4e51df4b567c0e59 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 11 Nov 2014 23:39:43 -0800 Subject: Minor bugfix for 404 error on fetch. --- lib/spack/spack/fetch_strategy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 80e2cdf413..d48b999ddc 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -156,7 +156,7 @@ class URLFetchStrategy(FetchStrategy): if spack.curl.returncode == 22: # This is a 404. Curl will print the error. - raise FailedDownloadError(url) + raise FailedDownloadError(self.url) if spack.curl.returncode == 60: # This is a certificate error. Suggest spack -k -- cgit v1.2.3-70-g09d2 From 287b04e50a8a132f224def6ee1b36e8f4d0b105f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 23 Nov 2014 17:53:21 -0600 Subject: Bugfix in terminal_size() --- lib/spack/llnl/util/tty/__init__.py | 23 +++++++++++++---------- lib/spack/llnl/util/tty/colify.py | 4 ++-- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 652d1cc1a8..5eeab67d6f 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -25,6 +25,9 @@ import sys import os import textwrap +import fcntl +import termios +import struct from StringIO import StringIO from llnl.util.tty.color import * @@ -155,7 +158,7 @@ def hline(label=None, **kwargs): color = kwargs.get('color', '') max_width = kwargs.get('max_width', 64) - cols, rows = terminal_size() + rows, cols = terminal_size() if not cols: cols = max_width else: @@ -178,22 +181,22 @@ def hline(label=None, **kwargs): def terminal_size(): - """Gets the dimensions of the console: cols, rows.""" + """Gets the dimensions of the console: (rows, cols).""" def ioctl_GWINSZ(fd): try: - cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) except: return - return cr - cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) - if not cr: + return rc + rc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not rc: try: fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) + rc = ioctl_GWINSZ(fd) os.close(fd) except: pass - if not cr: - cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + if not rc: + rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) - return int(cr[1]), int(cr[0]) + return int(rc[0]), int(rc[1]) diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index ff06241937..0d068351a6 100644 --- 
a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -121,7 +121,7 @@ def colify(elts, **options): console_cols = options.get("cols", None) if not console_cols: - console_cols, console_rows = terminal_size() + console_rows, console_cols = terminal_size() elif type(console_cols) != int: raise ValueError("Number of columns must be an int") console_cols = max(1, console_cols - indent) @@ -167,7 +167,7 @@ def colified(elts, **options): if __name__ == "__main__": import optparse - cols, rows = terminal_size() + rows, cols = terminal_size() parser = optparse.OptionParser() parser.add_option("-u", "--uniform", action="store_true", default=False, help="Use uniformly sized columns instead of variable-size.") -- cgit v1.2.3-70-g09d2 From 22e4d11010059614bfc310a4d459be2f84524c87 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 23 Nov 2014 17:55:57 -0600 Subject: Cleanup code in colify. --- lib/spack/llnl/util/tty/colify.py | 109 ++++++++++++++++++++------------------ 1 file changed, 57 insertions(+), 52 deletions(-) diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 0d068351a6..300bcfdd79 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -22,16 +22,9 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -# colify -# By Todd Gamblin, tgamblin@llnl.gov -# -# Takes a list of items as input and finds a good columnization of them, -# similar to how gnu ls does. You can pipe output to this script and -# get a tight display for it. This supports both uniform-width and -# variable-width (tighter) columns. -# -# Run colify -h for more information. -# +""" +Routines for printing columnar output. See colify() for more information. +""" import os import sys import fcntl @@ -54,12 +47,21 @@ class ColumnConfig: return "" % ", ".join("%s: %r" % a for a in attrs) -def config_variable_cols(elts, console_cols, padding): +def config_variable_cols(elts, console_width, padding): + """Variable-width column fitting algorithm. + + This function determines the most columns that can fit in the + screen width. Unlike uniform fitting, where all columns take + the width of the longest element in the list, each column takes + the width of its own longest element. This packs elements more + efficiently on screen. + """ # Get a bound on the most columns we could possibly have. lengths = [len(elt) for elt in elts] - max_cols = max(1, console_cols / (min(lengths) + padding)) + max_cols = max(1, console_width / (min(lengths) + padding)) max_cols = min(len(elts), max_cols) + # Determine the most columns possible for the console width. configs = [ColumnConfig(c) for c in xrange(1, max_cols+1)] for elt, length in enumerate(lengths): for i, conf in enumerate(configs): @@ -72,7 +74,7 @@ def config_variable_cols(elts, console_cols, padding): if conf.widths[col] < padded: conf.line_length += padded - conf.widths[col] conf.widths[col] = padded - conf.valid = (conf.line_length < console_cols) + conf.valid = (conf.line_length < console_width) try: config = next(conf for conf in reversed(configs) if conf.valid) @@ -85,26 +87,55 @@ def config_variable_cols(elts, console_cols, padding): return config -def config_uniform_cols(elts, console_cols, padding): +def config_uniform_cols(elts, console_width, padding): + """Uniform-width column fitting algorithm. 
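The column-fitting code in this file is easier to reason about with the criterion written out plainly: in variable-width mode each column is only as wide as its own longest entry, and the largest column count whose total width still fits the console wins. The self-contained check below is a simplified stand-in for the incremental algorithm in config_variable_cols(), not the same code; the package names are sample data.

    import math

    def fits(lengths, cols, width, padding=2):
        # Lay elements out column-major and sum each column's longest entry.
        rows = int(math.ceil(len(lengths) / float(cols)))
        col_widths = [max(lengths[c * rows:(c + 1) * rows] or [0]) + padding
                      for c in range(cols)]
        return sum(col_widths) <= width

    lengths = [len(s) for s in ['zlib', 'mpich', 'boost', 'libelf', 'dyninst']]
    best = max(c for c in range(1, len(lengths) + 1) if fits(lengths, c, 40))
    # best == 5 for a 40-character console; with width=20 only 2 columns fit.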
+ + Determines the longest element in the list, and determines how + many columns of that width will fit on screen. Returns a + corresponding column config. + """ max_len = max(len(elt) for elt in elts) + padding - cols = max(1, console_cols / max_len) + cols = max(1, console_width / max_len) cols = min(len(elts), cols) config = ColumnConfig(cols) config.widths = [max_len] * cols return config -def isatty(ostream): - force = os.environ.get('COLIFY_TTY', 'false').lower() != 'false' - return force or ostream.isatty() +def colify(elts, **options): + """Takes a list of elements as input and finds a good columnization + of them, similar to how gnu ls does. This supports both + uniform-width and variable-width (tighter) columns. + + If elts is not a list of strings, each element is first conveted + using str(). + Keyword arguments: -def colify(elts, **options): + output= A file object to write to. Default is sys.stdout. + indent= Optionally indent all columns by some number of spaces. + padding= Spaces between columns. Default is 2. + + tty= Whether to attempt to write to a tty. Default is to + autodetect a tty. Set to False to force single-column output. + + method= Method to use to fit columns. Options are variable or uniform. + Variable-width columns are tighter, uniform columns are all the + same width and fit less data on the screen. + + width= Width of the output. Default is 80 if tty is not detected. + """ # Get keyword arguments or set defaults - output = options.get("output", sys.stdout) - indent = options.get("indent", 0) - padding = options.get("padding", 2) - tty = options.get('tty', None) + output = options.pop("output", sys.stdout) + indent = options.pop("indent", 0) + padding = options.pop("padding", 2) + tty = options.pop('tty', None) + method = options.pop("method", "variable") + console_cols = options.pop("width", None) + + if options: + raise TypeError("'%s' is an invalid keyword argument for this function." + % next(options.iterkeys())) # elts needs to be an array of strings so we can count the elements elts = [str(elt) for elt in elts] @@ -112,21 +143,21 @@ def colify(elts, **options): return (0, ()) if not tty: - if tty is False or not isatty(output): + if tty is False or not output.isatty(): for elt in elts: output.write("%s\n" % elt) maxlen = max(len(str(s)) for s in elts) return (1, (maxlen,)) - console_cols = options.get("cols", None) + # Specify the number of character columns to use. if not console_cols: console_rows, console_cols = terminal_size() elif type(console_cols) != int: raise ValueError("Number of columns must be an int") console_cols = max(1, console_cols - indent) - method = options.get("method", "variable") + # Choose a method. Variable-width colums vs uniform-width. if method == "variable": config = config_variable_cols(elts, console_cols, padding) elif method == "uniform": @@ -162,29 +193,3 @@ def colified(elts, **options): options['output'] = sio colify(elts, **options) return sio.getvalue() - - -if __name__ == "__main__": - import optparse - - rows, cols = terminal_size() - parser = optparse.OptionParser() - parser.add_option("-u", "--uniform", action="store_true", default=False, - help="Use uniformly sized columns instead of variable-size.") - parser.add_option("-p", "--padding", metavar="PADDING", action="store", - type=int, default=2, help="Spaces to add between columns. Default is 2.") - parser.add_option("-i", "--indent", metavar="SPACES", action="store", - type=int, default=0, help="Indent the output by SPACES. 
Default is 0.") - parser.add_option("-w", "--width", metavar="COLS", action="store", - type=int, default=cols, help="Indent the output by SPACES. Default is 0.") - options, args = parser.parse_args() - - method = "variable" - if options.uniform: - method = "uniform" - - if sys.stdin.isatty(): - parser.print_help() - sys.exit(1) - else: - colify([line.strip() for line in sys.stdin], method=method, **options.__dict__) -- cgit v1.2.3-70-g09d2 From 72c753b93e368441ba7fd281c74886c5e52a820c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 1 Dec 2014 21:29:01 -0800 Subject: Colify now supports fixing the number of columns. --- lib/spack/llnl/util/tty/colify.py | 44 ++++++++++++++++++++++++++++----------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 300bcfdd79..2c93eaadd5 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -47,7 +47,7 @@ class ColumnConfig: return "" % ", ".join("%s: %r" % a for a in attrs) -def config_variable_cols(elts, console_width, padding): +def config_variable_cols(elts, console_width, padding, cols=0): """Variable-width column fitting algorithm. This function determines the most columns that can fit in the @@ -55,20 +55,28 @@ def config_variable_cols(elts, console_width, padding): the width of the longest element in the list, each column takes the width of its own longest element. This packs elements more efficiently on screen. + + If cols is nonzero, force """ + if cols < 0: + raise ValueError("cols must be non-negative.") + # Get a bound on the most columns we could possibly have. lengths = [len(elt) for elt in elts] max_cols = max(1, console_width / (min(lengths) + padding)) max_cols = min(len(elts), max_cols) + # Range of column counts to try. If forced, use the supplied value. + col_range = [cols] if cols else xrange(1, max_cols+1) + # Determine the most columns possible for the console width. - configs = [ColumnConfig(c) for c in xrange(1, max_cols+1)] + configs = [ColumnConfig(c) for c in col_range] for elt, length in enumerate(lengths): - for i, conf in enumerate(configs): + for conf in configs: if conf.valid: - col = elt / ((len(elts) + i) / (i + 1)) + col = elt / ((len(elts) + conf.cols - 1) / conf.cols) padded = length - if col < i: + if col < (conf.cols - 1): padded += padding if conf.widths[col] < padded: @@ -87,16 +95,20 @@ def config_variable_cols(elts, console_width, padding): return config -def config_uniform_cols(elts, console_width, padding): +def config_uniform_cols(elts, console_width, padding, cols=0): """Uniform-width column fitting algorithm. Determines the longest element in the list, and determines how many columns of that width will fit on screen. Returns a corresponding column config. """ + if cols < 0: + raise ValueError("cols must be non-negative.") + max_len = max(len(elt) for elt in elts) + padding - cols = max(1, console_width / max_len) - cols = min(len(elts), cols) + if cols == 0: + cols = max(1, console_width / max_len) + cols = min(len(elts), cols) config = ColumnConfig(cols) config.widths = [max_len] * cols return config @@ -115,6 +127,10 @@ def colify(elts, **options): output= A file object to write to. Default is sys.stdout. indent= Optionally indent all columns by some number of spaces. padding= Spaces between columns. Default is 2. + width= Width of the output. Default is 80 if tty is not detected. + + cols= Force number of columns. 
Default is to size to terminal, + or single-column if no tty tty= Whether to attempt to write to a tty. Default is to autodetect a tty. Set to False to force single-column output. @@ -123,15 +139,19 @@ def colify(elts, **options): Variable-width columns are tighter, uniform columns are all the same width and fit less data on the screen. - width= Width of the output. Default is 80 if tty is not detected. + decorator= Function to add decoration (such as color) after columns have + already been fitted. Useful for fitting based only on + positive-width characters. """ # Get keyword arguments or set defaults + cols = options.pop("cols", 0) output = options.pop("output", sys.stdout) indent = options.pop("indent", 0) padding = options.pop("padding", 2) tty = options.pop('tty', None) method = options.pop("method", "variable") console_cols = options.pop("width", None) + decorator = options.pop("decorator", lambda x:x) if options: raise TypeError("'%s' is an invalid keyword argument for this function." @@ -159,9 +179,9 @@ def colify(elts, **options): # Choose a method. Variable-width colums vs uniform-width. if method == "variable": - config = config_variable_cols(elts, console_cols, padding) + config = config_variable_cols(elts, console_cols, padding, cols) elif method == "uniform": - config = config_uniform_cols(elts, console_cols, padding) + config = config_uniform_cols(elts, console_cols, padding, cols) else: raise ValueError("method must be one of: " + allowed_methods) @@ -176,7 +196,7 @@ def colify(elts, **options): output.write(" " * indent) for col in xrange(cols): elt = col * rows + row - output.write(formats[col] % elts[elt]) + output.write(formats[col] % decorator(elts[elt])) output.write("\n") row += 1 -- cgit v1.2.3-70-g09d2 From e15316e8256175359ef38066900df45b8b4d126c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 1 Dec 2014 23:13:09 -0800 Subject: index_by supports compound index keys. --- lib/spack/llnl/util/lang.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index ce7d0197f0..049d158c6d 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -68,6 +68,12 @@ def index_by(objects, *funcs): index1 = index_by(list_of_specs, 'arch', 'compiler') index2 = index_by(list_of_specs, 'compiler') + + You can also index by tuples by passing tuples: + + index1 = index_by(list_of_specs, ('arch', 'compiler')) + + Keys in the resulting dict will look like ('gcc', 'bgqos_0'). """ if not funcs: return objects @@ -75,6 +81,8 @@ def index_by(objects, *funcs): f = funcs[0] if isinstance(f, basestring): f = lambda x: getattr(x, funcs[0]) + elif isinstance(f, tuple): + f = lambda x: tuple(getattr(x, p) for p in funcs[0]) result = {} for o in objects: -- cgit v1.2.3-70-g09d2 From 40b4fa544362d2942ef7663c7fc809c1f5ef591b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 1 Dec 2014 23:14:06 -0800 Subject: Better spack find view. 
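The index_by() extension in lang.py above is what the reworked find view relies on: passing a tuple of attribute names yields a dict keyed by tuples. A rough usage sketch with a stand-in class instead of real Spec objects (it assumes Spack's lib directories are on sys.path):

    from llnl.util.lang import index_by

    class FakeSpec(object):
        def __init__(self, architecture, compiler):
            self.architecture = architecture
            self.compiler = compiler

    specs = [FakeSpec('linux-x86_64', 'gcc@4.8'),
             FakeSpec('linux-x86_64', 'clang@3.4'),
             FakeSpec('bgqos_0',      'gcc@4.8')]

    index = index_by(specs, ('architecture', 'compiler'))
    # Keys are tuples such as ('linux-x86_64', 'gcc@4.8'); each value is the
    # list of objects sharing that (architecture, compiler) pair.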
--- lib/spack/llnl/util/tty/colify.py | 21 +++++++++++- lib/spack/spack/cmd/find.py | 70 ++++++++++++++++++++------------------- 2 files changed, 56 insertions(+), 35 deletions(-) diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 2c93eaadd5..5c5c6077ec 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -196,7 +196,7 @@ def colify(elts, **options): output.write(" " * indent) for col in xrange(cols): elt = col * rows + row - output.write(formats[col] % decorator(elts[elt])) + output.write(decorator(formats[col] % elts[elt])) output.write("\n") row += 1 @@ -206,6 +206,25 @@ def colify(elts, **options): return (config.cols, tuple(config.widths)) +def colify_table(table, **options): + if table is None: + raise TypeError("Can't call colify_table on NoneType") + elif not table or not table[0]: + raise ValueError("Table is empty in colify_table!") + + columns = len(table[0]) + def transpose(): + for i in xrange(columns): + for row in table: + yield row[i] + + if 'cols' in options: + raise ValueError("Cannot override columsn in colify_table.") + options['cols'] = columns + + colify(transpose(), **options) + + def colified(elts, **options): """Invokes the colify() function but returns the result as a string instead of writing it to an output string.""" diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 2238484a21..c0a3162429 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -24,13 +24,14 @@ ############################################################################## import sys import collections +import itertools from external import argparse from StringIO import StringIO import llnl.util.tty as tty -from llnl.util.tty.colify import colify +from llnl.util.tty.colify import * from llnl.util.tty.color import * -from llnl.util.lang import partition_list, index_by +from llnl.util.lang import * import spack import spack.spec @@ -65,39 +66,40 @@ def find(parser, args): if not query_specs: return - specs = [s for s in spack.db.installed_package_specs() - if not query_specs or any(s.satisfies(q) for q in query_specs)] + # Get all the specs the user asked for + if not query_specs: + specs = set(spack.db.installed_package_specs()) + else: + results = [set(spack.db.get_installed(qs)) for qs in query_specs] + specs = set.union(*results) # Make a dict with specs keyed by architecture and compiler. 
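colify_table(), added above, walks the table column by column and pins the column count, so colify()'s top-to-bottom, column-major layout reproduces the original rows. A small illustration of that transposition step with sample data only:

    table = [['zlib',    '1.2.8',  '/opt/spack/zlib'],
             ['mpich',   '3.0.4',  '/opt/spack/mpich'],
             ['dyninst', '8.1.2',  '/opt/spack/dyninst']]

    columns = len(table[0])
    flattened = [row[i] for i in range(columns) for row in table]
    # colify(flattened, cols=columns) then prints one table column per output
    # column, because colify fills each output column from top to bottom.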
- index = index_by(specs, 'architecture', 'compiler') + index = index_by(specs, ('architecture', 'compiler')) # Traverse the index and print out each package - for architecture in index: - tty.hline(architecture, char='=', color=spack.spec.architecture_color) - for compiler in index[architecture]: - tty.hline(compiler, char='-', color=spack.spec.compiler_color) - - specs = index[architecture][compiler] - specs.sort() - - abbreviated = [s.format('$_$@$+$#', color=True) for s in specs] - - if args.paths: - # Print one spec per line along with prefix path - width = max(len(s) for s in abbreviated) - width += 2 - format = " %-{}s%s".format(width) - - for abbrv, spec in zip(abbreviated, specs): - print format % (abbrv, spec.prefix) - - elif args.full_specs: - for spec in specs: - print spec.tree(indent=4, format='$_$@$+', color=True), - else: - max_len = max([len(s.name) for s in specs]) - max_len += 4 - - for spec in specs: - format = '$-' + str(max_len) + '_$@$+$#' - print " " + spec.format(format, color=True) + for i, (architecture, compiler) in enumerate(sorted(index)): + if i > 0: print + tty.hline("%s / %s" % (compiler, architecture), char='-') + + specs = index[(architecture, compiler)] + specs.sort() + + abbreviated = [s.format('$_$@$+$#', color=True) for s in specs] + + if args.paths: + # Print one spec per line along with prefix path + width = max(len(s) for s in abbreviated) + width += 2 + format = " %-{}s%s".format(width) + + for abbrv, spec in zip(abbreviated, specs): + print format % (abbrv, spec.prefix) + + elif args.full_specs: + for spec in specs: + print spec.tree(indent=4, format='$_$@$+', color=True), + else: + max_len = max([len(s.name) for s in specs]) + max_len += 4 + + colify((s.format('$-_$@$+$#') for s in specs), decorator=spack.spec.colorize_spec) -- cgit v1.2.3-70-g09d2 From e71cf672f19565b8cc5b7f1aaec33dca6a3a74f4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 2 Dec 2014 09:58:30 -0800 Subject: Fail fast in stage if all fetch strategies fail for a package. --- lib/spack/spack/fetch_strategy.py | 12 ++++++++++-- lib/spack/spack/stage.py | 2 ++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index d48b999ddc..0e848652ae 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -156,9 +156,10 @@ class URLFetchStrategy(FetchStrategy): if spack.curl.returncode == 22: # This is a 404. Curl will print the error. - raise FailedDownloadError(self.url) + raise FailedDownloadError( + self.url, "URL %s was not found!" % self.url) - if spack.curl.returncode == 60: + elif spack.curl.returncode == 60: # This is a certificate error. Suggest spack -k raise FailedDownloadError( self.url, @@ -168,6 +169,13 @@ class URLFetchStrategy(FetchStrategy): "can try running spack -k, which will not check SSL certificates." "Use this at your own risk.") + else: + # This is some other curl error. Curl will print the + # error, but print a spack message too + raise FailedDownloadError( + self.url, "Curl failed with error %d", spack.curl.returncode) + + # Check if we somehow got an HTML file rather than the archive we # asked for. We only look at the last content type, to handle # redirects properly. diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index f09346ab9b..84454c9d2c 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -260,6 +260,8 @@ class Stage(object): tty.msg("Fetching from %s failed." 
% fetcher) tty.debug(e) continue + else: + tty.die("All fetchers failed for %s" % self.name) def check(self): -- cgit v1.2.3-70-g09d2 From 0c12e26026cd8262be6f678da437430c76293e47 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 2 Dec 2014 14:25:52 -0800 Subject: Bugfix in boost build. - b2 used to be called bjam --- var/spack/packages/boost/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py index d3231c3baa..254d1afda1 100644 --- a/var/spack/packages/boost/package.py +++ b/var/spack/packages/boost/package.py @@ -53,7 +53,10 @@ class Boost(Package): bootstrap = Executable('./bootstrap.sh') bootstrap() - b2 = Executable('./b2') + # b2 used to be called bjam, before 1.47 (sigh) + b2name = './b2' if spec.satisfies('@1.47:') else './bjam' + + b2 = Executable(b2name) b2('install', '-j %s' % make_jobs, '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From 908400bfc5bca8bf10ee5bdadf17a40bddc68caf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 2 Dec 2014 21:57:22 -0800 Subject: Fix dyninst 8.1.1 checksum. --- var/spack/packages/dyninst/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/dyninst/package.py b/var/spack/packages/dyninst/package.py index df19ac7bc0..9fbc6385a9 100644 --- a/var/spack/packages/dyninst/package.py +++ b/var/spack/packages/dyninst/package.py @@ -32,7 +32,7 @@ class Dyninst(Package): list_url = "http://www.dyninst.org/downloads/dyninst-8.x" version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a') - version('8.1.1', '1f8743e3a5662b25ce64a7edf647e77d') + version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac') depends_on("libelf") depends_on("libdwarf") -- cgit v1.2.3-70-g09d2 From 11cffff943b04ce69692db973c48de29647c8087 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 2 Dec 2014 21:56:22 -0800 Subject: colify handles ansi color input directly; no more decorator. --- lib/spack/llnl/util/tty/__init__.py | 25 ++++++++----------- lib/spack/llnl/util/tty/colify.py | 50 ++++++++++++++++++++----------------- lib/spack/llnl/util/tty/color.py | 5 ++++ lib/spack/spack/cmd/compiler.py | 10 +++++--- lib/spack/spack/cmd/find.py | 14 +++++------ 5 files changed, 56 insertions(+), 48 deletions(-) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 5eeab67d6f..aba9e61f4f 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -145,18 +145,16 @@ def get_yes_or_no(prompt, **kwargs): def hline(label=None, **kwargs): - """Draw an optionally colored or labeled horizontal line. + """Draw a labeled horizontal line. Options: - char Char to draw the line with. Default '-' - color Color of the label. Default is no color. max_width Maximum width of the line. Default is 64 chars. - - See tty.color for possible color formats. """ - char = kwargs.get('char', '-') - color = kwargs.get('color', '') - max_width = kwargs.get('max_width', 64) + char = kwargs.pop('char', '-') + max_width = kwargs.pop('max_width', 64) + if kwargs: + raise TypeError("'%s' is an invalid keyword argument for this function." 
+ % next(kwargs.iterkeys())) rows, cols = terminal_size() if not cols: @@ -166,15 +164,12 @@ def hline(label=None, **kwargs): cols = min(max_width, cols) label = str(label) - prefix = char * 2 + " " + label + " " - suffix = (cols - len(prefix)) * char + prefix = char * 2 + " " + suffix = " " + (cols - len(prefix) - clen(label)) * char out = StringIO() - if color: - prefix = char * 2 + " " + color + cescape(label) + "@. " - cwrite(prefix, stream=out, color=True) - else: - out.write(prefix) + out.write(prefix) + out.write(label) out.write(suffix) print out.getvalue() diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 5c5c6077ec..6b2909990c 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -33,6 +33,7 @@ import struct from StringIO import StringIO from llnl.util.tty import terminal_size +from llnl.util.tty.color import clen class ColumnConfig: @@ -40,7 +41,8 @@ class ColumnConfig: self.cols = cols self.line_length = 0 self.valid = True - self.widths = [0] * cols + self.widths = [0] * cols # does not include ansi colors + self.cwidths = [0] * cols # includes ansi colors def __repr__(self): attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")] @@ -62,7 +64,10 @@ def config_variable_cols(elts, console_width, padding, cols=0): raise ValueError("cols must be non-negative.") # Get a bound on the most columns we could possibly have. - lengths = [len(elt) for elt in elts] + # 'clen' ignores length of ansi color sequences. + lengths = [clen(e) for e in elts] + clengths = [len(e) for e in elts] + max_cols = max(1, console_width / (min(lengths) + padding)) max_cols = min(len(elts), max_cols) @@ -71,17 +76,16 @@ def config_variable_cols(elts, console_width, padding, cols=0): # Determine the most columns possible for the console width. configs = [ColumnConfig(c) for c in col_range] - for elt, length in enumerate(lengths): + for i, length in enumerate(lengths): for conf in configs: if conf.valid: - col = elt / ((len(elts) + conf.cols - 1) / conf.cols) - padded = length - if col < (conf.cols - 1): - padded += padding - - if conf.widths[col] < padded: - conf.line_length += padded - conf.widths[col] - conf.widths[col] = padded + col = i / ((len(elts) + conf.cols - 1) / conf.cols) + p = padding if col < (conf.cols - 1) else 0 + + if conf.widths[col] < (length + p): + conf.line_length += length + p - conf.widths[col] + conf.widths[col] = length + p + conf.cwidths[col] = clengths[i] + p conf.valid = (conf.line_length < console_width) try: @@ -105,12 +109,17 @@ def config_uniform_cols(elts, console_width, padding, cols=0): if cols < 0: raise ValueError("cols must be non-negative.") - max_len = max(len(elt) for elt in elts) + padding + # 'clen' ignores length of ansi color sequences. + max_len = max(clen(e) for e in elts) + padding + max_clen = max(len(e) for e in elts) + padding if cols == 0: cols = max(1, console_width / max_len) cols = min(len(elts), cols) + config = ColumnConfig(cols) config.widths = [max_len] * cols + config.cwidths = [max_clen] * cols + return config @@ -139,9 +148,8 @@ def colify(elts, **options): Variable-width columns are tighter, uniform columns are all the same width and fit less data on the screen. - decorator= Function to add decoration (such as color) after columns have - already been fitted. Useful for fitting based only on - positive-width characters. + len= Function to use for calculating string length. + Useful for ignoring ansi color. Default is 'len'. 
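The widths/cwidths split above exists because '%-Ns' padding counts ANSI escape bytes: the visible width has to come from clen() while the format width must include the invisible color codes. A short sketch of that rule, with clen() defined locally using the same regex as the color module and arbitrary colors and widths:

    import re

    def clen(s):
        # Visible length: ignore ANSI color sequences when counting.
        return len(re.sub(r'\033[^m]*m', '', s))

    colored = '\033[1;34mzlib\033[0m'          # "zlib" in bold blue
    visible_width = 10
    field_width = len(colored) + (visible_width - clen(colored))
    cell = '%-*s' % (field_width, colored)     # pads to 10 visible columns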
""" # Get keyword arguments or set defaults cols = options.pop("cols", 0) @@ -151,7 +159,6 @@ def colify(elts, **options): tty = options.pop('tty', None) method = options.pop("method", "variable") console_cols = options.pop("width", None) - decorator = options.pop("decorator", lambda x:x) if options: raise TypeError("'%s' is an invalid keyword argument for this function." @@ -162,13 +169,10 @@ def colify(elts, **options): if not elts: return (0, ()) + # Use only one column if not a tty. if not tty: if tty is False or not output.isatty(): - for elt in elts: - output.write("%s\n" % elt) - - maxlen = max(len(str(s)) for s in elts) - return (1, (maxlen,)) + cols = 1 # Specify the number of character columns to use. if not console_cols: @@ -186,7 +190,7 @@ def colify(elts, **options): raise ValueError("method must be one of: " + allowed_methods) cols = config.cols - formats = ["%%-%ds" % width for width in config.widths[:-1]] + formats = ["%%-%ds" % width for width in config.cwidths[:-1]] formats.append("%s") # last column has no trailing space rows = (len(elts) + cols - 1) / cols @@ -196,7 +200,7 @@ def colify(elts, **options): output.write(" " * indent) for col in xrange(cols): elt = col * rows + row - output.write(decorator(formats[col] % elts[elt])) + output.write(formats[col] % elts[elt]) output.write("\n") row += 1 diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 14974a1014..598e9d44f5 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -149,6 +149,11 @@ def colorize(string, **kwargs): return re.sub(color_re, match_to_ansi(color), string) +def clen(string): + """Return the length of a string, excluding ansi color sequences.""" + return len(re.sub(r'\033[^m]*m', '', string)) + + def cwrite(string, stream=sys.stdout, color=None): """Replace all color expressions in string with ANSI control codes and write the result to the stream. 
If color is diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 5c46a3536d..e37f44b3b7 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -25,6 +25,7 @@ from external import argparse import llnl.util.tty as tty +from llnl.util.tty.color import colorize from llnl.util.tty.colify import colify from llnl.util.lang import index_by @@ -96,9 +97,12 @@ def compiler_info(args): def compiler_list(args): tty.msg("Available compilers") index = index_by(spack.compilers.all_compilers(), 'name') - for name, compilers in index.items(): - tty.hline(name, char='-', color=spack.spec.compiler_color) - colify(reversed(sorted(compilers)), indent=4) + for i, (name, compilers) in enumerate(index.items()): + if i >= 1: print + + cname = "%s{%s}" % (spack.spec.compiler_color, name) + tty.hline(colorize(cname), char='-') + colify(reversed(sorted(compilers))) def compiler(parser, args): diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index c0a3162429..b6efc980b6 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -79,13 +79,16 @@ def find(parser, args): # Traverse the index and print out each package for i, (architecture, compiler) in enumerate(sorted(index)): if i > 0: print - tty.hline("%s / %s" % (compiler, architecture), char='-') - specs = index[(architecture, compiler)] + header = "%s{%s} / %s{%s}" % ( + spack.spec.architecture_color, architecture, + spack.spec.compiler_color, compiler) + tty.hline(colorize(header), char='-') + + specs = index[(architecture,compiler)] specs.sort() abbreviated = [s.format('$_$@$+$#', color=True) for s in specs] - if args.paths: # Print one spec per line along with prefix path width = max(len(s) for s in abbreviated) @@ -99,7 +102,4 @@ def find(parser, args): for spec in specs: print spec.tree(indent=4, format='$_$@$+', color=True), else: - max_len = max([len(s.name) for s in specs]) - max_len += 4 - - colify((s.format('$-_$@$+$#') for s in specs), decorator=spack.spec.colorize_spec) + colify(s.format('$-_$@$+$#', color=True) for s in specs) -- cgit v1.2.3-70-g09d2 From fdc6081244d0d95793c23a5295edc580096ad351 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 2 Dec 2014 22:52:53 -0800 Subject: CLI improvements to find and list. 
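With the hline() and colorize() changes above, callers color the label themselves and hline() relies on clen() to size the rule; both the compiler and find commands follow this pattern. A condensed version of that call sequence, with illustrative color markup and spec strings standing in for spack.spec's color constants:

    import llnl.util.tty as tty
    from llnl.util.tty.color import colorize

    header = "%s{%s} / %s{%s}" % ('@g', 'linux-x86_64', '@c', 'gcc@4.8.2')
    tty.hline(colorize(header), char='-')
    # prints something like: -- linux-x86_64 / gcc@4.8.2 ---------------------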
--- lib/spack/spack/cmd/find.py | 16 +++++++++++----- lib/spack/spack/cmd/list.py | 1 - 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index b6efc980b6..1de3413d42 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -40,12 +40,15 @@ description ="Find installed spack packages" def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() + format_group.add_argument( + '-l', '--long', action='store_true', dest='long', + help='Show dependency hashes as well as versions.') format_group.add_argument( '-p', '--paths', action='store_true', dest='paths', help='Show paths to package install directories') format_group.add_argument( - '-l', '--long', action='store_true', dest='full_specs', - help='Show full-length specs of installed packages') + '-d', '--deps', action='store_true', dest='full_deps', + help='Show full dependency DAG of installed packages') subparser.add_argument( 'query_specs', nargs=argparse.REMAINDER, @@ -88,7 +91,7 @@ def find(parser, args): specs = index[(architecture,compiler)] specs.sort() - abbreviated = [s.format('$_$@$+$#', color=True) for s in specs] + abbreviated = [s.format('$_$@$+', color=True) for s in specs] if args.paths: # Print one spec per line along with prefix path width = max(len(s) for s in abbreviated) @@ -98,8 +101,11 @@ def find(parser, args): for abbrv, spec in zip(abbreviated, specs): print format % (abbrv, spec.prefix) - elif args.full_specs: + elif args.full_deps: for spec in specs: print spec.tree(indent=4, format='$_$@$+', color=True), else: - colify(s.format('$-_$@$+$#', color=True) for s in specs) + fmt = '$-_$@$+' + if args.long: + fmt += '$#' + colify(s.format(fmt, color=True) for s in specs) diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py index 5c7051d6a9..1f0978a18e 100644 --- a/lib/spack/spack/cmd/list.py +++ b/lib/spack/spack/cmd/list.py @@ -61,5 +61,4 @@ def list(parser, args): indent=0 if sys.stdout.isatty(): tty.msg("%d packages." % len(sorted_packages)) - indent=2 colify(sorted_packages, indent=indent) -- cgit v1.2.3-70-g09d2 From 0f04f75fa3caeb5de5b367c62a79167d8b52a3fe Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Tue, 2 Dec 2014 18:12:25 -0800 Subject: add autoconf and libcircle dependencies, call autoconf before configure --- var/spack/packages/mpibash/mpibash-4.3.patch | 1565 ++++++++++++++++++++++++++ var/spack/packages/mpibash/package.py | 32 + 2 files changed, 1597 insertions(+) create mode 100644 var/spack/packages/mpibash/mpibash-4.3.patch create mode 100644 var/spack/packages/mpibash/package.py diff --git a/var/spack/packages/mpibash/mpibash-4.3.patch b/var/spack/packages/mpibash/mpibash-4.3.patch new file mode 100644 index 0000000000..17e285b0bf --- /dev/null +++ b/var/spack/packages/mpibash/mpibash-4.3.patch @@ -0,0 +1,1565 @@ +diff -Naur bash-4.3/builtins/circle.def mpibash-4.3/builtins/circle.def +--- bash-4.3/builtins/circle.def 1969-12-31 17:00:00.000000000 -0700 ++++ mpibash-4.3/builtins/circle.def 2014-05-13 11:27:37.314100671 -0600 +@@ -0,0 +1,620 @@ ++This file is circle.def, from which is created circle.c. ++It implements all of the "circle_*" builtins in Bash. 
++ ++$PRODUCES circle.c ++ ++#include ++ ++#include ++#if defined (HAVE_UNISTD_H) ++# ifdef _MINIX ++# include ++# endif ++# include ++#endif ++ ++#include "../bashintl.h" ++#include "../shell.h" ++#include "common.h" ++#include "bashgetopt.h" ++#include ++ ++extern int running_trap, trap_saved_exit_value; ++ ++static int circle_rank; /* Rank in the Libcircle job */ ++static SHELL_VAR *create_func = NULL; /* User-defined callback function for CIRCLE_cb_create. */ ++static SHELL_VAR *process_func = NULL; /* User-defined callback function for CIRCLE_cb_process. */ ++static SHELL_VAR *reduce_init_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_init. */ ++static SHELL_VAR *reduce_fini_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_fini. */ ++static SHELL_VAR *reduce_op_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_op. */ ++static CIRCLE_handle *current_handle = NULL; /* Active handle within a callback or NULL if not within a callback */ ++static int within_reduction = 0; /* 1=within a reduction callback; 0=not */ ++ ++/* Return with a usage message if no arguments remain. */ ++#define YES_ARGS(LIST) \ ++ if ((LIST) == 0) \ ++ { \ ++ builtin_usage (); \ ++ return (EX_USAGE); \ ++ } ++ ++/* Perform the same operation as bind_variable, but with VALUE being a ++ * number, not a string. */ ++static SHELL_VAR * ++bind_variable_number (name, value, flags) ++ const char *name; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_variable (name, numstr, flags); ++} ++ ++/* Invoke the user-defined creation-callback function (create_func). */ ++static void ++internal_create_func (handle) ++ CIRCLE_handle *handle; ++{ ++ WORD_LIST *funcargs; ++ ++ if (create_func == NULL) ++ return; ++ current_handle = handle; ++ funcargs = make_word_list (make_word ("cb_create"), NULL); ++ execute_shell_function (create_func, funcargs); ++ dispose_words (funcargs); ++ current_handle = NULL; ++} ++ ++/* Invoke the user-defined process-callback function (process_func). */ ++static void ++internal_process_func (handle) ++ CIRCLE_handle *handle; ++{ ++ WORD_LIST *funcargs; ++ ++ if (process_func == NULL) ++ return; ++ current_handle = handle; ++ funcargs = make_word_list (make_word ("cb_process"), NULL); ++ execute_shell_function (process_func, funcargs); ++ dispose_words (funcargs); ++ current_handle = NULL; ++} ++ ++/* Invoke the user-defined reduction-initiation callback function ++ * (reduce_init_func). */ ++static void ++internal_reduce_init_func (void) ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_init_func == NULL) ++ return; ++ within_reduction = 1; ++ funcargs = make_word_list (make_word ("cb_reduce_init"), NULL); ++ execute_shell_function (reduce_init_func, funcargs); ++ dispose_words (funcargs); ++ within_reduction = 0; ++} ++ ++/* Invoke the user-defined reduction callback function ++ * (reduce_op_func). 
*/ ++static void ++internal_reduce_op_func (buf1, size1, buf2, size2) ++ const void* buf1; ++ size_t size1; ++ const void* buf2; ++ size_t size2; ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_op_func == NULL) ++ return; ++ within_reduction = 1; ++ funcargs = make_word_list (make_word (buf2), NULL); ++ funcargs = make_word_list (make_word (buf1), funcargs); ++ funcargs = make_word_list (make_word ("cb_reduce_op"), funcargs); ++ execute_shell_function (reduce_op_func, funcargs); ++ dispose_words (funcargs); ++ within_reduction = 0; ++} ++ ++/* Invoke the user-defined reduction-finalization callback function ++ * (reduce_fini_func). */ ++static void ++internal_reduce_fini_func (buf, size) ++ const void* buf; ++ size_t size; ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_fini_func == NULL) ++ return; ++ funcargs = make_word_list (make_word (buf), NULL); ++ funcargs = make_word_list (make_word ("cb_reduce_fini"), funcargs); ++ execute_shell_function (reduce_fini_func, funcargs); ++ dispose_words (funcargs); ++} ++ ++/* Look up a user-provided callback function. */ ++static int ++find_callback_function (list, user_func) ++ WORD_LIST *list; ++ SHELL_VAR **user_func; ++{ ++ char *funcname; /* Name of the user-defined function. */ ++ ++ /* If no argument was provided, nullify the callback function. */ ++ if (list == NULL) ++ { ++ *user_func = NULL; ++ return EXECUTION_SUCCESS; ++ } ++ ++ /* Get the callback function. */ ++ funcname = list->word->word; ++ list = list->next; ++ no_args (list); ++ *user_func = find_function (funcname); ++ if (*user_func == NULL) ++ { ++ builtin_error (_("function %s not found"), funcname); ++ return EXECUTION_FAILURE; ++ } ++ return EXECUTION_SUCCESS; ++} ++ ++/* Initialize Libcircle. */ ++void ++initialize_libcircle (argc, argv) ++ int argc; ++ char **argv; ++{ ++ circle_rank = CIRCLE_init (argc, argv, CIRCLE_DEFAULT_FLAGS); ++ bind_variable_number ("circle_rank", circle_rank, 0); ++ CIRCLE_enable_logging (CIRCLE_LOG_WARN); ++ CIRCLE_cb_create (internal_create_func); ++ CIRCLE_cb_process (internal_process_func); ++ CIRCLE_cb_reduce_init (internal_reduce_init_func); ++ CIRCLE_cb_reduce_op (internal_reduce_op_func); ++ CIRCLE_cb_reduce_fini (internal_reduce_fini_func); ++} ++ ++/* Finalize Libcircle. */ ++void ++finalize_libcircle (void) ++{ ++ CIRCLE_finalize (); ++} ++ ++/* ---------------------------------------------------------------------- */ ++ ++$BUILTIN circle_set_options ++$FUNCTION circle_set_options_builtin ++$SHORT_DOC circle_set_options [flag]... ++Change Libcircle's run-time behavior. ++ ++Arguments: ++ FLAG "split_random", "split_equal", or "create_global" ++ ++Multiple flags can be provided. If no flags are provided, Libcircle ++reverts to its default options. ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the circle_set_options builtin. 
*/ ++int ++circle_set_options_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ int flags = 0; /* Flags to pass to CIRCLE_set_options */ ++ ++ if (list == NULL) ++ flags = CIRCLE_DEFAULT_FLAGS; ++ else ++ while (list != NULL) ++ { ++ word = list->word->word; ++ if (!strcmp (word, "split_random")) ++ flags |= CIRCLE_SPLIT_RANDOM; ++ else if (!strcmp (word, "split_equal")) ++ flags |= CIRCLE_SPLIT_EQUAL; ++ else if (!strcmp (word, "create_global")) ++ flags |= CIRCLE_CREATE_GLOBAL; ++ else ++ { ++ builtin_error (_("invalid flag \"%s\""), word); ++ return (EXECUTION_FAILURE); ++ } ++ list = list->next; ++ } ++ CIRCLE_set_options (flags); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_cb_create ++$FUNCTION circle_cb_create_builtin ++$SHORT_DOC circle_cb_create [func] ++Register a function that will create work when asked. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_enqueue when called ++ ++If FUNC is omitted, no function will be associated with work creation. ++This can be used to nullify a previous circle_cb_create invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_create builtin. */ ++int ++circle_cb_create_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &create_func); ++} ++ ++$BUILTIN circle_cb_process ++$FUNCTION circle_cb_process_builtin ++$SHORT_DOC circle_cb_process [func] ++Register a function that will process work when asked. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_enqueue when called ++ ++If FUNC is omitted, no function will be associated with work processing. ++This can be used to nullify a previous circle_cb_process invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_process builtin. */ ++int ++circle_cb_process_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &process_func); ++} ++ ++$BUILTIN circle_begin ++$FUNCTION circle_begin_builtin ++$SHORT_DOC circle_begin ++Begin creation and processing of the distributed work queue. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_begin builtin. */ ++int ++circle_begin_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_begin (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_enqueue ++$FUNCTION circle_enqueue_builtin ++$SHORT_DOC circle_enqueue work ++Enqueue work onto the distributed queue. ++ ++Arguments: ++ WORK "Work" as represented by an arbitrary string of limited ++ size (generally around 4KB) ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_enqueue builtin. */ ++int ++circle_enqueue_builtin (list) ++ WORD_LIST *list; ++{ ++ char *work; /* Work to perform */ ++ ++ /* Extract the work argument. */ ++ YES_ARGS (list); ++ work = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a proper callback function. */ ++ if (current_handle == NULL) ++ { ++ builtin_error (_("not within a Libcircle \"create\" or \"process\" callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Enqueue the work. 
*/ ++ if (current_handle->enqueue (work) == -1) ++ return EXECUTION_FAILURE; ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_dequeue ++$FUNCTION circle_dequeue_builtin ++$SHORT_DOC circle_dequeue var ++Dequeue work from the distributed queue into a variable. ++ ++Arguments: ++ VAR Variable in which to receive previously enqueued "work" ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_dequeue builtin. */ ++int ++circle_dequeue_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Variable in which to store the work string */ ++ char work[CIRCLE_MAX_STRING_LEN+1]; /* Work to perform */ ++ ++ /* Extract the variable-name argument. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a callback function. */ ++ if (current_handle == NULL) ++ { ++ builtin_error (_("not within a Libcircle callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Dequeue the work and bind it to the given variable. */ ++ if (current_handle->dequeue (work) == -1) ++ return EXECUTION_FAILURE; ++ bind_variable (varname, work, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_enable_logging ++$FUNCTION circle_enable_logging_builtin ++$SHORT_DOC circle_enable_logging log_level ++Change Libcircle's logging verbosity ++ ++Arguments: ++ LOG_LEVEL "fatal", "error", "warning", "info", or "debug" ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the circle_enable_logging builtin. */ ++int ++circle_enable_logging_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ CIRCLE_loglevel loglevel; /* Level to set */ ++ ++ /* Parse the log level. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!strcmp (word, "fatal")) ++ loglevel = CIRCLE_LOG_FATAL; ++ else if (!strcmp (word, "error")) ++ loglevel = CIRCLE_LOG_ERR; ++ else if (!strcmp (word, "warning")) ++ loglevel = CIRCLE_LOG_WARN; ++ else if (!strcmp (word, "info")) ++ loglevel = CIRCLE_LOG_INFO; ++ else if (!strcmp (word, "debug")) ++ loglevel = CIRCLE_LOG_DBG; ++ else ++ { ++ builtin_error (_("invalid log level \"%s\""), word); ++ return (EXECUTION_FAILURE); ++ } ++ ++ /* Set the log level. */ ++ CIRCLE_enable_logging (loglevel); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_abort ++$FUNCTION circle_abort_builtin ++$SHORT_DOC circle_abort ++Terminate queue processing. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_abort builtin. */ ++int ++circle_abort_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_abort (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_checkpoint ++$FUNCTION circle_checkpoint_builtin ++$SHORT_DOC circle_checkpoint ++Checkpoint a work queue to disk. ++ ++Write a file called circle${circle_rank}.txt containing the current ++queue state of rank ${circle_rank}. On a later run, a worker can ++invoke circle_read_restarts to repopulate its queue from such a ++checkpoint file. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++/*'*/ ++ ++/* Here is the circle_checkpoint builtin. */ ++int ++circle_checkpoint_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_checkpoint (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_read_restarts ++$FUNCTION circle_read_restarts_builtin ++$SHORT_DOC circle_read_restarts ++Repopulate a work queue from a disk checkpoint. 
++ ++Read queue contents from a file called circle${circle_rank}.txt, which ++was previously produced by circle_checkpoint. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++/*'*/ ++ ++/* Here is the circle_read_restarts builtin. */ ++int ++circle_read_restarts_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_read_restarts (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_cb_reduce_init ++$FUNCTION circle_cb_reduce_init_builtin ++$SHORT_DOC circle_cb_reduce_init [func] ++Register a function that will initiate a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_reduce when called ++ ++FUNC will be invoked on all ranks. ++ ++If FUNC is omitted, no function will be associated with reduction ++initialization. This can be used to nullify a previous ++circle_cb_reduce_init invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_init builtin. */ ++int ++circle_cb_reduce_init_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_init_func); ++} ++ ++$BUILTIN circle_cb_reduce_op ++$FUNCTION circle_cb_reduce_op_builtin ++$SHORT_DOC circle_cb_reduce_op [func] ++Register a function that will complete a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will receive ++ two items to reduce and invoke circle_reduce on ++ the reduced value ++ ++If FUNC is omitted, no function will be associated with reduction ++execution. This can be used to nullify a previous circle_cb_reduce_op ++invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_op builtin. */ ++int ++circle_cb_reduce_op_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_op_func); ++} ++ ++$BUILTIN circle_cb_reduce_fini ++$FUNCTION circle_cb_reduce_fini_builtin ++$SHORT_DOC circle_cb_reduce_fini [func] ++Register a function that will complete a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will receive ++ the final reduced data ++ ++If FUNC is omitted, no function will be associated with reduction ++completion. This can be used to nullify a previous ++circle_cb_reduce_fini invocation. ++ ++Libcircle guarantees that FUNC will be invoked only on rank 0. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_fini builtin. */ ++int ++circle_cb_reduce_fini_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_fini_func); ++} ++ ++$BUILTIN circle_reduce ++$FUNCTION circle_reduce_builtin ++$SHORT_DOC circle_reduce work ++Seed the next phase of a reduction operation ++ ++Arguments: ++ WORK "Work" as represented by an arbitrary string of limited ++ size (generally around 4KB) ++ ++This function should be called both by the callback function ++registered with circle_reduce_init and the callback function ++registered with circle_reduce_op. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_reduce builtin. */ ++int ++circle_reduce_builtin (list) ++ WORD_LIST *list; ++{ ++ char *work; /* Work to perform */ ++ ++ /* Extract the work argument. */ ++ YES_ARGS (list); ++ work = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a proper callback function. 
*/ ++ if (!within_reduction) ++ { ++ builtin_error (_("not within a Libcircle \"reduce_init\" or \"reduce_op\" callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Reduce the work. */ ++ CIRCLE_reduce (work, strlen (work)); ++ return EXECUTION_SUCCESS; ++} +diff -Naur bash-4.3/builtins/Makefile.in mpibash-4.3/builtins/Makefile.in +--- bash-4.3/builtins/Makefile.in 2012-05-25 07:29:19.000000000 -0600 ++++ mpibash-4.3/builtins/Makefile.in 2014-05-13 11:27:37.314100671 -0600 +@@ -141,7 +141,9 @@ + $(srcdir)/times.def $(srcdir)/trap.def $(srcdir)/type.def \ + $(srcdir)/ulimit.def $(srcdir)/umask.def $(srcdir)/wait.def \ + $(srcdir)/reserved.def $(srcdir)/pushd.def $(srcdir)/shopt.def \ +- $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def ++ $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def \ ++ $(srcdir)/mpi.def \ ++@CIRCLE@ $(srcdir)/circle.def + + STATIC_SOURCE = common.c evalstring.c evalfile.c getopt.c bashgetopt.c \ + getopt.h +@@ -153,7 +155,9 @@ + jobs.o kill.o let.o mapfile.o \ + pushd.o read.o return.o set.o setattr.o shift.o source.o \ + suspend.o test.o times.o trap.o type.o ulimit.o umask.o \ +- wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o ++ wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o \ ++ mpi.o \ ++@CIRCLE@ circle.o + + CREATED_FILES = builtext.h builtins.c psize.aux pipesize.h tmpbuiltins.c \ + tmpbuiltins.h +@@ -317,6 +321,8 @@ + getopts.o: getopts.def + reserved.o: reserved.def + complete.o: complete.def ++@CIRCLE@ circle.o: circle.def ++mpi.o: mpi.def + + # C files + bashgetopt.o: ../config.h $(topdir)/bashansi.h $(BASHINCDIR)/ansi_stdlib.h +@@ -644,6 +650,19 @@ + mapfile.o: $(topdir)/subst.h $(topdir)/externs.h $(BASHINCDIR)/maxpath.h + mapfile.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/variables.h $(topdir)/conftypes.h + mapfile.o: $(topdir)/arrayfunc.h ../pathnames.h ++@CIRCLE@ circle.o: $(topdir)/command.h ../config.h $(BASHINCDIR)/memalloc.h $(topdir)/error.h ++@CIRCLE@ circle.o: $(topdir)/general.h $(topdir)/xmalloc.h $(topdir)/subst.h $(topdir)/externs.h ++@CIRCLE@ circle.o: $(topdir)/quit.h $(topdir)/dispose_cmd.h $(topdir)/make_cmd.h ++@CIRCLE@ circle.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/unwind_prot.h $(topdir)/variables.h $(topdir)/conftypes.h ++@CIRCLE@ circle.o: $(BASHINCDIR)/maxpath.h ../pathnames.h ++mpi.o: ../config.h ../config-top.h ../config-bot.h ../bashintl.h ++mpi.o: ../include/gettext.h ../shell.h ../config.h ../bashjmp.h ++mpi.o: ../include/posixjmp.h ../command.h ../syntax.h ../general.h ++mpi.o: ../bashtypes.h ../include/chartypes.h ../xmalloc.h ../bashansi.h ++mpi.o: ../error.h ../variables.h ../array.h ../assoc.h ../hashlib.h ++mpi.o: ../conftypes.h ../arrayfunc.h ../quit.h ../sig.h ../include/maxpath.h ++mpi.o: ../unwind_prot.h ../dispose_cmd.h ../make_cmd.h ../include/ocache.h ++mpi.o: ../subst.h ../pathnames.h ../externs.h common.h bashgetopt.h + + #bind.o: $(RL_LIBSRC)chardefs.h $(RL_LIBSRC)readline.h $(RL_LIBSRC)keymaps.h + +diff -Naur bash-4.3/builtins/mpi.def mpibash-4.3/builtins/mpi.def +--- bash-4.3/builtins/mpi.def 1969-12-31 17:00:00.000000000 -0700 ++++ mpibash-4.3/builtins/mpi.def 2014-05-13 11:27:37.314100671 -0600 +@@ -0,0 +1,744 @@ ++This file is mpi.def, from which is created mpi.c. ++It implements all of the "mpi_*" builtins in Bash. 
++
++$PRODUCES mpi.c
++
++#include <config.h>
++
++#include <stdio.h>
++#if defined (HAVE_UNISTD_H)
++# ifdef _MINIX
++# include <sys/types.h>
++# endif
++# include <unistd.h>
++#endif
++
++#include "../bashintl.h"
++#include "../shell.h"
++#include "common.h"
++#include "bashgetopt.h"
++#include <mpi.h>
++
++extern int running_trap, trap_saved_exit_value;
++
++/* Keep track of who we are within MPI_COMM_WORLD. */
++static int mpi_rank;
++static int mpi_num_ranks;
++
++/* Try an MPI operation. Return with an error message on failure. */
++#define MPI_TRY(STMT) \
++ do \
++ { \
++ int mpierr; \
++ mpierr = STMT; \
++ if (mpierr != MPI_SUCCESS) \
++ return report_mpi_error (mpierr); \
++ } \
++ while (0)
++
++/* Return with a usage message if no arguments remain. */
++#define YES_ARGS(LIST) \
++ if ((LIST) == 0) \
++ { \
++ builtin_usage (); \
++ return (EX_USAGE); \
++ }
++
++/* Return with an error message if a given variable is read-only or if
++ * we can't write to it for any other reason (e.g., it's defined as a
++ * function). */
++#define REQUIRE_WRITABLE(NAME) \
++ do \
++ { \
++ SHELL_VAR *bindvar = find_shell_variable (NAME); \
++ if (bindvar) \
++ { \
++ if (readonly_p (bindvar)) \
++ { \
++ err_readonly (NAME); \
++ return (EXECUTION_FAILURE); \
++ } \
++ if (unbind_variable (NAME) == -1) \
++ { \
++ builtin_error ("Failed to write to variable %s", NAME); \
++ return (EXECUTION_FAILURE); \
++ } \
++ } \
++ } \
++ while (0)
++
++/* Initialize MPI. */
++void
++initialize_mpi (argc, argv)
++ int argc;
++ char **argv;
++{
++ int init_done;
++
++ MPI_Initialized (&init_done);
++ if (!init_done)
++ MPI_Init (&argc, &argv);
++ MPI_Errhandler_set (MPI_COMM_WORLD, MPI_ERRORS_RETURN);
++ MPI_Comm_rank (MPI_COMM_WORLD, &mpi_rank);
++ MPI_Comm_size (MPI_COMM_WORLD, &mpi_num_ranks);
++}
++
++/* Finalize MPI. */
++void
++finalize_mpi ()
++{
++ MPI_Finalize ();
++}
++
++/* Parse an operation name into an MPI_Op. Return 1 on success, 0 on
++ * failure. */
++static int
++parse_operation (char *name, MPI_Op *op)
++{
++ /* Define a mapping from operator names to MPI_Op values. */
++ typedef struct {
++ char *name; /* Operation name (e.g., "sum") */
++ MPI_Op value; /* Operation value (e.g., MPI_SUM) */
++ } opname2value_t;
++ static opname2value_t oplist[] = {
++ {"max", MPI_MAX},
++ {"min", MPI_MIN},
++ {"sum", MPI_SUM},
++ {"prod", MPI_PROD},
++ {"land", MPI_LAND},
++ {"band", MPI_BAND},
++ {"lor", MPI_LOR},
++ {"bor", MPI_BOR},
++ {"lxor", MPI_LXOR},
++ {"bxor", MPI_BXOR},
++ {"maxloc", MPI_MAXLOC},
++ {"minloc", MPI_MINLOC}
++ };
++ size_t i;
++
++ for (i = 0; i < sizeof(oplist)/sizeof(opname2value_t); i++)
++ if (!strcmp(name, oplist[i].name))
++ {
++ *op = oplist[i].value;
++ if (i > 0)
++ {
++ /* As a performance optimization, bubble up the value we
++ * just found. */
++ opname2value_t prev = oplist[i - 1];
++ oplist[i - 1] = oplist[i];
++ oplist[i] = prev;
++ }
++ return 1;
++ }
++ return 0;
++}
++
++/* Report an error to the user and return EXECUTION_FAILURE. */
++static int
++report_mpi_error (mpierr)
++ int mpierr;
++{
++ char errstr[MPI_MAX_ERROR_STRING];
++ int errstrlen;
++
++ MPI_Error_string (mpierr, errstr, &errstrlen);
++ builtin_error ("%s", errstr);
++ return EXECUTION_FAILURE;
++}
++
++/* Perform the same operation as bind_variable, but with VALUE being a
++ * number, not a string.
*/ ++static SHELL_VAR * ++bind_variable_number (name, value, flags) ++ const char *name; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_variable (name, numstr, flags); ++} ++ ++/* Perform the same operation as bind_array_variable, but with VALUE ++ * being a number, not a string. */ ++static SHELL_VAR * ++bind_array_variable_number (name, ind, value, flags) ++ char *name; ++ arrayind_t ind; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_array_variable (name, ind, numstr, flags); ++} ++ ++/* Define a reduction-type function (allreduce, scan, exscan, etc.). */ ++typedef int (*reduction_func_t)(void *, void *, int, MPI_Datatype, MPI_Op, MPI_Comm); ++ ++/* Perform any reduction-type operation (allreduce, scan, exscan, etc.). */ ++static int ++reduction_like (list, funcname, func) ++ WORD_LIST *list; ++ char *funcname; ++ reduction_func_t func; ++{ ++ char *word; /* One argument */ ++ struct { ++ long int value; /* Reduced value */ ++ int rank; /* Rank associated with the above */ ++ } number, result; ++ MPI_Op operation = MPI_SUM; /* Operation to perform */ ++ char *varname; /* Name of the variable to bind the results to */ ++ intmax_t n; ++ int i; ++ ++ /* Parse "-O OPERATION" (optional), where OPERATION is a reduction ++ * operation. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (ISOPTION (word, 'O')) ++ { ++ list = list->next; ++ if (list == 0) ++ { ++ sh_needarg (funcname); ++ return (EX_USAGE); ++ } ++ word = list->word->word; ++ if (!parse_operation (word, &operation)) ++ { ++ sh_invalidopt ("-O"); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ } ++ ++ /* Parse the argument, which must be a number. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &n)) ++ { ++ sh_neednumarg (funcname); ++ return (EX_USAGE); ++ } ++ number.value = (long int) n; ++ number.rank = mpi_rank; ++ list = list->next; ++ ++ /* Parse the target variable, which must not be read-only. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Perform the reduction operation. Bind the given array variable ++ * to the result and, for minloc/maxloc, the associated rank. */ ++ if (mpi_rank != 0 || func != MPI_Exscan) { ++ bind_array_variable (varname, 0, "", 0); ++ bind_array_variable (varname, 1, "", 0); ++ } ++ if (operation == MPI_MINLOC || operation == MPI_MAXLOC) ++ { ++ MPI_TRY (func (&number, &result, 1, MPI_LONG_INT, operation, MPI_COMM_WORLD)); ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ bind_array_variable_number (varname, 1, result.rank, 0); ++ } ++ else ++ MPI_TRY (func (&number.value, &result.value, 1, MPI_LONG, operation, MPI_COMM_WORLD)); ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ bind_array_variable_number (varname, 0, result.value, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_comm_rank ++$FUNCTION mpi_comm_rank_builtin ++$SHORT_DOC mpi_comm_rank name ++Return the process's rank in the MPI job. ++ ++Arguments: ++ NAME Scalar variable in which to receive the rank ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the mpi_comm_rank builtin. 
*/ ++int ++mpi_comm_rank_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Name of the variable to bind the results to */ ++ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ bind_variable_number (varname, mpi_rank, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_comm_size ++$FUNCTION mpi_comm_size_builtin ++$SHORT_DOC mpi_comm_size name ++Return the total number of ranks in the MPI job. ++ ++Arguments: ++ NAME Scalar variable in which to receive the number of ranks ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++ ++/* Here is the mpi_comm_size builtin. */ ++int ++mpi_comm_size_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Name of the variable to bind the results to */ ++ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ bind_variable_number (varname, mpi_num_ranks, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_abort ++$FUNCTION mpi_abort_builtin ++$SHORT_DOC mpi_abort [n] ++Abort all processes in the MPI job and exit the shell. ++ ++Exits not only the caller's shell (with a status of N) but also all ++remote shells that are part of the same MPI job. If N is omitted, the ++exit status is that of the last command executed. ++ ++This command should be used only in extreme circumstances. It is ++better for each process to exit normally on its own. ++$END ++/*'*/ ++ ++/* Here is the mpi_abort builtin. */ ++int ++mpi_abort_builtin (list) ++ WORD_LIST *list; ++{ ++ int exit_value; ++ ++ exit_value = (running_trap == 1 && list == 0) ? trap_saved_exit_value : get_exitstat (list); /* Copied from exit.def */ ++ MPI_TRY (MPI_Abort (MPI_COMM_WORLD, exit_value)); ++ return EXECUTION_FAILURE; ++} ++ ++$BUILTIN mpi_send ++$FUNCTION mpi_send_builtin ++$SHORT_DOC mpi_send [-t tag] rank message ++Send a message to a remote process in the same MPI job. ++ ++Options: ++ -t TAG Send the message using tag TAG (default: 0). TAG must ++ be a nonnegative integer. ++ ++Arguments: ++ RANK Whom to send the message to. RANK must be an integer in ++ the range [0, $(mpi_comm_size)-1]. ++ ++ MESSAGE String to send to rank RANK. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_send builtin. */ ++int ++mpi_send_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ intmax_t target_rank; /* MPI target rank */ ++ char *message; /* Message to send to rank target_rank */ ++ intmax_t tag = 0; /* Message tag to use */ ++ ++ /* Parse "-t TAG" (optional), where TAG is a number or "any". */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (ISOPTION (word, 't')) ++ { ++ list = list->next; ++ if (list == 0) ++ { ++ sh_needarg ("mpi_recv"); ++ return (EX_USAGE); ++ } ++ word = list->word->word; ++ if (!legal_number (word, &tag)) ++ { ++ sh_neednumarg ("-t"); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ } ++ else if (*word == '-') ++ { ++ sh_invalidopt (word); ++ builtin_usage (); ++ return (EX_USAGE); ++ } ++ ++ /* Parse the target rank, which must be a number. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &target_rank)) ++ { ++ builtin_error (_("mpi_send: numeric rank required")); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ ++ /* Parse the message to send. */ ++ YES_ARGS (list); ++ message = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Send the message. 
*/ ++ MPI_TRY (MPI_Send (message, strlen(message)+1, MPI_BYTE, (int)target_rank, (int)tag, MPI_COMM_WORLD)); ++ return EXECUTION_SUCCESS; ++} ++ ++ ++$BUILTIN mpi_recv ++$FUNCTION mpi_recv_builtin ++$SHORT_DOC mpi_recv [-t tag] rank name ++Receive a message from a remote process in the same MPI job. ++ ++Options: ++ -t TAG Receive only messages sent using tag TAG (default: 0). ++ TAG must be either a nonnegative integer or the string ++ "any" to receive messages sent using any tag. ++ ++Arguments: ++ RANK Receive only messages sent from sender RANK. RANK ++ must either be in the range [0, $(mpi_comm_size)-1] or ++ be the string "any" to receive messages from any sender. ++ ++ NAME Array variable in which to receive the message, sender ++ rank, and tag. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_recv builtin. */ ++int ++mpi_recv_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ intmax_t source_rank; /* MPI source rank */ ++ char *endptr; /* Used for parsing strings into numbers */ ++ MPI_Status status; /* Status of an MPI operation */ ++ int count; /* Message length in bytes */ ++ intmax_t tag = 0; /* Message tag to use */ ++ char *varname; /* Name of the variable to bind the results to */ ++ static char *message = NULL; /* Message received from MPI */ ++ static size_t alloced = 0; /* Number of bytes allocated for the above */ ++ int opt; /* Parsed option */ ++ ++ /* Parse any options provided. */ ++ reset_internal_getopt (); ++ while ((opt = internal_getopt (list, "t:")) != -1) ++ { ++ switch (opt) ++ { ++ case 't': ++ if (!strcmp (list_optarg, "any")) ++ tag = MPI_ANY_TAG; ++ else if (!legal_number (list_optarg, &tag)) ++ { ++ builtin_error (_("-t: numeric argument or \"any\" required")); ++ return (EX_USAGE); ++ } ++ break; ++ ++ default: ++ sh_invalidopt (word); ++ builtin_usage (); ++ return (EX_USAGE); ++ } ++ } ++ list = loptend; ++ ++ /* Parse the source rank, which must be a number or "any". */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &source_rank)) ++ { ++ if (!strcmp (word, "any")) ++ source_rank = MPI_ANY_SOURCE; ++ else ++ { ++ builtin_error (_("mpi_recv: numeric rank or \"any\" required")); ++ return (EX_USAGE); ++ } ++ } ++ list = list->next; ++ ++ /* Parse the target variable, which must not be read-only. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Receive a message. Because we don't know long the message will ++ * be, we first probe to get the length. */ ++ MPI_TRY (MPI_Probe ((int)source_rank, (int)tag, MPI_COMM_WORLD, &status)); ++ MPI_TRY (MPI_Get_count (&status, MPI_BYTE, &count)); ++ if (alloced < count) ++ { ++ message = xrealloc (message, count); ++ alloced = count; ++ } ++ MPI_TRY (MPI_Recv (message, count, MPI_BYTE, status.MPI_SOURCE, status.MPI_TAG, MPI_COMM_WORLD, &status)); ++ bind_array_variable (varname, 0, message, 0); ++ bind_array_variable_number (varname, 1, status.MPI_SOURCE, 0); ++ bind_array_variable_number (varname, 2, status.MPI_TAG, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_barrier ++$FUNCTION mpi_barrier_builtin ++$SHORT_DOC mpi_barrier ++Synchronizes all of the processes in the MPI job. ++ ++No process will return from mpi_barrier until all processes have ++called mpi_barrier. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_barrier builtin. 
*/ ++int ++mpi_barrier_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ MPI_TRY (MPI_Barrier (MPI_COMM_WORLD)); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_bcast ++$FUNCTION mpi_bcast_builtin ++$SHORT_DOC mpi_bcast [message] name ++Broadcast a message to all processes in the same MPI job. ++ ++Arguments: ++ MESSAGE String to broadcast from one process to all the others. ++ ++ NAME Scalar variable in which to receive the broadcast message. ++ ++Exactly one process in the MPI job must specify a message to ++broadcast. No process will return from mpi_bcast until all processes ++have called mpi_bcast. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_bcast builtin. */ ++int ++mpi_bcast_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ int root; /* MPI root rank */ ++ char *root_message; /* Message to broadcast */ ++ int msglen; /* Length in bytes of the above (including the NULL byte) */ ++ char *varname; /* Name of the variable to bind the results to */ ++ static int *all_lengths = NULL; /* List of every rank's msglen */ ++ static char *message = NULL; /* Message received from the root */ ++ static int alloced = 0; /* Bytes allocated for the above */ ++ int i; ++ ++ /* Parse the optional message and target variable, which must not be ++ * read-only. */ ++ YES_ARGS (list); ++ if (list->next == NULL) ++ { ++ /* Non-root */ ++ root_message = NULL; ++ msglen = -1; ++ } ++ else ++ { ++ /* Root */ ++ root_message = list->word->word; ++ msglen = (int) strlen(root_message) + 1; ++ list = list->next; ++ } ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Acquire global agreement on the root and the message size. */ ++ if (all_lengths == NULL) ++ all_lengths = xmalloc (mpi_num_ranks*sizeof(int)); ++ MPI_TRY (MPI_Allgather (&msglen, 1, MPI_INT, all_lengths, 1, MPI_INT, MPI_COMM_WORLD)); ++ root = -1; ++ for (i = 0; i < mpi_num_ranks; i++) ++ { ++ if (all_lengths[i] == -1) ++ continue; ++ if (root != -1) ++ { ++ builtin_error (_("mpi_bcast: more than one process specified a message")); ++ return (EXECUTION_FAILURE); ++ } ++ root = i; ++ msglen = all_lengths[i]; ++ } ++ if (root == -1) ++ { ++ builtin_error (_("mpi_bcast: no process specified a message")); ++ return (EXECUTION_FAILURE); ++ } ++ ++ /* Broadcast the message. */ ++ if (mpi_rank == root) ++ { ++ MPI_TRY (MPI_Bcast (root_message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); ++ bind_variable (varname, root_message, 0); ++ } ++ else ++ { ++ if (alloced < msglen) ++ { ++ message = xrealloc (message, msglen); ++ alloced = msglen; ++ } ++ MPI_TRY (MPI_Bcast (message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); ++ bind_variable (varname, message, 0); ++ } ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_scan ++$FUNCTION mpi_scan_builtin ++$SHORT_DOC mpi_scan number name ++Perform an inclusive scan across all processes in the same MPI job. ++ ++ -O OPERATION Operation to perform. Must be one of "max", "min", ++ "sum", "prod", "land", "band", "lor", "bor", "lxor", ++ "bxor", "maxloc", or "minloc" (default: "sum"). ++ ++Arguments: ++ NUMBER Integer to use in the scan operation. ++ ++ NAME Array variable in which to receive the result and, in ++ the case of maxloc and minloc, the associated rank. ++ ++In an inclusive-scan operation, each process i presents a number, ++a[i]. 
Once all processes in the MPI job have presented their number,
++the command returns a[0] to rank 0, a[0]+a[1] to rank 1,
++a[0]+a[1]+a[2] to rank 2, and so forth. The -O option enables "+" to
++be replaced with other operations.
++
++Inclusive scans can be useful for assigning a unique index to each
++process in the MPI job.
++
++Exit Status:
++Returns 0 unless an invalid option is given or an error occurs.
++$END
++
++/* Here is the mpi_scan builtin. */
++int
++mpi_scan_builtin (list)
++ WORD_LIST *list;
++{
++ return reduction_like (list, "mpi_scan", MPI_Scan);
++}
++
++$BUILTIN mpi_exscan
++$FUNCTION mpi_exscan_builtin
++$SHORT_DOC mpi_exscan number name
++Perform an exclusive scan across all processes in the same MPI job.
++
++ -O OPERATION Operation to perform. Must be one of "max", "min",
++ "sum", "prod", "land", "band", "lor", "bor", "lxor",
++ "bxor", "maxloc", or "minloc" (default: "sum").
++
++Arguments:
++ NUMBER Integer to use in the scan operation.
++
++ NAME Array variable in which to receive the result and, in
++ the case of maxloc and minloc, the associated rank.
++
++In an exclusive-scan operation, each process i presents a number, a[i].
++Once all processes in the MPI job have presented their number, the
++command assigns a[0] to NAME on rank 1, a[0]+a[1] to NAME on rank 2,
++a[0]+a[1]+a[2] to NAME on rank 3, and so forth. No assignment is
++performed on rank 0. The -O option enables "+" to be replaced with
++other operations.
++
++Exclusive scans can be useful for assigning a unique index to each
++process in the MPI job.
++
++Exit Status:
++Returns 0 unless an invalid option is given or an error occurs.
++$END
++
++/* Here is the mpi_exscan builtin. */
++int
++mpi_exscan_builtin (list)
++ WORD_LIST *list;
++{
++ return reduction_like (list, "mpi_exscan", MPI_Exscan);
++}
++
++$BUILTIN mpi_allreduce
++$FUNCTION mpi_allreduce_builtin
++$SHORT_DOC mpi_allreduce number name
++Reduce numbers from all processes in an MPI job to a single number.
++
++Options:
++
++ -O OPERATION Operation to perform. Must be one of "max", "min",
++ "sum", "prod", "land", "band", "lor", "bor", "lxor",
++ "bxor", "maxloc", or "minloc" (default: "sum").
++
++Arguments:
++ NUMBER Integer to use in the allreduce operation.
++
++ NAME Array variable in which to receive the result and, in
++ the case of maxloc and minloc, the associated rank.
++
++In an all-reduce operation, each process i presents a number, a[i].
++Once all processes in the MPI job have presented their number, the
++command returns a[0]+a[1]+...+a[n-1] to all ranks. The -O option
++enables "+" to be replaced with other operations.
++
++All-reduces can be useful for reaching global agreement (e.g., of a
++termination condition).
++
++Exit Status:
++Returns 0 unless an invalid option is given or an error occurs.
++$END
++
++/* Here is the mpi_allreduce builtin. */
++int
++mpi_allreduce_builtin (list)
++ WORD_LIST *list;
++{
++ return reduction_like (list, "mpi_allreduce", MPI_Allreduce);
++}
+diff -Naur bash-4.3/config.h.in mpibash-4.3/config.h.in
+--- bash-4.3/config.h.in 2013-06-29 15:35:33.000000000 -0600
++++ mpibash-4.3/config.h.in 2014-05-13 11:27:37.314100671 -0600
+@@ -1147,6 +1147,12 @@
+ /* Define if you have the `__argz_stringify' function. */
+ #undef HAVE___ARGZ_STRINGIFY
+
++/* Define if you have both the <libcircle.h> header file and the libcircle library. */
++#undef HAVE_LIBCIRCLE
++
++/* Define if you have the `CIRCLE_cb_reduce_op' function.
*/ ++#undef HAVE_CIRCLE_CB_REDUCE_OP ++ + /* End additions for lib/intl */ + + #include "config-bot.h" +diff -Naur bash-4.3/configure.ac mpibash-4.3/configure.ac +--- bash-4.3/configure.ac 2014-02-11 08:37:53.000000000 -0700 ++++ mpibash-4.3/configure.ac 2014-05-13 11:27:37.302100179 -0600 +@@ -24,7 +24,7 @@ + AC_REVISION([for Bash 4.3, version 4.063])dnl + + define(bashvers, 4.3) +-define(relstatus, release) ++define(relstatus, MPI) + + AC_INIT([bash], bashvers-relstatus, [bug-bash@gnu.org]) + +@@ -813,6 +813,21 @@ + fi + ]) + ++dnl Ensure that we can find an MPI library. ++AC_CHECK_FUNCS([MPI_Init], [], [ ++ AC_MSG_ERROR([Cannot continue without MPI. Consider specifying CC=mpicc.])]) ++ ++dnl If we have Libcircle, use it, too. ++AC_SEARCH_LIBS([CIRCLE_cb_create], [circle], [AC_CHECK_HEADERS([libcircle.h])]) ++if test "x$ac_cv_header_libcircle_h" = xyes; then ++ libcircle_make_prefix="" ++ AC_DEFINE([HAVE_LIBCIRCLE], [1], [Define if you have the Libcircle header and library.]) ++ AC_CHECK_FUNCS([CIRCLE_cb_reduce_op]) ++else ++ libcircle_make_prefix="#" ++fi ++AC_SUBST([CIRCLE], [$libcircle_make_prefix]) ++ + BASH_CHECK_DECL(strtoimax) + BASH_CHECK_DECL(strtol) + BASH_CHECK_DECL(strtoll) +diff -Naur bash-4.3/Makefile.in mpibash-4.3/Makefile.in +--- bash-4.3/Makefile.in 2014-01-25 14:27:30.000000000 -0700 ++++ mpibash-4.3/Makefile.in 2014-05-13 11:27:37.314100671 -0600 +@@ -104,7 +104,7 @@ + VERSPROG = bashversion$(EXEEXT) + VERSOBJ = bashversion.$(OBJEXT) + +-Program = bash$(EXEEXT) ++Program = mpibash$(EXEEXT) + Version = @BASHVERS@ + PatchLevel = `$(BUILD_DIR)/$(VERSPROG) -p` + RELSTATUS = @RELSTATUS@ +diff -Naur bash-4.3/shell.c mpibash-4.3/shell.c +--- bash-4.3/shell.c 2014-01-14 06:04:32.000000000 -0700 ++++ mpibash-4.3/shell.c 2014-05-13 11:27:37.314100671 -0600 +@@ -107,6 +107,13 @@ + extern char *primary_prompt, *secondary_prompt; + extern char *this_command_name; + ++extern void initialize_mpi __P((int, char **)); ++extern void finalize_mpi __P((void)); ++#ifdef HAVE_LIBCIRCLE ++extern void initialize_libcircle __P((int, char **)); ++extern void finalize_libcircle __P((void)); ++#endif ++ + /* Non-zero means that this shell has already been run; i.e. you should + call shell_reinitialize () if you need to start afresh. */ + int shell_initialized = 0; +@@ -324,7 +331,7 @@ + static void init_interactive_script __P((void)); + + static void set_shell_name __P((char *)); +-static void shell_initialize __P((void)); ++static void shell_initialize __P((int, char **)); + static void shell_reinitialize __P((void)); + + static void show_shell_usage __P((FILE *, int)); +@@ -561,7 +568,7 @@ + + /* From here on in, the shell must be a normal functioning shell. + Variables from the environment are expected to be set, etc. */ +- shell_initialize (); ++ shell_initialize (argc, argv); + + set_default_lang (); + set_default_locale_vars (); +@@ -941,6 +948,12 @@ + end_job_control (); + #endif /* JOB_CONTROL */ + ++#ifdef HAVE_LIBCIRCLE ++ finalize_libcircle (); ++#else ++ finalize_mpi (); ++#endif ++ + /* Always return the exit status of the last command to our parent. */ + sh_exit (s); + } +@@ -1691,7 +1704,9 @@ + /* Do whatever is necessary to initialize the shell. + Put new initializations in here. 
*/ + static void +-shell_initialize () ++shell_initialize (argc, argv) ++ int argc; ++ char **argv; + { + char hostname[256]; + +@@ -1760,6 +1775,17 @@ + initialize_shell_options (privileged_mode||running_setuid); + initialize_bashopts (privileged_mode||running_setuid); + #endif ++ ++ /* Initialize Libcircle and MPI. */ ++#ifdef HAVE_LIBCIRCLE ++ initialize_libcircle (argc, argv); ++ initialize_mpi (argc, argv); ++ bind_variable ("libcircle", "yes", 0); ++#else ++ initialize_mpi (argc, argv); ++ bind_variable ("libcircle", "no", 0); ++#endif ++ bind_variable ("mpibash", "yes", 0); + } + + /* Function called by main () when it appears that the shell has already diff --git a/var/spack/packages/mpibash/package.py b/var/spack/packages/mpibash/package.py new file mode 100644 index 0000000000..d0f6dafed6 --- /dev/null +++ b/var/spack/packages/mpibash/package.py @@ -0,0 +1,32 @@ +import os +from spack import * + +class Mpibash(Package): + """Parallel scripting right from the Bourne-Again Shell (Bash)""" + homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html" + + version('4.3', '81348932d5da294953e15d4814c74dd1', + url="http://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz") + + # patch -p1 < ../mpibash-4.3.patch + patch('mpibash-4.3.patch', level=1, when='@4.3') + + # above patch modifies configure.ac + depends_on('autoconf') + + # uses MPI_Exscan which is in MPI-1.2 and later + depends_on('mpi@1.2:') + + depends_on('libcircle') + + def install(self, spec, prefix): + # run autoconf to rebuild configure + autoconf = which('autoconf') + autoconf() + + configure("--prefix=" + prefix, + "CC=mpicc") + + make(parallel=False) + + make("install") -- cgit v1.2.3-70-g09d2 From c774455fc5df90d15d7aad5d239eb59c1ad354eb Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 4 Dec 2014 10:47:01 -0800 Subject: Bugfix in create command. --- lib/spack/spack/cmd/create.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 1b9ad524c4..f0ea867332 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -180,11 +180,11 @@ def create(parser, args): tty.msg("Aborted.") return - sorted_versions = list(reversed(versions)) + sorted_versions = list(reversed(versions.keys())) guesser = ConfigureGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( - versions[:archives_to_fetch], urls[:archives_to_fetch], + sorted_versions[:archives_to_fetch], urls[:archives_to_fetch], first_stage_function=guesser, keep_stage=args.keep_stage) if not ver_hash_tuples: -- cgit v1.2.3-70-g09d2 From ba53ccb6b3d6506f83d2ecc426f3d22736440a1a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 4 Dec 2014 10:51:23 -0800 Subject: Minor tweak: use self.git everywhere in get fetch strategy. 
--- lib/spack/spack/fetch_strategy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 0e848652ae..c463a0e5d8 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -345,8 +345,7 @@ class GitFetchStrategy(VCSFetchStrategy): @property def git_version(self): - git = which('git', required=True) - vstring = git('--version', return_output=True).lstrip('git version ') + vstring = self.git('--version', return_output=True).lstrip('git version ') return Version(vstring) @@ -356,6 +355,7 @@ class GitFetchStrategy(VCSFetchStrategy): self._git = which('git', required=True) return self._git + @_needs_stage def fetch(self): self.stage.chdir() -- cgit v1.2.3-70-g09d2 From 7dc90c709712bf364b2b0b26686cf07cc038f5ab Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 4 Dec 2014 10:53:52 -0800 Subject: Add experimental gasnet package for legion. --- var/spack/packages/gasnet/package.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 var/spack/packages/gasnet/package.py diff --git a/var/spack/packages/gasnet/package.py b/var/spack/packages/gasnet/package.py new file mode 100644 index 0000000000..705961d1de --- /dev/null +++ b/var/spack/packages/gasnet/package.py @@ -0,0 +1,35 @@ +from spack import * + +class Gasnet(Package): + """GASNet is a language-independent, low-level networking layer + that provides network-independent, high-performance communication + primitives tailored for implementing parallel global address space + SPMD languages and libraries such as UPC, Co-Array Fortran, SHMEM, + Cray Chapel, and Titanium. + """ + homepage = "http://gasnet.lbl.gov" + url = "http://gasnet.lbl.gov/GASNet-1.24.0.tar.gz" + + version('1.24.0', 'c8afdf48381e8b5a7340bdb32ca0f41a') + + + def install(self, spec, prefix): + # TODO: don't use paths with @ in them. + change_sed_delimiter('@', ';', 'configure') + + configure("--prefix=%s" % prefix, + # TODO: factor IB suport out into architecture description. + "--enable-ibv", + "--enable-udp", + "--disable-mpi", + "--enable-par", + "--enable-mpi-compat", + "--enable-segment-fast", + "--disable-aligned-segments", + # TODO: make an option so that Legion can request builds with/without this. + # See the Legion webpage for details on when to/not to use. + "--disable-pshm", + "--with-segment-mmap-max=64MB") + + make() + make("install") -- cgit v1.2.3-70-g09d2 From 588955a98732b208f60a741d00ee3eaabd74f1a6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 5 Dec 2014 08:45:51 -0800 Subject: Disable pkgconfig files until I support this better. 
--- var/spack/packages/ncurses/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/ncurses/package.py b/var/spack/packages/ncurses/package.py index 2556e600f6..a22e83503f 100644 --- a/var/spack/packages/ncurses/package.py +++ b/var/spack/packages/ncurses/package.py @@ -16,7 +16,7 @@ class Ncurses(Package): configure("--prefix=%s" % prefix, "--with-shared", "--enable-widec", - "--enable-pc-files", + "--disable-pc-files", "--without-ada") make() make("install") -- cgit v1.2.3-70-g09d2 From c3fce7b77fe895307c78676a2692757002199399 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 8 Dec 2014 22:49:49 -0800 Subject: Bugfix in create and checksum --- lib/spack/spack/cmd/checksum.py | 3 +-- lib/spack/spack/cmd/create.py | 37 +++++++++++++++++++------------------ 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 846817b918..8a448450c2 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -56,7 +56,6 @@ def get_checksums(versions, urls, **kwargs): first_stage_function = kwargs.get('first_stage_function', None) keep_stage = kwargs.get('keep_stage', False) - tty.msg("Downloading...") hashes = [] for i, (url, version) in enumerate(zip(urls, versions)): @@ -98,7 +97,7 @@ def checksum(parser, args): if not versions: tty.die("Could not fetch any versions for %s." % pkg.name) - sorted_versions = list(reversed(sorted(versions))) + sorted_versions = sorted(versions, reverse=True) tty.msg("Found %s versions of %s." % (len(versions), pkg.name), *spack.cmd.elide_list( diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index f0ea867332..c09b9a3e36 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -28,6 +28,7 @@ import hashlib import re from contextlib import closing +from external.ordereddict import OrderedDict import llnl.util.tty as tty from llnl.util.filesystem import mkdirp @@ -160,32 +161,32 @@ def create(parser, args): mkdirp(os.path.dirname(pkg_path)) versions = spack.package.find_versions_of_archive(url) + rkeys = sorted(versions.keys(), reverse=True) + versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided versions = { version : url } - else: - urls = [spack.url.substitute_version(url, v) for v in versions] - if len(urls) > 1: - tty.msg("Found %s versions of %s:" % (len(urls), name), - *spack.cmd.elide_list( - ["%-10s%s" % (v,u) for v, u in zip(versions, urls)])) - print - archives_to_fetch = tty.get_number( - "Include how many checksums in the package file?", - default=5, abort='q') - - if not archives_to_fetch: - tty.msg("Aborted.") - return - - sorted_versions = list(reversed(versions.keys())) + elif len(versions) > 1: + tty.msg("Found %s versions of %s:" % (len(versions), name), + *spack.cmd.elide_list( + ["%-10s%s" % (v,u) for v, u in versions.iteritems()])) + print + archives_to_fetch = tty.get_number( + "Include how many checksums in the package file?", + default=5, abort='q') + + if not archives_to_fetch: + tty.msg("Aborted.") + return guesser = ConfigureGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( - sorted_versions[:archives_to_fetch], urls[:archives_to_fetch], - first_stage_function=guesser, keep_stage=args.keep_stage) + versions.keys()[:archives_to_fetch], + [versions[v] for v in versions.keys()[:archives_to_fetch]], + 
first_stage_function=guesser, + keep_stage=args.keep_stage) if not ver_hash_tuples: tty.die("Could not fetch any tarballs for %s." % name) -- cgit v1.2.3-70-g09d2 From e309b41972b6fd2cf9dd96e53db626694b6b7c35 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 9 Dec 2014 01:07:48 -0800 Subject: Add support for URLs with query strings - support tarballs from raw github URLs --- lib/spack/spack/url.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 27ef6f3b29..6b1f74a06a 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -47,7 +47,9 @@ it's never been told about that version before. import os import re from StringIO import StringIO +from urlparse import urlsplit, urlunsplit +import llnl.util.tty as tty from llnl.util.tty.color import * import spack.error @@ -80,6 +82,19 @@ def find_list_url(url): return os.path.dirname(url) +def strip_url(path): + """Strip query (?..) and fragment (#..) from URLs. Returns URL as two + parts: the URL and the stripped part. + """ + try: + components = urlsplit(path) + stripped = components[:3] + (None, None) + return (urlunsplit(stripped), "?%s#%s" % components[3:5]) + except ValueError: + tty.debug("Got error parsing path %s" % path) + return (path, '') # Ignore URL parse errors here + + def parse_version_offset(path): """Try to extract a version string from a filename or URL. This is taken largely from Homebrew's Version class.""" @@ -88,6 +103,9 @@ def parse_version_offset(path): if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path): path = os.path.dirname(path) + # Remove trailing ?... from URL + path, stripped = strip_url(path) + # Strip archive extension path = comp.strip_extension(path) @@ -187,6 +205,9 @@ def parse_name_offset(path, v=None): if v is None: v = parse_version(path) + # Remove trailing ?... from URL + path, stripped = strip_url(path) + # Strip archive extension path = comp.strip_extension(path) @@ -303,6 +324,9 @@ def wildcard_version(path): # Get name and version, so we can treat them specially name, v = parse_name_and_version(path) + # strip URL query/fragment first. + path, query = strip_url(path) + # protect extensions like bz2 from wildcarding. ext = comp.extension(path) path = comp.strip_extension(path) @@ -326,7 +350,7 @@ def wildcard_version(path): name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts) # Put it all back together with original name matches intact. - return ''.join(name_parts) + '.' + ext + return ''.join(name_parts) + '.' + ext + query def substitute_version(path, new_version): -- cgit v1.2.3-70-g09d2 From 2f90068661589b387526d057bcae524d2d93c97d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 10 Dec 2014 16:22:22 -0800 Subject: Handle cases where tarball is in the URL query string. --- lib/spack/spack/fetch_strategy.py | 2 +- lib/spack/spack/test/url_extrapolate.py | 7 +++ lib/spack/spack/test/url_parse.py | 5 ++ lib/spack/spack/url.py | 93 ++++++++++++++++++++++----------- 4 files changed, 76 insertions(+), 31 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index c463a0e5d8..180e8eb069 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -173,7 +173,7 @@ class URLFetchStrategy(FetchStrategy): # This is some other curl error. 
Curl will print the # error, but print a spack message too raise FailedDownloadError( - self.url, "Curl failed with error %d", spack.curl.returncode) + self.url, "Curl failed with error %d" % spack.curl.returncode) # Check if we somehow got an HTML file rather than the archive we diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index d381c1a1e4..00d8216020 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -98,3 +98,10 @@ class UrlExtrapolateTest(unittest.TestCase): 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2') self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7', 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2') + + + def test_github_raw(self): + self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7', + 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true') + self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '4.7', + 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true') diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index b8cca1e52a..ae1d559f7c 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -322,3 +322,8 @@ class UrlParseTest(unittest.TestCase): self.check( 'gcc', '4.4.7', 'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2') + + def test_github_raw_url(self): + self.check( + 'PowerParser', '2.0.7', + 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true') diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 6b1f74a06a..2948c12df5 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -82,35 +82,73 @@ def find_list_url(url): return os.path.dirname(url) -def strip_url(path): - """Strip query (?..) and fragment (#..) from URLs. Returns URL as two - parts: the URL and the stripped part. - """ +def strip_query_and_fragment(path): try: components = urlsplit(path) stripped = components[:3] + (None, None) - return (urlunsplit(stripped), "?%s#%s" % components[3:5]) + + query, frag = components[3:5] + suffix = '' + if query: suffix += '?' + query + if frag: suffix += '#' + frag + + return (urlunsplit(stripped), suffix) + except ValueError: tty.debug("Got error parsing path %s" % path) return (path, '') # Ignore URL parse errors here +def split_url_extension(path): + """Some URLs have a query string, e.g.: + + 1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true + 2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz + + In (1), the query string needs to be stripped to get at the + extension, but in (2), the filename is IN a single final query + argument. + + This strips the URL into three pieces: prefix, ext, and suffix. + The suffix contains anything that was stripped off the URL to + get at the file extension. In (1), it will be '?raw=true', but + in (2), it will be empty. e.g.: + + 1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true') + 2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', + '.tar.gz', None) + """ + prefix, ext, suffix = path, '', '' + + # Strip off sourceforge download suffix. 
+ match = re.search(r'((?:sourceforge.net|sf.net)/.*)(/download)$', path) + if match: + prefix, suffix = match.groups() + + ext = comp.extension(prefix) + if ext is not None: + prefix = comp.strip_extension(prefix) + + else: + prefix, suf = strip_query_and_fragment(prefix) + ext = comp.extension(prefix) + prefix = comp.strip_extension(prefix) + suffix = suf + suffix + if ext is None: + ext = '' + + return prefix, ext, suffix + + def parse_version_offset(path): """Try to extract a version string from a filename or URL. This is taken largely from Homebrew's Version class.""" + original_path = path - # Strip off sourceforge download stuffix. - if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path): - path = os.path.dirname(path) - - # Remove trailing ?... from URL - path, stripped = strip_url(path) - - # Strip archive extension - path = comp.strip_extension(path) + path, ext, suffix = split_url_extension(path) - # Take basename to avoid including parent dirs in version name - # Remember the offset of the stem in the full path. + # Allow matches against the basename, to avoid including parent + # dirs in version name Remember the offset of the stem in the path stem = os.path.basename(path) offset = len(path) - len(stem) @@ -128,7 +166,7 @@ def parse_version_offset(path): (r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+_(\d+))$', path), # e.g. https://github.com/hpc/lwgrp/archive/v1.0.1.tar.gz - (r'github.com/[^/]+/[^/]+/archive/v?(\d+(?:\.\d+)*)\.tar\.gz$', path), + (r'github.com/[^/]+/[^/]+/archive/v?(\d+(?:\.\d+)*)$', path), # e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style) (r'[-_](R\d+[AB]\d*(-\d+)?)', path), @@ -190,7 +228,7 @@ def parse_version_offset(path): return version, start, len(version) - raise UndetectableVersionError(path) + raise UndetectableVersionError(original_path) def parse_version(path): @@ -205,11 +243,7 @@ def parse_name_offset(path, v=None): if v is None: v = parse_version(path) - # Remove trailing ?... from URL - path, stripped = strip_url(path) - - # Strip archive extension - path = comp.strip_extension(path) + path, ext, suffix = split_url_extension(path) # Allow matching with either path or stem, as with the version. stem = os.path.basename(path) @@ -324,12 +358,7 @@ def wildcard_version(path): # Get name and version, so we can treat them specially name, v = parse_name_and_version(path) - # strip URL query/fragment first. - path, query = strip_url(path) - - # protect extensions like bz2 from wildcarding. - ext = comp.extension(path) - path = comp.strip_extension(path) + path, ext, suffix = split_url_extension(path) # Construct a case-insensitive regular expression for the package name. name_re = '(%s)' % insensitize(name) @@ -350,7 +379,11 @@ def wildcard_version(path): name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts) # Put it all back together with original name matches intact. - return ''.join(name_parts) + '.' + ext + query + result = ''.join(name_parts) + if ext: + result += '.' + ext + result += suffix + return result def substitute_version(path, new_version): -- cgit v1.2.3-70-g09d2 From 722e73f3096e44240018475146e3d81b13d95937 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 12 Dec 2014 14:53:55 -0800 Subject: Better mirror path calculation. 
- Add support in spack.url for extrapolating actual file type for URL - Move mirror path computation to mirror.py from package.py --- lib/spack/spack/mirror.py | 3 ++- lib/spack/spack/package.py | 14 ++++---------- lib/spack/spack/url.py | 21 +++++++++++++++++++++ 3 files changed, 27 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 9c700cd551..929c514b61 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -37,6 +37,7 @@ from llnl.util.filesystem import * import spack import spack.error +import spack.url as url import spack.fetch_strategy as fs from spack.spec import Spec from spack.stage import Stage @@ -52,7 +53,7 @@ def mirror_archive_filename(spec): fetcher = spec.package.fetcher if isinstance(fetcher, fs.URLFetchStrategy): # If we fetch this version with a URLFetchStrategy, use URL's archive type - ext = extension(fetcher.url) + ext = url.downloaded_file_extension(fetcher.url) else: # Otherwise we'll make a .tar.gz ourselves ext = 'tar.gz' diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index bb6180c521..d296e6c189 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -51,6 +51,7 @@ import spack import spack.spec import spack.error import spack.compilers +import spack.mirror import spack.hooks import spack.build_environment as build_env import spack.url as url @@ -453,9 +454,9 @@ class Package(object): raise ValueError("Can only get a stage for a concrete package.") if self._stage is None: - self._stage = Stage(self.fetcher, - mirror_path=self.mirror_path(), - name=self.spec.short_spec) + mp = spack.mirror.mirror_archive_filename(self.spec) + self._stage = Stage( + self.fetcher, mirror_path=mp, name=self.spec.short_spec) return self._stage @@ -475,13 +476,6 @@ class Package(object): self._fetcher = f - def mirror_path(self): - """Get path to this package's archive in a mirror.""" - filename = "%s-%s." % (self.name, self.version) - filename += extension(self.url) if self.url else "tar.gz" - return "%s/%s" % (self.name, filename) - - def preorder_traversal(self, visited=None, **kwargs): """This does a preorder traversal of the package's dependence DAG.""" virtual = kwargs.get("virtual", False) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 2948c12df5..58838306af 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -140,6 +140,27 @@ def split_url_extension(path): return prefix, ext, suffix +def downloaded_file_extension(path): + """This returns the type of archive a URL refers to. This is + sometimes confusing becasue of URLs like: + + (1) https://github.com/petdance/ack/tarball/1.93_02 + + Where the URL doesn't actually contain the filename. We need + to know what type it is so that we can appropriately name files + in mirrors. + """ + match = re.search(r'github.com/.+/(zip|tar)ball/', path) + if match: + if match.group(1) == 'zip': return 'zip' + elif match.group(1) == 'tar': return 'tar.gz' + + prefix, ext, suffix = split_url_extension(path) + if not ext: + raise UrlParseError("Cannot deduce archive type in %s" % path, path) + return ext + + def parse_version_offset(path): """Try to extract a version string from a filename or URL. This is taken largely from Homebrew's Version class.""" -- cgit v1.2.3-70-g09d2 From f1c5e64c23037a3dd7fa65794a1f917f9d5f812c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 15 Dec 2014 14:46:34 -0800 Subject: Partial fix for SPACK-48. 
- Try to accommodate packages that have grown dependencies better. - This will only get fully fixed when optional dependencies are supported and some extra functionality is added to the spec syntax. --- lib/spack/spack/cmd/uninstall.py | 3 +-- lib/spack/spack/directory_layout.py | 31 ++++++++++++++++++------------- lib/spack/spack/packages.py | 3 ++- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 84eb4703a6..e787c460ad 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -72,13 +72,12 @@ def uninstall(parser, args): for s in matching_specs: try: # should work if package is known to spack - pkgs.append(spack.db.get(s)) + pkgs.append(s.package) except spack.packages.UnknownPackageError, e: # The package.py file has gone away -- but still want to uninstall. spack.Package(s).do_uninstall(force=True) - # Sort packages to be uninstalled by the number of installed dependents # This ensures we do things in the right order def num_installed_deps(pkg): diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 9b31aad5fe..42cac0c9d2 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -157,19 +157,24 @@ class SpecHashDirectoryLayout(DirectoryLayout): # Specs from files are assumed normal and concrete spec = Spec(spec_file.read().replace('\n', '')) - # If we do not have a package on hand for this spec, we know - # it is concrete, and we *assume* that it is normal. This - # prevents us from trying to fetch a non-existing package, and - # allows best effort for commands like spack find. - if not spack.db.exists(spec.name): - spec._normal = True - spec._concrete = True - else: - spec.normalize() - if not spec.concrete: - tty.warn("Spec read from installed package is not concrete:", - path, spec) - + if all(spack.db.exists(s.name) for s in spec.traverse()): + copy = spec.copy() + copy.normalize() + if copy.concrete: + return copy # These are specs spack still understands. + + # If we get here, either the spec is no longer in spack, or + # something about its dependencies has changed. So we need to + # just assume the read spec is correct. We'll lose graph + # information if we do this, but this is just for best effort + # for commands like uninstall and find. Currently Spack + # doesn't do anything that needs the graph info after install. + + # TODO: store specs with full connectivity information, so + # that we don't have to normalize or reconstruct based on + # changing dependencies in the Spack tree. 
+ spec._normal = True + spec._concrete = True return spec diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 047d82a93a..25d01fe7eb 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -74,7 +74,8 @@ class PackageDB(object): if not spec in self.instances: package_class = self.get_class_for_package_name(spec.name) try: - self.instances[spec.copy()] = package_class(spec) + copy = spec.copy() + self.instances[copy] = package_class(copy) except Exception, e: raise FailedConstructorError(spec.name, e) -- cgit v1.2.3-70-g09d2 From a9be5e723977629def451d783d6a4a75296060ba Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Thu, 18 Dec 2014 11:31:58 -0800 Subject: add gperftools (tcmalloc and friends) --- var/spack/packages/gperftools/package.py | 38 ++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 var/spack/packages/gperftools/package.py diff --git a/var/spack/packages/gperftools/package.py b/var/spack/packages/gperftools/package.py new file mode 100644 index 0000000000..8900462324 --- /dev/null +++ b/var/spack/packages/gperftools/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Gperftools(Package): + """Google's fast malloc/free implementation, especially for multi-threaded applications. + Contains tcmalloc, heap-checker, heap-profiler, and cpu-profiler.""" + homepage = "https://code.google.com/p/gperftools" + url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz" + + version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz") + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 08f1701e359073e4704489190e7708764cf8208a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 18 Dec 2014 13:32:06 -0800 Subject: Allow fake installations (just make the directory). - Use for debugging. 
--- lib/spack/spack/cmd/install.py | 6 +++++- lib/spack/spack/package.py | 20 ++++++++++++++------ 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 2374d02feb..2c2deb2803 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -43,6 +43,9 @@ def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check packages against checksum") + subparser.add_argument( + '--fake', action='store_true', dest='fake', + help="Fake install. Just remove the prefix and touch a fake file in it.") subparser.add_argument( 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") @@ -59,4 +62,5 @@ def install(parser, args): package = spack.db.get(spec) package.do_install(keep_prefix=args.keep_prefix, keep_stage=args.keep_stage, - ignore_deps=args.ignore_deps) + ignore_deps=args.ignore_deps, + fake=args.fake) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index bb6180c521..9c67b0bb51 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -709,9 +709,10 @@ class Package(object): Package implementations should override install(). """ # whether to keep the prefix on failure. Default is to destroy it. - keep_prefix = kwargs.get('keep_prefix', False) - keep_stage = kwargs.get('keep_stage', False) - ignore_deps = kwargs.get('ignore_deps', False) + keep_prefix = kwargs.get('keep_prefix', False) + keep_stage = kwargs.get('keep_stage', False) + ignore_deps = kwargs.get('ignore_deps', False) + fake_install = kwargs.get('fake', False) if not self.spec.concrete: raise ValueError("Can only install concrete packages.") @@ -725,7 +726,8 @@ class Package(object): if not ignore_deps: self.do_install_dependencies() - self.do_patch() + if not fake_install: + self.do_patch() # Fork a child process to do the build. This allows each # package authors to have full control over their environment, @@ -750,8 +752,14 @@ class Package(object): build_env.set_build_environment_variables(self) build_env.set_module_variables_for_package(self) - # Subclasses implement install() to do the real work. - self.install(self.spec, self.prefix) + if fake_install: + mkdirp(self.prefix.bin) + touch(join_path(self.prefix.bin, 'fake')) + mkdirp(self.prefix.lib) + mkdirp(self.prefix.man1) + else: + # Subclasses implement install() to do the real work. + self.install(self.spec, self.prefix) # Ensure that something was actually installed. if not os.listdir(self.prefix): -- cgit v1.2.3-70-g09d2 From 5cd4ddaf089471bb256174362122e010566b441e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 18 Dec 2014 21:38:25 -0800 Subject: Fix for SPACK-49. - name conflict in imp.load_source caused this to fail. - Python modules loaded by imp have unique names now. 
--- lib/spack/spack/__init__.py | 2 +- lib/spack/spack/hooks/__init__.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index da7088640f..6697e00e40 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -26,7 +26,7 @@ import os import tempfile from llnl.util.filesystem import * -# This lives in $prefix/lib/spac/spack/__file__ +# This lives in $prefix/lib/spack/spack/__file__ prefix = ancestor(__file__, 4) # The spack script itself diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 4f0d574e49..98b7f2323f 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -47,8 +47,11 @@ import spack def all_hook_modules(): modules = [] for name in list_modules(spack.hooks_path): + mod_name = __name__ + '.' + name path = join_path(spack.hooks_path, name) + ".py" - modules.append(imp.load_source('spack.hooks', path)) + mod = imp.load_source(mod_name, path) + modules.append(mod) + return modules -- cgit v1.2.3-70-g09d2 From ab3bf6190362747b5c7852cc98963beb160951e0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 19 Dec 2014 11:09:37 -0800 Subject: Fix for SPACK-50 Bad format string in version check. --- bin/spack | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/spack b/bin/spack index 9fbb65f349..b345a5079d 100755 --- a/bin/spack +++ b/bin/spack @@ -25,7 +25,8 @@ ############################################################################## import sys if not sys.version_info[:2] >= (2,6): - sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info) + v_info = sys.version_info[:3] + sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info) import os -- cgit v1.2.3-70-g09d2 From 983f35f32a9f421f93d9cae636aee6f182749b6f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 16 Dec 2014 15:24:23 -0800 Subject: Tweak extrae indentation. --- var/spack/packages/extrae/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/extrae/package.py b/var/spack/packages/extrae/package.py index 3b842bc1ec..b1a3a3e440 100644 --- a/var/spack/packages/extrae/package.py +++ b/var/spack/packages/extrae/package.py @@ -31,7 +31,15 @@ class Extrae(Package): elif 'mvapich2' in spec: mpi = spec['mvapich2'] - configure("--prefix=%s" % prefix, "--with-mpi=%s" % mpi.prefix, "--with-unwind=%s" % spec['libunwind'].prefix, "--with-dyninst=%s" % spec['dyninst'].prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-dwarf=%s" % spec['libdwarf'].prefix, "--with-papi=%s" % spec['papi'].prefix, "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) + configure("--prefix=%s" % prefix, + "--with-mpi=%s" % mpi.prefix, + "--with-unwind=%s" % spec['libunwind'].prefix, + "--with-dyninst=%s" % spec['dyninst'].prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-dwarf=%s" % spec['libdwarf'].prefix, + "--with-papi=%s" % spec['papi'].prefix, + "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, + "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) make() make("install", parallel=False) -- cgit v1.2.3-70-g09d2 From e0b5890ab5ba528e55867071020b624e3b334ca4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 30 Oct 2014 15:02:06 -0700 Subject: Initial versions of Qt and some dependencies. 
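Note the per-version download URL in the Qt package below: when a package has no class-level url attribute, a version() entry can carry its own tarball location. A minimal sketch of that pattern, using a hypothetical package and checksum, written with the url= keyword form that later commits in this series settle on:

    from spack import *

    class Example(Package):
        """Hypothetical package illustrating a per-version URL."""
        homepage = "http://www.example.com"

        # No class-level 'url'; this version supplies its own tarball location.
        version('1.0', '00000000000000000000000000000000',
                url="http://www.example.com/example-1.0.tar.gz")

        def install(self, spec, prefix):
            configure("--prefix=%s" % prefix)
            make()
            make("install")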
--- var/spack/packages/dbus/package.py | 40 ++++++++++++++++++++++++++++++++++++++ var/spack/packages/pcre/package.py | 15 ++++++++++++++ var/spack/packages/qt/package.py | 18 +++++++++++++++++ 3 files changed, 73 insertions(+) create mode 100644 var/spack/packages/dbus/package.py create mode 100644 var/spack/packages/pcre/package.py create mode 100644 var/spack/packages/qt/package.py diff --git a/var/spack/packages/dbus/package.py b/var/spack/packages/dbus/package.py new file mode 100644 index 0000000000..7f0019d67c --- /dev/null +++ b/var/spack/packages/dbus/package.py @@ -0,0 +1,40 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install dbus +# +# You can always get back here to change things with: +# +# spack edit dbus +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Dbus(Package): + """FIXME: put a proper description of your package here.""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.example.com" + url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz" + + version('1.9.0', 'ec6895a4d5c0637b01f0d0e7689e2b36') + version('1.8.8', 'b9f4a18ee3faa1e07c04aa1d83239c43') + version('1.8.6', '6a08ba555d340e9dfe2d623b83c0eea8') + version('1.8.4', '4717cb8ab5b80978fcadf2b4f2f72e1b') + version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. + configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/packages/pcre/package.py b/var/spack/packages/pcre/package.py new file mode 100644 index 0000000000..3424048a6c --- /dev/null +++ b/var/spack/packages/pcre/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Pcre(Package): + """The PCRE package contains Perl Compatible Regular Expression + libraries. These are useful for implementing regular expression + pattern matching using the same syntax and semantics as Perl 5.""" + homepage = "http://www.pcre.org""" + url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2" + + version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py new file mode 100644 index 0000000000..aae9367a1b --- /dev/null +++ b/var/spack/packages/qt/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Qt(Package): + """Qt is a comprehensive cross-platform C++ application framework.""" + homepage = "http://qt.io" + + version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', + "http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") + + # depends_on("zlib") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. 
+ configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") -- cgit v1.2.3-70-g09d2 From 8edf299dd21cce9f6f297a81a5fb375b3417bf2a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 1 Nov 2014 15:00:24 -0700 Subject: gnutls, nettle, wget, dbus --- var/spack/packages/dbus/package.py | 35 ++++++++++------------------------- var/spack/packages/gnutls/package.py | 22 ++++++++++++++++++++++ var/spack/packages/nettle/package.py | 15 +++++++++++++++ var/spack/packages/qt/package.py | 2 +- var/spack/packages/wget/package.py | 21 +++++++++++++++++++++ 5 files changed, 69 insertions(+), 26 deletions(-) create mode 100644 var/spack/packages/gnutls/package.py create mode 100644 var/spack/packages/nettle/package.py create mode 100644 var/spack/packages/wget/package.py diff --git a/var/spack/packages/dbus/package.py b/var/spack/packages/dbus/package.py index 7f0019d67c..5fee103f03 100644 --- a/var/spack/packages/dbus/package.py +++ b/var/spack/packages/dbus/package.py @@ -1,25 +1,16 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install dbus -# -# You can always get back here to change things with: -# -# spack edit dbus -# -# See the spack documentation for more information on building -# packages. -# from spack import * class Dbus(Package): - """FIXME: put a proper description of your package here.""" - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.example.com" + """D-Bus is a message bus system, a simple way for applications to + talk to one another. D-Bus supplies both a system daemon (for + events such new hardware device printer queue ) and a + per-user-login-session daemon (for general IPC needs among user + applications). Also, the message bus is built on top of a + general one-to-one message passing framework, which can be used + by any two applications to communicate directly (without going + through the message bus daemon).""" + + homepage = "http://dbus.freedesktop.org/" url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz" version('1.9.0', 'ec6895a4d5c0637b01f0d0e7689e2b36') @@ -28,13 +19,7 @@ class Dbus(Package): version('1.8.4', '4717cb8ab5b80978fcadf2b4f2f72e1b') version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') - # FIXME: Add dependencies if this package requires them. - # depends_on("foo") - def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. configure("--prefix=%s" % prefix) - - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/packages/gnutls/package.py b/var/spack/packages/gnutls/package.py new file mode 100644 index 0000000000..cf57a24a6d --- /dev/null +++ b/var/spack/packages/gnutls/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Gnutls(Package): + """GnuTLS is a secure communications library implementing the SSL, + TLS and DTLS protocols and technologies around them. It + provides a simple C language application programming interface + (API) to access the secure communications protocols as well as + APIs to parse and write X.509, PKCS #12, OpenPGP and other + required structures. 
It is aimed to be portable and efficient + with focus on security and interoperability.""" + + homepage = "http://www.gnutls.org" + url = "ftp://ftp.gnutls.org/gcrypt/gnutls/v3.3/gnutls-3.3.9.tar.xz" + + version('3.3.9', 'ff61b77e39d09f1140ab5a9cf52c58b6') + + depends_on("nettle") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/nettle/package.py b/var/spack/packages/nettle/package.py new file mode 100644 index 0000000000..0f20bc06df --- /dev/null +++ b/var/spack/packages/nettle/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Nettle(Package): + """The Nettle package contains the low-level cryptographic library + that is designed to fit easily in many contexts.""" + + homepage = "http://www.example.com" + url = "http://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz" + + version('2.7', '2caa1bd667c35db71becb93c5d89737f') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index aae9367a1b..fe0d4531c1 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -5,7 +5,7 @@ class Qt(Package): homepage = "http://qt.io" version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', - "http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") + url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") # depends_on("zlib") diff --git a/var/spack/packages/wget/package.py b/var/spack/packages/wget/package.py new file mode 100644 index 0000000000..c8fd025122 --- /dev/null +++ b/var/spack/packages/wget/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Wget(Package): + """GNU Wget is a free software package for retrieving files using + HTTP, HTTPS and FTP, the most widely-used Internet protocols. 
It + is a non-interactive commandline tool, so it may easily be called + from scripts, cron jobs, terminals without X-Windows support, + etc.""" + + homepage = "http://www.gnu.org/software/wget/" + url = "http://ftp.gnu.org/gnu/wget/wget-1.16.tar.xz" + + version('1.16', 'fe102975ab3a6c049777883f1bb9ad07') + + depends_on("openssl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-ssl=openssl") + make() + make("install") -- cgit v1.2.3-70-g09d2 From 01ca61c7cce510f7f80912f5b01bd69043de7008 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 22 Dec 2014 23:23:11 -0800 Subject: Updates for Qt dependencies --- var/spack/packages/ImageMagick/package.py | 1 + var/spack/packages/jpeg/package.py | 2 +- var/spack/packages/lcms/package.py | 19 +++++++++++++++++++ var/spack/packages/libmng/package.py | 19 +++++++++++++++++++ var/spack/packages/qt/package.py | 2 +- 5 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 var/spack/packages/lcms/package.py create mode 100644 var/spack/packages/libmng/package.py diff --git a/var/spack/packages/ImageMagick/package.py b/var/spack/packages/ImageMagick/package.py index ae06368f85..657b9255a3 100644 --- a/var/spack/packages/ImageMagick/package.py +++ b/var/spack/packages/ImageMagick/package.py @@ -5,6 +5,7 @@ class Imagemagick(Package): homepage = "http://www.imagemagic.org" url = "http://www.imagemagick.org/download/ImageMagick-6.8.9-10.tar.gz" + version('6.9.0-0', '2cf094cb86ec518fa5bc669ce2d21613') version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c') version('6.8.9-9', 'e63fed3e3550851328352c708f800676') diff --git a/var/spack/packages/jpeg/package.py b/var/spack/packages/jpeg/package.py index bb5b77e01c..87820467db 100644 --- a/var/spack/packages/jpeg/package.py +++ b/var/spack/packages/jpeg/package.py @@ -5,7 +5,7 @@ class Jpeg(Package): homepage = "http://www.ijg.org" url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" - version('9a', 'b397211ddfd506b92cd5e02a22ac924d') + version('9a', '3353992aecaee1805ef4109aadd433e7') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/lcms/package.py b/var/spack/packages/lcms/package.py new file mode 100644 index 0000000000..a53c2f997a --- /dev/null +++ b/var/spack/packages/lcms/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Lcms(Package): + """Little cms is a color management library. Implements fast + transforms between ICC profiles. It is focused on speed, and is + portable across several platforms (MIT license).""" + homepage = "http://www.littlecms.com" + url = "http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz" + + version('2.6', 'f4c08d38ceade4a664ebff7228910a33') + + depends_on("jpeg") + depends_on("libtiff") + depends_on("zlib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/libmng/package.py b/var/spack/packages/libmng/package.py new file mode 100644 index 0000000000..11d060496c --- /dev/null +++ b/var/spack/packages/libmng/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Libmng(Package): + """libmng -THE reference library for reading, displaying, writing + and examining Multiple-Image Network Graphics. 
MNG is the animation + extension to the popular PNG image-format.""" + homepage = "http://sourceforge.net/projects/libmng/" + url = "http://downloads.sourceforge.net/project/libmng/libmng-devel/2.0.2/libmng-2.0.2.tar.gz" + + version('2.0.2', '1ffefaed4aac98475ee6267422cbca55') + + depends_on("jpeg") + depends_on("zlib") + depends_on("lcms") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index fe0d4531c1..270a28bf0a 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -7,7 +7,7 @@ class Qt(Package): version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") - # depends_on("zlib") + depends_on("lcms") def install(self, spec, prefix): # FIXME: Modify the configure line to suit your build system here. -- cgit v1.2.3-70-g09d2 From 852c1dc2866fc59b6250288aad6c8d7c988588a8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 23 Dec 2014 16:35:54 -0800 Subject: Print out fetch, build, and total time for builds. --- lib/spack/spack/package.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fa91dbbbea..4ab7ff23cf 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -35,6 +35,7 @@ README. """ import os import re +import time import inspect import subprocess import platform as py_platform @@ -390,6 +391,10 @@ class Package(object): if not hasattr(self, 'list_depth'): self.list_depth = 1 + # Set up some internal variables for timing. + self._fetch_time = 0.0 + self._total_time = 0.0 + @property def version(self): @@ -606,6 +611,7 @@ class Package(object): if not self.spec.concrete: raise ValueError("Can only fetch concrete packages.") + start_time = time.time() if spack.do_checksum and not self.version in self.versions: tty.warn("There is no checksum on file to fetch %s safely." % self.spec.format('$_$@')) @@ -624,6 +630,7 @@ class Package(object): "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg) self.stage.fetch() + self._fetch_time = time.time() - start_time if spack.do_checksum and self.version in self.versions: self.stage.check() @@ -720,6 +727,7 @@ class Package(object): if not ignore_deps: self.do_install_dependencies() + start_time = time.time() if not fake_install: self.do_patch() @@ -765,7 +773,13 @@ class Package(object): if not keep_stage: self.stage.destroy() - tty.msg("Successfully installed %s" % self.name) + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time + + tty.msg("Successfully installed %s." % self.name, + "Fetch: %.2f sec. Build: %.2f sec. Total: %.2f sec." + % (self._fetch_time, build_time, self._total_time)) print_pkg(self.prefix) # Use os._exit here to avoid raising a SystemExit exception, -- cgit v1.2.3-70-g09d2 From b3042db75523b465c088f9aff8efb73049a9c81c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 16:05:45 -0800 Subject: Add patch function to Package, so that packages can define custom patch functions. 
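A sketch of what this enables, with a hypothetical package (the names and the edit itself are illustrative): a package may now define a patch() method that runs in the expanded source tree, after any patch files declared for matching specs and before install().

    from spack import *

    class Example(Package):
        """Hypothetical package showing a custom patch() method."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/example-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        def patch(self):
            # Runs in the staged source directory before install().
            filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True)

        def install(self, spec, prefix):
            configure("--prefix=%s" % prefix)
            make()
            make("install")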
--- lib/spack/spack/build_environment.py | 7 +++++++ lib/spack/spack/package.py | 16 +++++++++++----- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index a2fcff1f10..87cfa772ca 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -199,3 +199,10 @@ def set_module_variables_for_package(pkg): # Useful directories within the prefix are encapsulated in # a Prefix object. m.prefix = pkg.prefix + + +def setup_package(pkg): + """Execute all environment setup routines.""" + set_compiler_environment_variables(pkg) + set_build_environment_variables(pkg) + set_module_variables_for_package(pkg) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 4ab7ff23cf..1a797e88b1 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -662,8 +662,11 @@ class Package(object): # Kick off the stage first. self.do_stage() + # Package can add its own patch function. + has_patch_fun = hasattr(self, 'patch') and callable(self.patch) + # If there are no patches, note it. - if not self.patches: + if not self.patches and not has_patch_fun: tty.msg("No patches needed for %s." % self.name) return @@ -686,7 +689,7 @@ class Package(object): tty.msg("Already patched %s" % self.name) return - # Apply all the patches for specs that match this on + # Apply all the patches for specs that match this one for spec, patch_list in self.patches.items(): if self.spec.satisfies(spec): for patch in patch_list: @@ -704,6 +707,11 @@ class Package(object): os.remove(bad_file) touch(good_file) + if has_patch_fun: + self.patch() + + tty.msg("Patched %s" % self.name) + def do_install(self, **kwargs): """This class should call this version of the install method. @@ -750,9 +758,7 @@ class Package(object): spack.install_layout.make_path_for_spec(self.spec) # Set up process's build environment before running install. - build_env.set_compiler_environment_variables(self) - build_env.set_build_environment_variables(self) - build_env.set_module_variables_for_package(self) + build_env.setup_package(self) if fake_install: mkdirp(self.prefix.bin) -- cgit v1.2.3-70-g09d2 From 7b71e6fb5a5c8c73c71814ceaa756681afc73c61 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 16:06:30 -0800 Subject: spack env command spack env allows regular commands to be run with a spack build environment. It also displays the spack build environment for a package. --- lib/spack/spack/cmd/env.py | 69 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 lib/spack/spack/cmd/env.py diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py new file mode 100644 index 0000000000..bde76b5daf --- /dev/null +++ b/lib/spack/spack/cmd/env.py @@ -0,0 +1,69 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +from external import argparse +import llnl.util.tty as tty +import spack.cmd +import spack.build_environment as build_env + +description = "Run a command with the environment for a particular spec's install." + +def setup_parser(subparser): + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.") + + +def env(parser, args): + if not args.spec: + tty.die("spack env requires a spec.") + + # Specs may have spaces in them, so if they do, require that the + # caller put a '--' between the spec and the command to be + # executed. If there is no '--', assume that the spec is the + # first argument. + sep = '--' + if sep in args.spec: + s = args.spec.index(sep) + spec = args.spec[:s] + cmd = args.spec[s+1:] + else: + spec = args.spec[0] + cmd = args.spec[1:] + + specs = spack.cmd.parse_specs(spec, concretize=True) + if len(specs) > 1: + tty.die("spack env only takes one spec.") + spec = specs[0] + + build_env.setup_package(spec.package) + + if not cmd: + # If no command act like the "env" command and print out env vars. + for key, val in os.environ.items(): + print "%s=%s" % (key, val) + + else: + # Otherwise execute the command with the new environment + os.execvp(cmd[0], cmd) -- cgit v1.2.3-70-g09d2 From 20388ece86c91c7f2db112b6da38cfb2b8853196 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 16:07:39 -0800 Subject: Clearer code in filter_file. --- lib/spack/llnl/util/filesystem.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 6a04d98a18..24c77a80db 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -63,8 +63,11 @@ def filter_file(regex, repl, *filenames, **kwargs): # Allow strings to use \1, \2, etc. for replacement, like sed if not callable(repl): unescaped = repl.replace(r'\\', '\\') - repl = lambda m: re.sub( - r'\\([0-9])', lambda x: m.group(int(x.group(1))), unescaped) + def replace_groups_with_groupid(m): + def groupid_to_group(x): + return m.group(int(x.group(1))) + return re.sub(r'\\([1-9])', groupid_to_group, unescaped) + repl = replace_groups_with_groupid if string: regex = re.escape(regex) -- cgit v1.2.3-70-g09d2 From d98e475361acdadb3b734fd2dbf97ec603f9b7e8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 16:09:42 -0800 Subject: Qt4 builds successfully with proper RPATHs. 
--- var/spack/packages/atk/package.py | 18 +++++++++++++++ var/spack/packages/cairo/package.py | 19 +++++++++++++++ var/spack/packages/gdk-pixbuf/package.py | 22 ++++++++++++++++++ var/spack/packages/glib/package.py | 18 +++++++++++++++ var/spack/packages/gtkplus/package.py | 22 ++++++++++++++++++ var/spack/packages/harfbuzz/package.py | 20 ++++++++++++++++ var/spack/packages/icu/package.py | 25 ++++++++++++++++++++ var/spack/packages/libjpeg-turbo/package.py | 20 ++++++++++++++++ var/spack/packages/libmng/package.py | 4 ++++ var/spack/packages/libtiff/package.py | 2 ++ var/spack/packages/libxml2/package.py | 16 +++++++++++++ var/spack/packages/nasm/package.py | 14 +++++++++++ var/spack/packages/pango/package.py | 19 +++++++++++++++ var/spack/packages/pixman/package.py | 18 +++++++++++++++ var/spack/packages/qt/package.py | 36 +++++++++++++++++++++++++---- var/spack/packages/xz/package.py | 16 +++++++++++++ var/spack/packages/yasm/package.py | 16 +++++++++++++ 17 files changed, 300 insertions(+), 5 deletions(-) create mode 100644 var/spack/packages/atk/package.py create mode 100644 var/spack/packages/cairo/package.py create mode 100644 var/spack/packages/gdk-pixbuf/package.py create mode 100644 var/spack/packages/glib/package.py create mode 100644 var/spack/packages/gtkplus/package.py create mode 100644 var/spack/packages/harfbuzz/package.py create mode 100644 var/spack/packages/icu/package.py create mode 100644 var/spack/packages/libjpeg-turbo/package.py create mode 100644 var/spack/packages/libxml2/package.py create mode 100644 var/spack/packages/nasm/package.py create mode 100644 var/spack/packages/pango/package.py create mode 100644 var/spack/packages/pixman/package.py create mode 100644 var/spack/packages/xz/package.py create mode 100644 var/spack/packages/yasm/package.py diff --git a/var/spack/packages/atk/package.py b/var/spack/packages/atk/package.py new file mode 100644 index 0000000000..769805b227 --- /dev/null +++ b/var/spack/packages/atk/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Atk(Package): + """ATK provides the set of accessibility interfaces that are + implemented by other toolkits and applications. Using the ATK + interfaces, accessibility tools have full access to view and + control running applications.""" + homepage = "https://developer.gnome.org/atk/" + url = "http://ftp.gnome.org/pub/gnome/sources/atk/2.14/atk-2.14.0.tar.xz" + + version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b') + + depends_on("glib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/cairo/package.py b/var/spack/packages/cairo/package.py new file mode 100644 index 0000000000..e1ac8aaa7d --- /dev/null +++ b/var/spack/packages/cairo/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Cairo(Package): + """Cairo is a 2D graphics library with support for multiple output devices.""" + homepage = "http://cairographics.org" + url = "http://cairographics.org/releases/cairo-1.14.0.tar.xz" + + version('1.14.0', 'fc3a5edeba703f906f2241b394f0cced') + + depends_on("libpng") + depends_on("glib") + depends_on("pixman") + depends_on("fontconfig@2.10.91:") # Require newer version of fontconfig. 
+ + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-tee") + make() + make("install") diff --git a/var/spack/packages/gdk-pixbuf/package.py b/var/spack/packages/gdk-pixbuf/package.py new file mode 100644 index 0000000000..14a5569984 --- /dev/null +++ b/var/spack/packages/gdk-pixbuf/package.py @@ -0,0 +1,22 @@ +from spack import * + +class GdkPixbuf(Package): + """The Gdk Pixbuf is a toolkit for image loading and pixel buffer + manipulation. It is used by GTK+ 2 and GTK+ 3 to load and + manipulate images. In the past it was distributed as part of + GTK+ 2 but it was split off into a separate package in + preparation for the change to GTK+ 3.""" + homepage = "https://developer.gnome.org/gdk-pixbuf/" + url = "http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.31/gdk-pixbuf-2.31.1.tar.xz" + + version('2.31.2', '6be6bbc4f356d4b79ab4226860ab8523') + + depends_on("glib") + depends_on("jpeg") + depends_on("libpng") + depends_on("libtiff") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/glib/package.py b/var/spack/packages/glib/package.py new file mode 100644 index 0000000000..178f0b9df5 --- /dev/null +++ b/var/spack/packages/glib/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Glib(Package): + """The GLib package contains a low-level libraries useful for + providing data structure handling for C, portability wrappers + and interfaces for such runtime functionality as an event loop, + threads, dynamic loading and an object system.""" + homepage = "https://developer.gnome.org/glib/" + url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz" + + version('2.42.1', '89c4119e50e767d3532158605ee9121a') + + depends_on("libffi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/gtkplus/package.py b/var/spack/packages/gtkplus/package.py new file mode 100644 index 0000000000..0ebc7100de --- /dev/null +++ b/var/spack/packages/gtkplus/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Gtkplus(Package): + """The GTK+ 2 package contains libraries used for creating graphical user interfaces for applications.""" + homepage = "http://www.gtk.org" + + version('2.24.25', '612350704dd3aacb95355a4981930c6f', + url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz") + + depends_on("atk") + depends_on("gdk-pixbuf") + depends_on("pango") + + def patch(self): + # remove disable deprecated flag. 
+ filter_file(r'CFLAGS="-DGDK_PIXBUF_DISABLE_DEPRECATED $CFLAGS"', + '', 'configure', string=True) + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/harfbuzz/package.py b/var/spack/packages/harfbuzz/package.py new file mode 100644 index 0000000000..ed7c42a909 --- /dev/null +++ b/var/spack/packages/harfbuzz/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Harfbuzz(Package): + """The Harfbuzz package contains an OpenType text shaping engine.""" + homepage = "http://www.freedesktop.org/wiki/Software/HarfBuzz/" + url = "http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-0.9.37.tar.bz2" + + version('0.9.37', 'bfe733250e34629a188d82e3b971bc1e') + + depends_on("glib") + depends_on("icu") + depends_on("freetype") + + def patch(self): + change_sed_delimiter('@', ';', 'src/Makefile.in') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/icu/package.py b/var/spack/packages/icu/package.py new file mode 100644 index 0000000000..f256ec5712 --- /dev/null +++ b/var/spack/packages/icu/package.py @@ -0,0 +1,25 @@ +from spack import * + +class Icu(Package): + """The International Components for Unicode (ICU) package is a + mature, widely used set of C/C++ libraries providing Unicode and + Globalization support for software applications. ICU is widely + portable and gives applications the same results on all + platforms.""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.example.com" + url = "http://download.icu-project.org/files/icu4c/54.1/icu4c-54_1-src.tgz" + + version('54.1', 'e844caed8f2ca24c088505b0d6271bc0') + + + def url_for_version(self, version): + return "http://download.icu-project.org/files/icu4c/%s/icu4c-%s-src.tgz" % ( + version, str(version).replace('.', '_')) + + + def install(self, spec, prefix): + with working_dir("source"): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/libjpeg-turbo/package.py b/var/spack/packages/libjpeg-turbo/package.py new file mode 100644 index 0000000000..07ee183947 --- /dev/null +++ b/var/spack/packages/libjpeg-turbo/package.py @@ -0,0 +1,20 @@ +from spack import * + +class LibjpegTurbo(Package): + """libjpeg-turbo is a fork of the original IJG libjpeg which uses + SIMD to accelerate baseline JPEG compression and + decompression. libjpeg is a library that implements JPEG image + encoding, decoding and transcoding.""" + homepage = "http://libjpeg-turbo.virtualgl.org" + url = "http://downloads.sourceforge.net/libjpeg-turbo/libjpeg-turbo-1.3.1.tar.gz" + + version('1.3.1', '2c3a68129dac443a72815ff5bb374b05') + + # Can use either of these. + depends_on("yasm") + depends_on("nasm") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/libmng/package.py b/var/spack/packages/libmng/package.py index 11d060496c..e5336ea2c2 100644 --- a/var/spack/packages/libmng/package.py +++ b/var/spack/packages/libmng/package.py @@ -13,6 +13,10 @@ class Libmng(Package): depends_on("zlib") depends_on("lcms") + def patch(self): + # jpeg requires stdio to beincluded before its headrs. 
+ filter_file(r'^(\#include \)', '#include\n\\1', 'libmng_types.h') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() diff --git a/var/spack/packages/libtiff/package.py b/var/spack/packages/libtiff/package.py index ec54cf7adf..63c6704cb8 100644 --- a/var/spack/packages/libtiff/package.py +++ b/var/spack/packages/libtiff/package.py @@ -8,6 +8,8 @@ class Libtiff(Package): version('4.0.3', '051c1068e6a0627f461948c365290410') depends_on('jpeg') + depends_on('zlib') + depends_on('xz') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/libxml2/package.py b/var/spack/packages/libxml2/package.py new file mode 100644 index 0000000000..5eaed36d94 --- /dev/null +++ b/var/spack/packages/libxml2/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libxml2(Package): + """Libxml2 is the XML C parser and toolkit developed for the Gnome + project (but usable outside of the Gnome platform), it is free + software available under the MIT License.""" + homepage = "http://xmlsoft.org" + url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz" + + version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--without-python") + make() + make("install") diff --git a/var/spack/packages/nasm/package.py b/var/spack/packages/nasm/package.py new file mode 100644 index 0000000000..933b6a62c5 --- /dev/null +++ b/var/spack/packages/nasm/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Nasm(Package): + """NASM (Netwide Assembler) is an 80x86 assembler designed for + portability and modularity. It includes a disassembler as well.""" + homepage = "http://www.nasm.us" + url = "http://www.nasm.us/pub/nasm/releasebuilds/2.11.06/nasm-2.11.06.tar.xz" + + version('2.11.06', '2b958e9f5d200641e6fc9564977aecc5') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/pango/package.py b/var/spack/packages/pango/package.py new file mode 100644 index 0000000000..df43625bf5 --- /dev/null +++ b/var/spack/packages/pango/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Pango(Package): + """Pango is a library for laying out and rendering of text, with + an emphasis on internationalization. 
It can be used anywhere + that text layout is needed, though most of the work on Pango so + far has been done in the context of the GTK+ widget toolkit.""" + homepage = "http://www.pango.org" + url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz" + + version('1.36.8', '217a9a753006275215fa9fa127760ece') + + depends_on("harfbuzz") + depends_on("cairo") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/pixman/package.py b/var/spack/packages/pixman/package.py new file mode 100644 index 0000000000..895cbdbca5 --- /dev/null +++ b/var/spack/packages/pixman/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Pixman(Package): + """The Pixman package contains a library that provides low-level + pixel manipulation features such as image compositing and + trapezoid rasterization.""" + homepage = "http://www.pixman.org" + url = "http://cairographics.org/releases/pixman-0.32.6.tar.gz" + + version('0.32.6', '3a30859719a41bd0f5cccffbfefdd4c2') + + depends_on("libpng") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--disable-gtk") + make() + make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 270a28bf0a..01f9de7f3c 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -7,12 +7,38 @@ class Qt(Package): version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") - depends_on("lcms") + # Use system openssl for security. + #depends_on("openssl") - def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. - configure("--prefix=%s" % prefix) + depends_on("glib") + depends_on("gtkplus") + depends_on("libxml2") + depends_on("zlib") + depends_on("dbus") + depends_on("libtiff") + depends_on("libpng") + depends_on("libmng") + depends_on("jpeg") + + def patch(self): + # Fix qmake compilers in the default mkspec + qmake_conf = 'mkspecs/common/g++-base.conf' + filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) + filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) - # FIXME: Add logic to build and install here + + def install(self, spec, prefix): + configure('-v', + '-confirm-license', + '-opensource', + '-prefix', prefix, + '-openssl-linked', + '-dbus-linked', + '-fast', + '-optimized-qmake', + '-no-pch', + '-no-phonon', + '-no-phonon-backend', + '-no-openvg') make() make("install") diff --git a/var/spack/packages/xz/package.py b/var/spack/packages/xz/package.py new file mode 100644 index 0000000000..88c5793018 --- /dev/null +++ b/var/spack/packages/xz/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Xz(Package): + """XZ Utils is free general-purpose data compression software with + high compression ratio. XZ Utils were written for POSIX-like + systems, but also work on some not-so-POSIX systems. 
XZ Utils are + the successor to LZMA Utils.""" + homepage = "http://tukaani.org/xz/" + url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2" + + version('5.2.0', '867cc8611760240ebf3440bd6e170bb9') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/yasm/package.py b/var/spack/packages/yasm/package.py new file mode 100644 index 0000000000..d3a695b16d --- /dev/null +++ b/var/spack/packages/yasm/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Yasm(Package): + """Yasm is a complete rewrite of the NASM-2.11.06 assembler. It + supports the x86 and AMD64 instruction sets, accepts NASM and + GAS assembler syntaxes and outputs binary, ELF32 and ELF64 + object formats.""" + homepage = "http://yasm.tortall.net" + url = "http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz" + + version('1.3.0', 'fc9e586751ff789b34b1f21d572d96af') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 0bc861db6e8edf470c91be81e60546b0619216de Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 17:55:19 -0800 Subject: Fix up bzip2 install --- lib/spack/llnl/util/filesystem.py | 8 ++++++++ lib/spack/spack/build_environment.py | 1 + var/spack/packages/bzip2/package.py | 17 ++++++++++++++++- 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 24c77a80db..9f08832598 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -30,6 +30,7 @@ import os import sys import re import shutil +import stat import errno import getpass from contextlib import contextmanager, closing @@ -145,6 +146,13 @@ def install(src, dest): shutil.copy(src, dest) set_install_permissions(dest) + src_mode = os.stat(src).st_mode + dest_mode = os.stat(dest).st_mode + if src_mode | stat.S_IXUSR: dest_mode |= stat.S_IXUSR + if src_mode | stat.S_IXGRP: dest_mode |= stat.S_IXGRP + if src_mode | stat.S_IXOTH: dest_mode |= stat.S_IXOTH + os.chmod(dest, dest_mode) + def expand_user(path): """Find instances of '%u' in a path and replace with the current user's diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 87cfa772ca..45353ec640 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -190,6 +190,7 @@ def set_module_variables_for_package(pkg): m.makedirs = os.makedirs m.remove = os.remove m.removedirs = os.removedirs + m.symlink = os.symlink m.mkdirp = mkdirp m.install = install diff --git a/var/spack/packages/bzip2/package.py b/var/spack/packages/bzip2/package.py index 83ae88e564..fbd5869a53 100644 --- a/var/spack/packages/bzip2/package.py +++ b/var/spack/packages/bzip2/package.py @@ -1,4 +1,5 @@ from spack import * +from glob import glob class Bzip2(Package): """bzip2 is a freely available, patent free high-quality data @@ -15,5 +16,19 @@ class Bzip2(Package): # No configure system -- have to filter the makefile for this package. 
filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True) - make() + make('-f', 'Makefile-libbz2_so') + make('clean') make("install", "PREFIX=%s" % prefix) + + bzip2_exe = join_path(prefix.bin, 'bzip2') + install('bzip2-shared', bzip2_exe) + for libfile in glob('libbz2.so*'): + install(libfile, prefix.lib) + + bunzip2 = join_path(prefix.bin, 'bunzip2') + remove(bunzip2) + symlink(bzip2_exe, bunzip2) + + bzcat = join_path(prefix.bin, 'bzcat') + remove(bzcat) + symlink(bzip2_exe, bzcat) -- cgit v1.2.3-70-g09d2 From 37bdbdd9906bb4d89f917732325a346c5cdbb31f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 17:55:43 -0800 Subject: URLFetchStrategy now contains exploding tarballs. --- lib/spack/spack/fetch_strategy.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 180e8eb069..a71f3a1531 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -45,7 +45,7 @@ import re import shutil from functools import wraps import llnl.util.tty as tty - +from llnl.util.filesystem import * import spack import spack.error import spack.util.crypto as crypto @@ -205,8 +205,26 @@ class URLFetchStrategy(FetchStrategy): "Failed on expand() for URL %s" % self.url) decompress = decompressor_for(self.archive_file) + + # Expand all tarballs in their own directory to contain + # exploding tarballs. + tarball_container = os.path.join(self.stage.path, "spack-expanded-archive") + mkdirp(tarball_container) + os.chdir(tarball_container) decompress(self.archive_file) + # If the tarball *didn't* explode, move + # the expanded directory up & remove the protector directory. + files = os.listdir(tarball_container) + if len(files) == 1: + expanded_dir = os.path.join(tarball_container, files[0]) + if os.path.isdir(expanded_dir): + shutil.move(expanded_dir, self.stage.path) + os.rmdir(tarball_container) + + # Set the wd back to the stage when done. + self.stage.chdir() + def archive(self, destination): """Just moves this archive to the destination.""" -- cgit v1.2.3-70-g09d2 From b0ce1b81bae627cc9d3e7853683be60b0f47dbed Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 18:42:03 -0800 Subject: Fix SPINDLE and SCR download URLs. --- var/spack/packages/scr/package.py | 2 +- var/spack/packages/spindle/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/scr/package.py b/var/spack/packages/scr/package.py index d456ecaba0..74b75a8742 100644 --- a/var/spack/packages/scr/package.py +++ b/var/spack/packages/scr/package.py @@ -28,7 +28,7 @@ class Scr(Package): """SCR caches checkpoint data in storage on the compute nodes of a Linux cluster to provide a fast, scalable checkpoint/restart capability for MPI codes""" - homepage = "https://computation-rnd.llnl.gov/scr" + homepage = "https://computation.llnl.gov/project/scr/" url = "http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz" depends_on("mpi") diff --git a/var/spack/packages/spindle/package.py b/var/spack/packages/spindle/package.py index fd59282ebb..06a1e14284 100644 --- a/var/spack/packages/spindle/package.py +++ b/var/spack/packages/spindle/package.py @@ -30,7 +30,7 @@ class Spindle(Package): overload on a shared file system when loading dynamically linked libraries, causing site-wide performance problems. 
""" - homepage = "https://computation-rnd.llnl.gov/spindle" + homepage = "https://computation.llnl.gov/project/spindle/" url = "https://github.com/hpc/Spindle/archive/v0.8.1.tar.gz" list_url = "https://github.com/hpc/Spindle/releases" -- cgit v1.2.3-70-g09d2 From d3e52d9f9ac04c5e0e80ad08e57c7f13862b84c1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 25 Dec 2014 23:13:44 -0800 Subject: Fix lack of sorting in version concretization. --- lib/spack/spack/concretize.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 805604368e..3f569f9dce 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -68,8 +68,9 @@ class DefaultConcretizer(object): # If there are known avaialble versions, return the most recent # version that satisfies the spec pkg = spec.package - valid_versions = [v for v in pkg.versions - if any(v.satisfies(sv) for sv in spec.versions)] + valid_versions = sorted( + [v for v in pkg.versions + if any(v.satisfies(sv) for sv in spec.versions)]) if valid_versions: spec.versions = ver([valid_versions[-1]]) -- cgit v1.2.3-70-g09d2 From 9dabcc870385de329b9fcb6986d5d6688fa7dca8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 26 Dec 2014 00:07:15 -0800 Subject: Git package. --- var/spack/packages/git/package.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/packages/git/package.py diff --git a/var/spack/packages/git/package.py b/var/spack/packages/git/package.py new file mode 100644 index 0000000000..0f1a3ba05b --- /dev/null +++ b/var/spack/packages/git/package.py @@ -0,0 +1,27 @@ +from spack import * + +class Git(Package): + """Git is a free and open source distributed version control + system designed to handle everything from small to very large + projects with speed and efficiency.""" + homepage = "http://git-scm.com" + url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.xz" + + version('2.2.1', '43e01f9d96ba8c11611e0eef0d9f9f28') + + # Use system openssl. + # depends_on("openssl") + + # Use system perl for now. + # depends_on("perl") + # depends_on("pcre") + + depends_on("zlib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--without-pcre", + "--without-python") + + make() + make("install") -- cgit v1.2.3-70-g09d2 From 860f834aad0d6503057693b894d9996143dde312 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 26 Dec 2014 13:52:31 -0800 Subject: spack graph allows plotting specific packages. --- lib/spack/llnl/util/lang.py | 22 ++++++++++++++++++++++ lib/spack/spack/cmd/graph.py | 11 ++++++++++- lib/spack/spack/packages.py | 16 +++++++++++++--- lib/spack/spack/spec.py | 17 +++++++++-------- 4 files changed, 54 insertions(+), 12 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 049d158c6d..db15da0506 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -269,6 +269,28 @@ def in_function(function_name): del stack +def check_kwargs(kwargs, fun): + """Helper for making functions with kwargs. Checks whether the kwargs + are empty after all of them have been popped off. If they're + not, raises an error describing which kwargs are invalid. 
+ + Example:: + + def foo(self, **kwargs): + x = kwargs.pop('x', None) + y = kwargs.pop('y', None) + z = kwargs.pop('z', None) + check_kwargs(kwargs, self.foo) + + # This raises a TypeError: + foo(w='bad kwarg') + """ + if kwargs: + raise TypeError( + "'%s' is an invalid keyword argument for function %s()." + % (next(kwargs.iterkeys()), fun.__name__)) + + class RequiredAttributeError(ValueError): def __init__(self, message): super(RequiredAttributeError, self).__init__(message) diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 39dbfbb150..955be51955 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -22,9 +22,18 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +from external import argparse import spack +import spack.cmd description = "Write out inter-package dependencies in dot graph format" +def setup_parser(subparser): + subparser.add_argument( + 'specs', nargs=argparse.REMAINDER, + help="specs of packages to graph. Default is all packages.") + + def graph(parser, args): - spack.db.graph_dependencies() + specs = spack.cmd.parse_specs(args.specs) + spack.db.graph_dependencies(*specs) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 25d01fe7eb..0aa8090b61 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -30,7 +30,7 @@ import imp import llnl.util.tty as tty from llnl.util.filesystem import join_path -from llnl.util.lang import memoized +from llnl.util.lang import * import spack.error import spack.spec @@ -214,9 +214,12 @@ class PackageDB(object): return cls - def graph_dependencies(self, out=sys.stdout): + def graph_dependencies(self, *specs, **kwargs): """Print out a graph of all the dependencies between package. Graph is in dot format.""" + out = kwargs.pop('out', sys.stdout) + check_kwargs(kwargs, self.graph_dependencies) + out.write('digraph G {\n') out.write(' label = "Spack Dependencies"\n') out.write(' labelloc = "b"\n') @@ -227,8 +230,15 @@ class PackageDB(object): def quote(string): return '"%s"' % string + if not specs: + packages = self.all_packages() + else: + packages = [] + for spec in specs: + packages.extend(s.package for s in spec.normalized().traverse()) + deps = [] - for pkg in self.all_packages(): + for pkg in packages: out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) # Add edges for each depends_on in the package. 
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 570bb1191c..5f1385cc1b 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -858,7 +858,7 @@ class Spec(object): def normalized(self): """Return a normalized copy of this spec without modifying this spec.""" clone = self.copy() - clone.normalized() + clone.normalize() return clone @@ -1289,12 +1289,13 @@ class Spec(object): def tree(self, **kwargs): """Prints out this spec and its dependencies, tree-formatted with indentation.""" - color = kwargs.get('color', False) - depth = kwargs.get('depth', False) - showid = kwargs.get('ids', False) - cover = kwargs.get('cover', 'nodes') - indent = kwargs.get('indent', 0) - format = kwargs.get('format', '$_$@$%@$+$=') + color = kwargs.pop('color', False) + depth = kwargs.pop('depth', False) + showid = kwargs.pop('ids', False) + cover = kwargs.pop('cover', 'nodes') + indent = kwargs.pop('indent', 0) + fmt = kwargs.pop('format', '$_$@$%@$+$=') + check_kwargs(kwargs, self.tree) out = "" cur_id = 0 @@ -1311,7 +1312,7 @@ class Spec(object): out += (" " * d) if d > 0: out += "^" - out += node.format(format, color=color) + "\n" + out += node.format(fmt, color=color) + "\n" return out -- cgit v1.2.3-70-g09d2 From 6ffcdc1166642a70978a8631364aa8fd93560b62 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 29 Dec 2014 00:03:35 -0800 Subject: Partially wroking ASCII dependency graph. --- lib/spack/spack/cmd/spec.py | 10 ++- lib/spack/spack/spec.py | 178 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 187 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 5fcb0a9b5a..6f987d96d7 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -44,7 +44,15 @@ def spec(parser, args): print "Normalized" print "------------------------------" spec.normalize() - print spec.tree(color=True, indent=2) + print spec.tree(color=True, indent=2, cover='paths') + + print + print spec.topological_sort(reverse=True) + print + print "Graph" + print "------------------------------" + print spec.graph() + return print "Concretized" print "------------------------------" diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 5f1385cc1b..9239bac08f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -93,6 +93,7 @@ expansion when it is the first character in an id typed on the command line. import sys import itertools import hashlib +from heapq import * from StringIO import StringIO from operator import attrgetter @@ -712,6 +713,15 @@ class Spec(object): raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) + def index(self): + """Return DependencyMap that points to all the dependencies in this + spec.""" + dm = DependencyMap() + for spec in self.traverse(): + dm[spec.name] = spec + return dm + + def flatten(self): """Pull all dependencies up to the root (this spec). 
Merge constraints for dependencies with the same name, and if they @@ -1316,6 +1326,174 @@ class Spec(object): return out + def graph(self, **kwargs): + N = kwargs.get('node', 'o') # Node character + out = kwargs.get('out', sys.stdout) + indent = kwargs.get('indent', 0) + indent *= ' ' + + topo_order = self.topological_sort(reverse=True) + clone = self.copy() + nodes = clone.index() + + def ordered_deps(node): + deps = node.dependencies + return sorted((d for d in deps), reverse=True) + + frontier = [] + + debug = True + debug = False + + def back_edge(end, start): + assert(end < start) + + if (start - end) > 1: + if debug: + out.write(" " * 80) + + out.write(indent) + out.write("| " * (end + 1)) + out.write("|_" * (start - end - 2)) + out.write("|/ ") + out.write("/ " * (len(frontier) - start)) + out.write("\n") + + if debug: + out.write(" " * 80) + + out.write(indent) + out.write("| " * end) + out.write("|/") + out.write("| " * (start - end - 1)) + out.write(" /" * (len(frontier) - start)) + out.write("\n") + + + def connect_deps(i, deps): + if len(deps) == 1 and deps in frontier: + j = frontier.index(deps) + if j < i: + back_edge(j, i) + else: + if i < j: + frontier.pop(j) + frontier.insert(i, deps) + back_edge(i, j+1) + + elif deps: + frontier.insert(i, deps) + + + def add_deps_to_frontier(node, i): + deps = ordered_deps(node) + connect_deps(i, deps) + for d in deps: + del nodes[d].dependents[node.name] + + + name = topo_order.pop() + add_deps_to_frontier(nodes[name], 0) + + if debug: + out.write("%-80s" % frontier) + + out.write(indent) + out.write('%s %s\n' % (N, name)) + + while topo_order: + if debug: + out.write("%-80s" % frontier) + + # Find last i, len(frontier[i]) > 1 + i = len(frontier) - 1 + for f in frontier[-1::-1]: + if len(f) > 1: break + i -= 1 + + # Expand frontier until there are enough columns for all children. + if i >= 0: + out.write(indent) + out.write("| " * i) + out.write("|\ ") + out.write("\ " * (len(frontier) - i - 1)) + out.write("\n") + + name = frontier[i].pop(0) + deps = [name] + + connect_deps(i, deps) + + else: + name = topo_order.pop() + node = nodes[name] + + # Find the next node in topo order and remove it from + # the frontier. Since specs are single-rooted DAGs, + # the node is always there. If the graph had multiple + # roots, we'd need to handle that case case of a new root. + i, elt = next(f for f in enumerate(frontier) if name in f[1]) + frontier.pop(i) + + out.write("| " * i) + out.write("%s " % N) + out.write("| " * (len(frontier) - i)) + out.write(" %s\n" % name) + + if node.dependencies: + add_deps_to_frontier(node, i) + elif frontier: + if debug: + out.write(" " * 80) + + out.write("| " * i) + out.write(" /" * (len(frontier) - i)) + out.write("\n") + + + out.write("\n") + out.write("%s\n" % frontier) + + # Reverse the lines in the output + #return '\n'.join(reversed(out.getvalue().split('\n'))) + + return "" #out.getvalue() + + + def topological_sort(self, **kwargs): + """Return a list of dependency specs sorted topologically. 
+ This spec is not modified in the process.""" + reverse = kwargs.get('reverse', False) + if not reverse: + parents = lambda s: s.dependents + children = lambda s: s.dependencies + else: + parents = lambda s: s.dependencies + children = lambda s: s.dependents + + spec = self.copy() + nodes = spec.index() + + topo_order = [] + remaining = [name for name in nodes.keys() if not parents(nodes[name])] + heapify(remaining) + + while remaining: + name = heappop(remaining) + topo_order.append(name) + + node = nodes[name] + for dep in children(node).values(): + del parents(dep)[node.name] + if not parents(dep): + heappush(remaining, dep.name) + + if any(parents(s) for s in spec.traverse()): + raise ValueError("Spec has cycles!") + else: + return topo_order + + def __repr__(self): return str(self) -- cgit v1.2.3-70-g09d2 From a6e00f6086467eda633e099d3ed7696bd85184e7 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 29 Dec 2014 01:05:21 -0800 Subject: Fix ColorStream --- lib/spack/llnl/util/tty/color.py | 21 ++++++++++++--------- lib/spack/spack/spec.py | 3 ++- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 598e9d44f5..81688d7f14 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -177,17 +177,20 @@ def cescape(string): class ColorStream(object): def __init__(self, stream, color=None): - self.__class__ = type(stream.__class__.__name__, - (self.__class__, stream.__class__), {}) - self.__dict__ = stream.__dict__ - self.color = color - self.stream = stream + self._stream = stream + self._color = color def write(self, string, **kwargs): - if kwargs.get('raw', False): - super(ColorStream, self).write(string) - else: - cwrite(string, self.stream, self.color) + raw = kwargs.get('raw', False) + raw_write = getattr(self._stream, 'write') + + color = self._color + if self._color is None: + if raw: + color=True + else: + color = self._stream.isatty() + raw_write(colorize(string, color=color)) def writelines(self, sequence, **kwargs): raw = kwargs.get('raw', False) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 9239bac08f..3386af8d7f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1328,7 +1328,8 @@ class Spec(object): def graph(self, **kwargs): N = kwargs.get('node', 'o') # Node character - out = kwargs.get('out', sys.stdout) + color = kwargs.get('color', True) + out = kwargs.get('out', ColorStream(sys.stdout, color=color)) indent = kwargs.get('indent', 0) indent *= ' ' -- cgit v1.2.3-70-g09d2 From 226de0a42d9a59c735edaf5075cbfc7ec3c53125 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 29 Dec 2014 01:52:03 -0800 Subject: Spec graph works without color. 
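The topological_sort() helper above is a heap-ordered variant of Kahn's algorithm: repeatedly emit a node with no remaining parents, then remove its outgoing edges. A minimal standalone sketch of the same idea on a plain dictionary (the package names are illustrative and the dictionary layout is not the Spack Spec API) looks roughly like this:

    from heapq import heapify, heappop, heappush

    # Each name maps to the names it depends on (its children).
    deps = {'mpileaks': ['callpath', 'mpi'],
            'callpath': ['mpi'],
            'mpi': []}

    # Count how many parents each node still has (Kahn's algorithm).
    num_parents = dict((name, 0) for name in deps)
    for children in deps.values():
        for child in children:
            num_parents[child] += 1

    ready = [name for name in deps if num_parents[name] == 0]
    heapify(ready)            # the heap keeps ties in a deterministic order

    order = []
    while ready:
        name = heappop(ready)
        order.append(name)
        for child in deps[name]:
            num_parents[child] -= 1
            if num_parents[child] == 0:
                heappush(ready, child)

    if len(order) != len(deps):
        raise ValueError("Graph has cycles!")
    # order == ['mpileaks', 'callpath', 'mpi']

The method in the patch does the same bookkeeping by deleting entries from each node's dependents/dependencies maps as edges are consumed, and its reverse=True flag simply swaps which of the two maps plays the role of "parents".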
--- lib/spack/spack/cmd/spec.py | 13 +++++-------- lib/spack/spack/spec.py | 18 +++++++----------- 2 files changed, 12 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 6f987d96d7..71bc12d6f5 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -46,15 +46,12 @@ def spec(parser, args): spec.normalize() print spec.tree(color=True, indent=2, cover='paths') - print - print spec.topological_sort(reverse=True) - print - print "Graph" - print "------------------------------" - print spec.graph() - return - print "Concretized" print "------------------------------" spec.concretize() print spec.tree(color=True, indent=2) + + print "Graph" + print "------------------------------" + spec.graph() + return diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 3386af8d7f..0720fe6212 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1343,7 +1343,6 @@ class Spec(object): frontier = [] - debug = True debug = False def back_edge(end, start): @@ -1367,7 +1366,10 @@ class Spec(object): out.write("| " * end) out.write("|/") out.write("| " * (start - end - 1)) - out.write(" /" * (len(frontier) - start)) + if (start - end) > 1: + out.write("| " * (len(frontier) - start)) + else: + out.write(" /" * (len(frontier) - start)) out.write("\n") @@ -1424,6 +1426,9 @@ class Spec(object): deps = [name] connect_deps(i, deps) + if i+1 < len(frontier) and len(frontier[i+1]) == 1: + deps = frontier.pop(i+1) + connect_deps(i+1, deps) else: name = topo_order.pop() @@ -1452,15 +1457,6 @@ class Spec(object): out.write("\n") - out.write("\n") - out.write("%s\n" % frontier) - - # Reverse the lines in the output - #return '\n'.join(reversed(out.getvalue().split('\n'))) - - return "" #out.getvalue() - - def topological_sort(self, **kwargs): """Return a list of dependency specs sorted topologically. This spec is not modified in the process.""" -- cgit v1.2.3-70-g09d2 From daf1e229f7a5b5210651d5beddaec6ef1ed125bf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 29 Dec 2014 14:28:24 -0800 Subject: More compact graphs: do back edges before forward expansion. --- lib/spack/spack/spec.py | 126 ++++++++++++++++++++++++++++++------------------ 1 file changed, 80 insertions(+), 46 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 0720fe6212..2cd7bfb6be 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1330,105 +1330,140 @@ class Spec(object): N = kwargs.get('node', 'o') # Node character color = kwargs.get('color', True) out = kwargs.get('out', ColorStream(sys.stdout, color=color)) + debug = kwargs.get('debug', False) indent = kwargs.get('indent', 0) indent *= ' ' topo_order = self.topological_sort(reverse=True) + + # Work on a clone so the spec is self contained (no incoming + # parent edges), and so we don't destroy this spec. clone = self.copy() + + # Fast access to nodes in the spec. nodes = clone.index() + frontier = [] def ordered_deps(node): deps = node.dependencies return sorted((d for d in deps), reverse=True) - frontier = [] - - debug = False - def back_edge(end, start): + def back_edge(end, start, **kwargs): assert(end < start) + collapse = kwargs.get('collapse', True) + label = kwargs.get('label', '') # debug label - if (start - end) > 1: - if debug: - out.write(" " * 80) + if (start - end) > 1: + # This part draws a long back edge. 
out.write(indent) out.write("| " * (end + 1)) out.write("|_" * (start - end - 2)) - out.write("|/ ") - out.write("/ " * (len(frontier) - start)) - out.write("\n") + out.write("|/") + if collapse: + out.write(" ") + out.write("/ " * (len(frontier) - start)) + else: + out.write("| " * (len(frontier) - start)) - if debug: - out.write(" " * 80) + if debug: + out.write(" " * 20) + out.write("%s %s" % (frontier, label)) + out.write("\n") + + # This part draws the final collapsing line out.write(indent) out.write("| " * end) out.write("|/") out.write("| " * (start - end - 1)) - if (start - end) > 1: + if (start - end) > 1 or not collapse: out.write("| " * (len(frontier) - start)) else: out.write(" /" * (len(frontier) - start)) + + if debug: + out.write(" " * 20) + out.write("%s %s" % (frontier, label)) + out.write("\n") - def connect_deps(i, deps): + def connect_deps(i, deps, **kwargs): if len(deps) == 1 and deps in frontier: j = frontier.index(deps) if j < i: - back_edge(j, i) + back_edge(j, i, **kwargs) else: if i < j: frontier.pop(j) frontier.insert(i, deps) - back_edge(i, j+1) + back_edge(i, j+1, **kwargs) + return True elif deps: frontier.insert(i, deps) + return False def add_deps_to_frontier(node, i): deps = ordered_deps(node) - connect_deps(i, deps) + connect_deps(i, deps, label="add_deps") for d in deps: del nodes[d].dependents[node.name] - name = topo_order.pop() - add_deps_to_frontier(nodes[name], 0) + def find(seq, predicate): + for i, elt in enumerate(seq): + if predicate(elt): + return i + return -1 - if debug: - out.write("%-80s" % frontier) + add_deps_to_frontier(self, 0) out.write(indent) - out.write('%s %s\n' % (N, name)) - - while topo_order: - if debug: - out.write("%-80s" % frontier) + out.write('%s %s\n' % (N, self.name)) + topo_order.pop() - # Find last i, len(frontier[i]) > 1 - i = len(frontier) - 1 - for f in frontier[-1::-1]: - if len(f) > 1: break - i -= 1 + while frontier: + # Find an unexpanded part of frontier + i = find(frontier, lambda f: len(f) > 1) # Expand frontier until there are enough columns for all children. if i >= 0: - out.write(indent) - out.write("| " * i) - out.write("|\ ") - out.write("\ " * (len(frontier) - i - 1)) - out.write("\n") + # Do all back connections possible from this element + # before expanding. + back_connect = [d for d in frontier[i] if [d] in frontier[:i]] + for d in back_connect: + j = frontier.index([d]) + frontier[i].remove(d) + connect_deps(i, [d], collapse=False, label="back_connect") + + if not frontier[i]: + frontier.pop(i) + + elif len(frontier[i]) > 1: + name = frontier[i].pop(0) + deps = [name] + + out.write(indent) + out.write("| " * i) + out.write("|\ ") + out.write("\ " * (len(frontier) - i - 1)) + out.write("\n") - name = frontier[i].pop(0) - deps = [name] + connect_deps(i, deps, label="expansion") - connect_deps(i, deps) - if i+1 < len(frontier) and len(frontier[i+1]) == 1: - deps = frontier.pop(i+1) - connect_deps(i+1, deps) + # Handle any back edges to the right + j = i+1 + while j < len(frontier): + deps = frontier.pop(j) + # TODO: semantics of connect_deps are weird. + # TODO: false return means the popped item was put + # TODO: back & not connected. + if not connect_deps(j, deps, label="ending"): + j += 1 else: name = topo_order.pop() @@ -1438,9 +1473,10 @@ class Spec(object): # the frontier. Since specs are single-rooted DAGs, # the node is always there. If the graph had multiple # roots, we'd need to handle that case case of a new root. 
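    # An illustrative trace (made-up package names) of the frontier -- the
    # list of lists of open edge targets -- for a root that depends on
    # 'bar' and 'baz', where 'bar' also depends on 'baz':
    #
    #   frontier == [['baz', 'bar']]    # root's deps, sorted in reverse order
    #   frontier == [['baz'], ['bar']]  # a multi-name entry is expanded
    #                                   # one column at a time
    #
    # When 'bar' is drawn, its dependency 'baz' is not inserted again:
    # connect_deps() finds the existing ['baz'] column and draws a back
    # edge to it instead, which is what keeps the graph narrow.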
- i, elt = next(f for f in enumerate(frontier) if name in f[1]) + i = find(frontier, lambda f: name in f) frontier.pop(i) + out.write(indent) out.write("| " * i) out.write("%s " % N) out.write("| " * (len(frontier) - i)) @@ -1449,9 +1485,7 @@ class Spec(object): if node.dependencies: add_deps_to_frontier(node, i) elif frontier: - if debug: - out.write(" " * 80) - + out.write(indent) out.write("| " * i) out.write(" /" * (len(frontier) - i)) out.write("\n") -- cgit v1.2.3-70-g09d2 From bb3dafa3b5d978b7e68eceeb7faf8b5d156f3058 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 29 Dec 2014 20:55:02 -0800 Subject: Reduce number of immediate expand/contracts. --- lib/spack/spack/spec.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 2cd7bfb6be..a8d080b0d2 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1444,18 +1444,30 @@ class Spec(object): frontier.pop(i) elif len(frontier[i]) > 1: - name = frontier[i].pop(0) - deps = [name] - + # Expand forawrd after doing all back connections out.write(indent) out.write("| " * i) - out.write("|\ ") - out.write("\ " * (len(frontier) - i - 1)) - out.write("\n") + out.write("|\\") + + if (i+1 < len(frontier) and len(frontier[i+1]) == 1 + and frontier[i+1][0] in frontier[i]): + # We need to connect to the element to the right. + # Keep lines straight by connecting directly and + # avoiding immediate expand/contract. + name = frontier[i+1][0] + frontier[i].remove(name) + out.write("| " * (len(frontier) - i - 1)) + out.write("\n") - connect_deps(i, deps, label="expansion") - - # Handle any back edges to the right + else: + # Just allow the expansion here. + name = frontier[i].pop(0) + deps = [name] + out.write(" \\" * (len(frontier) - i - 1)) + out.write("\n") + connect_deps(i, deps, label="expansion") + + # Handle any remaining back edges to the right j = i+1 while j < len(frontier): deps = frontier.pop(j) -- cgit v1.2.3-70-g09d2 From dba5d020cdb7b4b306d68aff1a66430730c3b92b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 30 Dec 2014 18:05:47 -0800 Subject: Pipelining back edges works, saves more space. --- lib/spack/spack/spec.py | 113 ++++++++++++++++++++++++++++++------------------ 1 file changed, 71 insertions(+), 42 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index a8d080b0d2..caa8b0972f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1349,57 +1349,71 @@ class Spec(object): return sorted((d for d in deps), reverse=True) - def back_edge(end, start, **kwargs): - assert(end < start) - collapse = kwargs.get('collapse', True) - label = kwargs.get('label', '') # debug label + def back_edge(prev_ends, end, start, collapse, label=None): + # Use prev & next for pipelining -- pipelined edges have + # the same start, and they're in sorted order e.g.:: + # + # start + # | |_|_|_|/| + # |/| | |_|/| + # | | |/| | | <-- when doing this line. + # prev end + # + out.write(indent) + f = len(frontier) - if (start - end) > 1: - # This part draws a long back edge. 
- out.write(indent) - out.write("| " * (end + 1)) - out.write("|_" * (start - end - 2)) - out.write("|/") - if collapse: - out.write(" ") - out.write("/ " * (len(frontier) - start)) - else: - out.write("| " * (len(frontier) - start)) + def advance(to, fun): + for i in range(advance.pos, to): + fun() + advance.pos += 1 + advance.pos = 0 + + for p in prev_ends: + advance(p, lambda: out.write("| ")) + advance(p+1, lambda: out.write("|/")) - if debug: - out.write(" " * 20) - out.write("%s %s" % (frontier, label)) + if end >= 0: + advance(end + 1, lambda: out.write("| ")) + advance(start - 1, lambda: out.write("|_")) + else: + advance(start - 1, lambda: out.write("| ")) - out.write("\n") + if start >= 0: + advance(start, lambda: out.write("|/")) - # This part draws the final collapsing line - out.write(indent) - out.write("| " * end) - out.write("|/") - out.write("| " * (start - end - 1)) - if (start - end) > 1 or not collapse: - out.write("| " * (len(frontier) - start)) + if collapse: + advance(len(frontier), lambda: out.write(" /")) else: - out.write(" /" * (len(frontier) - start)) + advance(len(frontier), lambda: out.write("| ")) if debug: - out.write(" " * 20) - out.write("%s %s" % (frontier, label)) + out.write(" " * 10) + if label: out.write(label) + out.write("%s" % frontier) out.write("\n") - def connect_deps(i, deps, **kwargs): + def connect_deps(i, deps, collapse, label): + """Connect dependencies to frontier at position i.""" if len(deps) == 1 and deps in frontier: j = frontier.index(deps) + + # connect to the left if j < i: - back_edge(j, i, **kwargs) + if i-j > 1: # two lines if distance > 1 + back_edge([], j, i, True, label) + back_edge([j], -1, -1, (i-j == 1), label) + + # connect to the right else: if i < j: frontier.pop(j) frontier.insert(i, deps) - back_edge(i, j+1, **kwargs) + if j-i > 1: + back_edge([], i, j+1, collapse, label) + back_edge([i], -1, -1, not (j-i > 1) and collapse, label) return True elif deps: @@ -1408,8 +1422,10 @@ class Spec(object): def add_deps_to_frontier(node, i): + """Add dependencies to frontier, connecting them if they're fully + expanded, and deleting parent pointers.""" deps = ordered_deps(node) - connect_deps(i, deps, label="add_deps") + connect_deps(i, deps, True, "add_deps") for d in deps: del nodes[d].dependents[node.name] @@ -1432,13 +1448,26 @@ class Spec(object): # Expand frontier until there are enough columns for all children. if i >= 0: - # Do all back connections possible from this element - # before expanding. - back_connect = [d for d in frontier[i] if [d] in frontier[:i]] - for d in back_connect: - j = frontier.index([d]) - frontier[i].remove(d) - connect_deps(i, [d], collapse=False, label="back_connect") + # Figure out how many back connections there are and + # sort them so we do them in order + back = [] + for d in frontier[i]: + b = find(frontier[:i], lambda f: f == [d]) + if b != -1: back.append((b, d)) + + # Do all back connections in sorted order so we can + # pipeline them and save space. 
+ if back: + back.sort() + + prev_ends = [] + for j, (b, d) in enumerate(back): + if i-b > 1: + back_edge(prev_ends, b, i, False) + del prev_ends[:] + prev_ends.append(b) + frontier[i].remove(d) + back_edge(prev_ends, -1, -1, False) if not frontier[i]: frontier.pop(i) @@ -1465,7 +1494,7 @@ class Spec(object): deps = [name] out.write(" \\" * (len(frontier) - i - 1)) out.write("\n") - connect_deps(i, deps, label="expansion") + connect_deps(i, deps, True, "expansion") # Handle any remaining back edges to the right j = i+1 @@ -1474,7 +1503,7 @@ class Spec(object): # TODO: semantics of connect_deps are weird. # TODO: false return means the popped item was put # TODO: back & not connected. - if not connect_deps(j, deps, label="ending"): + if not connect_deps(j, deps, True, "rem_back"): j += 1 else: -- cgit v1.2.3-70-g09d2 From 478af54ccec497c58e408696dd8542ec9a8a82fb Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 31 Dec 2014 14:55:35 -0800 Subject: Color graph edges. --- lib/spack/spack/spec.py | 63 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index caa8b0972f..7aed82e239 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1342,6 +1342,16 @@ class Spec(object): # Fast access to nodes in the spec. nodes = clone.index() + + # Colors associated with each node in the DAG. + # Edges are colored by the node they point to. + all_colors = 'rgbmcyRGBMCY' + colors = dict((name, all_colors[i % len(all_colors)]) + for i, name in enumerate(topo_order)) + def write_edge(string, index, sub=0): + edge = "@%s{%s}" % (colors[frontier[index][sub]], string) + out.write(edge) + frontier = [] def ordered_deps(node): @@ -1363,29 +1373,30 @@ class Spec(object): f = len(frontier) + self._pos = 0 def advance(to, fun): - for i in range(advance.pos, to): + for i in range(self._pos, to): fun() - advance.pos += 1 - advance.pos = 0 + self._pos += 1 for p in prev_ends: - advance(p, lambda: out.write("| ")) - advance(p+1, lambda: out.write("|/")) - + advance(p, lambda: write_edge("| ", self._pos)) + advance(p+1, lambda: write_edge("|/", self._pos)) if end >= 0: - advance(end + 1, lambda: out.write("| ")) - advance(start - 1, lambda: out.write("|_")) + advance(end + 1, lambda: write_edge("| ", self._pos)) + advance(start - 1, lambda: (write_edge("|", self._pos) or + write_edge("_", end))) else: - advance(start - 1, lambda: out.write("| ")) + advance(start - 1, lambda: write_edge("| ", self._pos)) if start >= 0: - advance(start, lambda: out.write("|/")) + advance(start, lambda: (write_edge("|", self._pos) or + write_edge("/", end))) if collapse: - advance(len(frontier), lambda: out.write(" /")) + advance(len(frontier), lambda: write_edge(" /", self._pos)) else: - advance(len(frontier), lambda: out.write("| ")) + advance(len(frontier), lambda: write_edge("| ", self._pos)) if debug: out.write(" " * 10) @@ -1462,11 +1473,11 @@ class Spec(object): prev_ends = [] for j, (b, d) in enumerate(back): + frontier[i].remove(d) if i-b > 1: back_edge(prev_ends, b, i, False) del prev_ends[:] prev_ends.append(b) - frontier[i].remove(d) back_edge(prev_ends, -1, -1, False) if not frontier[i]: @@ -1475,8 +1486,9 @@ class Spec(object): elif len(frontier[i]) > 1: # Expand forawrd after doing all back connections out.write(indent) - out.write("| " * i) - out.write("|\\") + for c in range(i): + write_edge("| ", c) + write_edge("|", i) if (i+1 < len(frontier) and len(frontier[i+1]) == 1 and frontier[i+1][0] in 
frontier[i]): @@ -1485,14 +1497,19 @@ class Spec(object): # avoiding immediate expand/contract. name = frontier[i+1][0] frontier[i].remove(name) - out.write("| " * (len(frontier) - i - 1)) + + write_edge("\\", i+1) + for c in range(i+1, len(frontier)): + write_edge("| ", c ) out.write("\n") else: # Just allow the expansion here. name = frontier[i].pop(0) deps = [name] - out.write(" \\" * (len(frontier) - i - 1)) + write_edge("\\", i) + for c in range(i+1, len(frontier)): + write_edge(" \\", c) out.write("\n") connect_deps(i, deps, True, "expansion") @@ -1518,17 +1535,21 @@ class Spec(object): frontier.pop(i) out.write(indent) - out.write("| " * i) + for c in range(i): + write_edge("| ", c) out.write("%s " % N) - out.write("| " * (len(frontier) - i)) + for c in range(i, len(frontier)): + write_edge("| ", c) out.write(" %s\n" % name) if node.dependencies: add_deps_to_frontier(node, i) elif frontier: out.write(indent) - out.write("| " * i) - out.write(" /" * (len(frontier) - i)) + for c in range(i): + write_edge("| ", c) + for c in range(i, len(frontier)): + write_edge(" /", c) out.write("\n") -- cgit v1.2.3-70-g09d2 From 0a0291678e283bf154081df67b0a1f5c909d1d19 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 3 Jan 2015 17:45:54 -0800 Subject: Factor graph code out into its own module, rework spack graph. --- lib/spack/spack/cmd/graph.py | 32 ++- lib/spack/spack/cmd/location.py | 1 - lib/spack/spack/cmd/spec.py | 9 +- lib/spack/spack/graph.py | 480 ++++++++++++++++++++++++++++++++++++++++ lib/spack/spack/packages.py | 42 ---- lib/spack/spack/spec.py | 262 ---------------------- 6 files changed, 509 insertions(+), 317 deletions(-) create mode 100644 lib/spack/spack/graph.py diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 955be51955..13efab5fe5 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -23,17 +23,39 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from external import argparse + import spack import spack.cmd +from spack.graph import * -description = "Write out inter-package dependencies in dot graph format" +description = "Generate graphs of package dependency relationships." def setup_parser(subparser): + method = subparser.add_mutually_exclusive_group() + method.add_argument( + '--ascii', action='store_true', + help="Draw graph as ascii to stdout (default).") + method.add_argument( + '--dot', action='store_true', + help="Generate graph in dot format and print to stdout.") + + method.add_argument( + '--concretize', action='store_true', help="Concretize specs before graphing.") + subparser.add_argument( - 'specs', nargs=argparse.REMAINDER, - help="specs of packages to graph. Default is all packages.") + 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.") def graph(parser, args): - specs = spack.cmd.parse_specs(args.specs) - spack.db.graph_dependencies(*specs) + specs = spack.cmd.parse_specs( + args.specs, normalize=True, concretize=args.concretize) + + + if args.dot: # Dot graph only if asked for. 
+ graph_dot(*specs) + + elif specs: # ascii is default: user doesn't need to provide it explicitly + graph_ascii(specs[0]) + for spec in specs[1:]: + print # extra line bt/w independent graphs + graph_ascii(spec) diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 3fc05d471d..509c336b69 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -111,4 +111,3 @@ def location(parser, args): tty.die("Build directory does not exist yet. Run this to create it:", "spack stage " + " ".join(args.spec)) print pkg.stage.source_path - diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 71bc12d6f5..e2cb5689c0 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -27,8 +27,8 @@ import spack.cmd import llnl.util.tty as tty -import spack.url as url import spack +import spack.url as url description = "print out abstract and concrete versions of a spec." @@ -44,14 +44,9 @@ def spec(parser, args): print "Normalized" print "------------------------------" spec.normalize() - print spec.tree(color=True, indent=2, cover='paths') + print spec.tree(color=True, indent=2) print "Concretized" print "------------------------------" spec.concretize() print spec.tree(color=True, indent=2) - - print "Graph" - print "------------------------------" - spec.graph() - return diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py new file mode 100644 index 0000000000..142c9c5c8f --- /dev/null +++ b/lib/spack/spack/graph.py @@ -0,0 +1,480 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Functions for graphing DAGs of dependencies. + +This file contains code for graphing DAGs of software packages +(i.e. Spack specs). There are two main functions you probably care +about: + +graph_ascii() will output a colored graph of a spec in ascii format, +knd of like the graph git shows with "git log --graph". + +graph_dot() will output a graph of a spec (or multiple specs) in dot +format. + +Note that ``graph_ascii`` assumes a single spec while ``graph_dot`` +can take a number of specs as input. 
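Either function can also be driven directly from Python; a rough sketch (the package name and keyword values here are only examples) is::

    import sys
    from spack.spec import Spec
    from spack.graph import graph_ascii, graph_dot

    spec = Spec('mpileaks')
    spec.normalize()                 # resolve the dependency DAG first

    graph_ascii(spec, indent=2)      # colored ASCII DAG on stdout
    graph_dot(spec, out=sys.stdout)  # Graphviz source; pipe through dot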
+ +""" +__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot'] + +from heapq import * + +from llnl.util.lang import * +from llnl.util.tty.color import * + +import spack + + +def topological_sort(spec, **kwargs): + """Topological sort for specs. + + Return a list of dependency specs sorted topologically. The spec + argument is not modified in the process. + + """ + reverse = kwargs.get('reverse', False) + if not reverse: + parents = lambda s: s.dependents + children = lambda s: s.dependencies + else: + parents = lambda s: s.dependencies + children = lambda s: s.dependents + + # Work on a copy so this is nondestructive. + spec = spec.copy() + nodes = spec.index() + + topo_order = [] + remaining = [name for name in nodes.keys() if not parents(nodes[name])] + heapify(remaining) + + while remaining: + name = heappop(remaining) + topo_order.append(name) + + node = nodes[name] + for dep in children(node).values(): + del parents(dep)[node.name] + if not parents(dep): + heappush(remaining, dep.name) + + if any(parents(s) for s in spec.traverse()): + raise ValueError("Spec has cycles!") + else: + return topo_order + + +def find(seq, predicate): + """Find index in seq for which predicate is True. + + Searches the sequence and returns the index of the element for + which the predicate evaluates to True. Returns -1 if the + predicate does not evaluate to True for any element in seq. + + """ + for i, elt in enumerate(seq): + if predicate(elt): + return i + return -1 + + +class AsciiGraph(object): + def __init__(self): + # These can be set after initialization or after a call to + # graph() to change behavior. + self.node_character = 'o' + self.debug = False + self.indent = 0 + + # These are colors in the order they'll be used for edges. + # See llnl.util.tty.color for details on color characters. + self.colors = 'rgbmcyRGBMCY' + + # Internal vars are used in the graph() function and are + # properly initialized there. + self._name_to_color = None # Node name to color + self._out = None # Output stream + self._frontier = None # frontier + self._nodes = None # dict from name -> node + + + def _indent(self): + self._out.write(self.indent * ' ') + + + def _write_edge(self, string, index, sub=0): + """Write a colored edge to the output stream.""" + name = self._frontier[index][sub] + edge = "@%s{%s}" % (self._name_to_color[name], string) + self._out.write(edge) + + + def _connect_deps(self, i, deps, collapse, label): + """Connect dependencies to existing edges in the frontier. + + ``deps`` are to be inserted at position i in the + frontier. This routine determines whether other open edges + should be merged with (if there are other open edges + pointing to the same place) or whether they should just be + inserted as a completely new open edge. + + Open edges that are not fully expanded (i.e. those that point + at multiple places) are left intact. + + Parameters: + + collapse -- whether the frontier is collapsing or staying the + same size. + + label -- optional debug label for the connection. + + Returns: True if the deps were connected to another edge + (i.e. the frontier did not grow) and False if the deps were + NOT already in the frontier (i.e. they were inserted and the + frontier grew). 
+ + """ + if len(deps) == 1 and deps in self._frontier: + j = self._frontier.index(deps) + + # connect to the left + if j < i: + if i-j > 1: # two lines if distance > 1 + self._back_edge([], j, i, True, label) + self._back_edge([j], -1, -1, (i-j == 1), label) + + # connect to the right + else: + if i < j: + self._frontier.pop(j) + self._frontier.insert(i, deps) + if j-i > 1: + self._back_edge([], i, j+1, collapse, label) + self._back_edge([i], -1, -1, not (j-i > 1) and collapse, label) + return True + + elif deps: + self._frontier.insert(i, deps) + return False + + + def _add_deps_to_frontier(self, node, i): + """Add dependencies to frontier. + + Adds the dependencies of to the frontier, and connects + them to other open edges if they match. Also deletes parent + pointers in the node to mark edges as covered. + + """ + deps = sorted((d for d in node.dependencies), reverse=True) + self._connect_deps(i, deps, True, "add_deps") + for d in deps: + del self._nodes[d].dependents[node.name] + + + + def _back_edge(self, prev_ends, end, start, collapse, label=None): + """Write part of a backwards edge in the graph. + + Writes single- or multi-line backward edges in an ascii graph. + For example, a single line edge:: + + | | | | o | + | | | |/ / <-- single-line edge connects two nodes. + | | | o | + + Or a multi-line edge (requires two calls to back_edge):: + + | | | | o | + | |_|_|/ / <-- multi-line edge crosses vertical edges. + |/| | | | + o | | | | + + Also handles "pipelined" edges, where the same line contains + parts of multiple edges:: + + o start + | |_|_|_|/| + |/| | |_|/| <-- this line has parts of 2 edges. + | | |/| | | + o o + + Arguments: + + prev_ends -- indices in frontier of previous edges that need + to be finished on this line. + + end -- end of the current edge on this line. + + start -- start index of the current edge. + + collapse -- whether the graph will be collapsing (i.e. whether + to slant the end of the line or keep it straight) + + label -- optional debug label to print after the line. + + """ + def advance(to_pos, edges): + """Write edges up to .""" + for i in range(self._pos, to_pos): + for e in edges(): + self._write_edge(*e) + self._pos += 1 + + flen = len(self._frontier) + self._pos = 0 + self._indent() + + for p in prev_ends: + advance(p, lambda: [("| ", self._pos)] ) + advance(p+1, lambda: [("|/", self._pos)] ) + + if end >= 0: + advance(end + 1, lambda: [("| ", self._pos)] ) + advance(start - 1, lambda: [("|", self._pos), ("_", end)] ) + else: + advance(start - 1, lambda: [("| ", self._pos)] ) + + if start >= 0: + advance(start, lambda: [("|", self._pos), ("/", end)] ) + + if collapse: + advance(flen, lambda: [(" /", self._pos)] ) + else: + advance(flen, lambda: [("| ", self._pos)] ) + + if self.debug: + self._out.write(" " * 10) + if label: + self._out.write(label) + self._out.write("%s" % self._frontier) + + self._out.write("\n") + + + def write(self, spec, **kwargs): + """Write out an ascii graph of the provided spec. + + Arguments: + spec -- spec to graph. This only handles one spec at a time. + + Optional arguments: + + out -- file object to write out to (default is sys.stdout) + + color -- whether to write in color. Default is to autodetect + based on output file. + + """ + out = kwargs.get('out', None) + if not out: + out = sys.stdout + + color = kwargs.get('color', None) + if not color: + color = out.isatty() + self._out = ColorStream(sys.stdout, color=color) + + # We'll traverse the spec in topo order as we graph it. 
+ topo_order = topological_sort(spec, reverse=True) + + # Work on a copy to be nondestructive + spec = spec.copy() + self._nodes = spec.index() + + # Colors associated with each node in the DAG. + # Edges are colored by the node they point to. + self._name_to_color = dict((name, self.colors[i % len(self.colors)]) + for i, name in enumerate(topo_order)) + + # This array tracks the open edges at the frontier of the + # graph we're writing out. + self._frontier = [] + + self._add_deps_to_frontier(spec, 0) + self._indent() + self._out.write('%s %s\n' % (self.node_character, spec.name)) + topo_order.pop() + + while self._frontier: + # Find an unexpanded part of frontier + i = find(self._frontier, lambda f: len(f) > 1) + + # Expand frontier until there are enough columns for all children. + if i >= 0: + # Figure out how many back connections there are and + # sort them so we do them in order + back = [] + for d in self._frontier[i]: + b = find(self._frontier[:i], lambda f: f == [d]) + if b != -1: back.append((b, d)) + + # Do all back connections in sorted order so we can + # pipeline them and save space. + if back: + back.sort() + prev_ends = [] + for j, (b, d) in enumerate(back): + self._frontier[i].remove(d) + if i-b > 1: + self._back_edge(prev_ends, b, i, False) + del prev_ends[:] + prev_ends.append(b) + self._back_edge(prev_ends, -1, -1, False) + + if not self._frontier[i]: + self._frontier.pop(i) + + elif len(self._frontier[i]) > 1: + # Expand forawrd after doing all back connections + self._indent() + for c in range(i): + self._write_edge("| ", c) + self._write_edge("|", i) + + if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1 + and self._frontier[i+1][0] in self._frontier[i]): + # We need to connect to the element to the right. + # Keep lines straight by connecting directly and + # avoiding immediate expand/contract. + name = self._frontier[i+1][0] + self._frontier[i].remove(name) + + self._write_edge("\\", i+1) + for c in range(i+1, len(self._frontier)): + self._write_edge("| ", c ) + self._out.write("\n") + + else: + # Just allow the expansion here. + name = self._frontier[i].pop(0) + deps = [name] + self._write_edge("\\", i) + for c in range(i+1, len(self._frontier)): + self._write_edge(" \\", c) + self._out.write("\n") + self._connect_deps(i, deps, True, "expansion") + + # Handle any remaining back edges to the right + j = i+1 + while j < len(self._frontier): + deps = self._frontier.pop(j) + if not self._connect_deps(j, deps, True, "rem_back"): + j += 1 + + else: + name = topo_order.pop() + node = self._nodes[name] + + # Find the next node in topo order and remove it from + # the frontier. Since specs are single-rooted DAGs, + # the node is always there. If the graph had multiple + # roots, we'd need to handle that case case of a new root. 
+ i = find(self._frontier, lambda f: name in f) + self._frontier.pop(i) + + self._indent() + for c in range(i): + self._write_edge("| ", c) + self._out.write("%s " % self.node_character) + for c in range(i, len(self._frontier)): + self._write_edge("| ", c) + self._out.write(" %s\n" % name) + + if node.dependencies: + self._add_deps_to_frontier(node, i) + elif self._frontier: + self._indent() + for c in range(i): + self._write_edge("| ", c) + for c in range(i, len(self._frontier)): + self._write_edge(" /", c) + self._out.write("\n") + + +def graph_ascii(spec, **kwargs): + node_character = kwargs.get('node', 'o') + out = kwargs.pop('out', None) + debug = kwargs.pop('debug', False) + indent = kwargs.pop('indent', 0) + color = kwargs.pop('color', None) + check_kwargs(kwargs, graph_ascii) + + graph = AsciiGraph() + graph.debug = debug + graph.indent = indent + graph.node_character = node_character + + graph.write(spec, color=color, out=out) + + + +def graph_dot(*specs, **kwargs): + """Generate a graph in dot format of all provided specs. + + Print out a dot formatted graph of all the dependencies between + package. Output can be passed to graphviz, e.g.: + + spack graph --dot qt | dot -Tpdf > spack-graph.pdf + + """ + out = kwargs.pop('out', sys.stdout) + check_kwargs(kwargs, graph_dot) + + out.write('digraph G {\n') + out.write(' label = "Spack Dependencies"\n') + out.write(' labelloc = "b"\n') + out.write(' rankdir = "LR"\n') + out.write(' ranksep = "5"\n') + out.write('\n') + + def quote(string): + return '"%s"' % string + + if not specs: + packages = spack.db.all_packages() + else: + packages = [] + for spec in specs: + packages.extend(s.package for s in spec.normalized().traverse()) + + deps = [] + for pkg in packages: + out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) + + # Add edges for each depends_on in the package. + for dep_name, dep in pkg.dependencies.iteritems(): + deps.append((pkg.name, dep_name)) + + # If the package provides something, add an edge for that. + for provider in set(p.name for p in pkg.provided): + deps.append((provider, pkg.name)) + + out.write('\n') + + for pair in deps: + out.write(' "%s" -> "%s"\n' % pair) + out.write('}\n') diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 0aa8090b61..db43d3909a 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -214,48 +214,6 @@ class PackageDB(object): return cls - def graph_dependencies(self, *specs, **kwargs): - """Print out a graph of all the dependencies between package. - Graph is in dot format.""" - out = kwargs.pop('out', sys.stdout) - check_kwargs(kwargs, self.graph_dependencies) - - out.write('digraph G {\n') - out.write(' label = "Spack Dependencies"\n') - out.write(' labelloc = "b"\n') - out.write(' rankdir = "LR"\n') - out.write(' ranksep = "5"\n') - out.write('\n') - - def quote(string): - return '"%s"' % string - - if not specs: - packages = self.all_packages() - else: - packages = [] - for spec in specs: - packages.extend(s.package for s in spec.normalized().traverse()) - - deps = [] - for pkg in packages: - out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) - - # Add edges for each depends_on in the package. - for dep_name, dep in pkg.dependencies.iteritems(): - deps.append((pkg.name, dep_name)) - - # If the package provides something, add an edge for that. 
- for provider in set(p.name for p in pkg.provided): - deps.append((provider, pkg.name)) - - out.write('\n') - - for pair in deps: - out.write(' "%s" -> "%s"\n' % pair) - out.write('}\n') - - class UnknownPackageError(spack.error.SpackError): """Raised when we encounter a package spack doesn't have.""" def __init__(self, name): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 7aed82e239..2f4fe9ca24 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -93,7 +93,6 @@ expansion when it is the first character in an id typed on the command line. import sys import itertools import hashlib -from heapq import * from StringIO import StringIO from operator import attrgetter @@ -1326,267 +1325,6 @@ class Spec(object): return out - def graph(self, **kwargs): - N = kwargs.get('node', 'o') # Node character - color = kwargs.get('color', True) - out = kwargs.get('out', ColorStream(sys.stdout, color=color)) - debug = kwargs.get('debug', False) - indent = kwargs.get('indent', 0) - indent *= ' ' - - topo_order = self.topological_sort(reverse=True) - - # Work on a clone so the spec is self contained (no incoming - # parent edges), and so we don't destroy this spec. - clone = self.copy() - - # Fast access to nodes in the spec. - nodes = clone.index() - - # Colors associated with each node in the DAG. - # Edges are colored by the node they point to. - all_colors = 'rgbmcyRGBMCY' - colors = dict((name, all_colors[i % len(all_colors)]) - for i, name in enumerate(topo_order)) - def write_edge(string, index, sub=0): - edge = "@%s{%s}" % (colors[frontier[index][sub]], string) - out.write(edge) - - frontier = [] - - def ordered_deps(node): - deps = node.dependencies - return sorted((d for d in deps), reverse=True) - - - def back_edge(prev_ends, end, start, collapse, label=None): - # Use prev & next for pipelining -- pipelined edges have - # the same start, and they're in sorted order e.g.:: - # - # start - # | |_|_|_|/| - # |/| | |_|/| - # | | |/| | | <-- when doing this line. 
- # prev end - # - out.write(indent) - - f = len(frontier) - - self._pos = 0 - def advance(to, fun): - for i in range(self._pos, to): - fun() - self._pos += 1 - - for p in prev_ends: - advance(p, lambda: write_edge("| ", self._pos)) - advance(p+1, lambda: write_edge("|/", self._pos)) - if end >= 0: - advance(end + 1, lambda: write_edge("| ", self._pos)) - advance(start - 1, lambda: (write_edge("|", self._pos) or - write_edge("_", end))) - else: - advance(start - 1, lambda: write_edge("| ", self._pos)) - - if start >= 0: - advance(start, lambda: (write_edge("|", self._pos) or - write_edge("/", end))) - - if collapse: - advance(len(frontier), lambda: write_edge(" /", self._pos)) - else: - advance(len(frontier), lambda: write_edge("| ", self._pos)) - - if debug: - out.write(" " * 10) - if label: out.write(label) - out.write("%s" % frontier) - - out.write("\n") - - - def connect_deps(i, deps, collapse, label): - """Connect dependencies to frontier at position i.""" - if len(deps) == 1 and deps in frontier: - j = frontier.index(deps) - - # connect to the left - if j < i: - if i-j > 1: # two lines if distance > 1 - back_edge([], j, i, True, label) - back_edge([j], -1, -1, (i-j == 1), label) - - # connect to the right - else: - if i < j: - frontier.pop(j) - frontier.insert(i, deps) - if j-i > 1: - back_edge([], i, j+1, collapse, label) - back_edge([i], -1, -1, not (j-i > 1) and collapse, label) - return True - - elif deps: - frontier.insert(i, deps) - return False - - - def add_deps_to_frontier(node, i): - """Add dependencies to frontier, connecting them if they're fully - expanded, and deleting parent pointers.""" - deps = ordered_deps(node) - connect_deps(i, deps, True, "add_deps") - for d in deps: - del nodes[d].dependents[node.name] - - - def find(seq, predicate): - for i, elt in enumerate(seq): - if predicate(elt): - return i - return -1 - - - add_deps_to_frontier(self, 0) - out.write(indent) - out.write('%s %s\n' % (N, self.name)) - topo_order.pop() - - while frontier: - # Find an unexpanded part of frontier - i = find(frontier, lambda f: len(f) > 1) - - # Expand frontier until there are enough columns for all children. - if i >= 0: - # Figure out how many back connections there are and - # sort them so we do them in order - back = [] - for d in frontier[i]: - b = find(frontier[:i], lambda f: f == [d]) - if b != -1: back.append((b, d)) - - # Do all back connections in sorted order so we can - # pipeline them and save space. - if back: - back.sort() - - prev_ends = [] - for j, (b, d) in enumerate(back): - frontier[i].remove(d) - if i-b > 1: - back_edge(prev_ends, b, i, False) - del prev_ends[:] - prev_ends.append(b) - back_edge(prev_ends, -1, -1, False) - - if not frontier[i]: - frontier.pop(i) - - elif len(frontier[i]) > 1: - # Expand forawrd after doing all back connections - out.write(indent) - for c in range(i): - write_edge("| ", c) - write_edge("|", i) - - if (i+1 < len(frontier) and len(frontier[i+1]) == 1 - and frontier[i+1][0] in frontier[i]): - # We need to connect to the element to the right. - # Keep lines straight by connecting directly and - # avoiding immediate expand/contract. - name = frontier[i+1][0] - frontier[i].remove(name) - - write_edge("\\", i+1) - for c in range(i+1, len(frontier)): - write_edge("| ", c ) - out.write("\n") - - else: - # Just allow the expansion here. 
- name = frontier[i].pop(0) - deps = [name] - write_edge("\\", i) - for c in range(i+1, len(frontier)): - write_edge(" \\", c) - out.write("\n") - connect_deps(i, deps, True, "expansion") - - # Handle any remaining back edges to the right - j = i+1 - while j < len(frontier): - deps = frontier.pop(j) - # TODO: semantics of connect_deps are weird. - # TODO: false return means the popped item was put - # TODO: back & not connected. - if not connect_deps(j, deps, True, "rem_back"): - j += 1 - - else: - name = topo_order.pop() - node = nodes[name] - - # Find the next node in topo order and remove it from - # the frontier. Since specs are single-rooted DAGs, - # the node is always there. If the graph had multiple - # roots, we'd need to handle that case case of a new root. - i = find(frontier, lambda f: name in f) - frontier.pop(i) - - out.write(indent) - for c in range(i): - write_edge("| ", c) - out.write("%s " % N) - for c in range(i, len(frontier)): - write_edge("| ", c) - out.write(" %s\n" % name) - - if node.dependencies: - add_deps_to_frontier(node, i) - elif frontier: - out.write(indent) - for c in range(i): - write_edge("| ", c) - for c in range(i, len(frontier)): - write_edge(" /", c) - out.write("\n") - - - def topological_sort(self, **kwargs): - """Return a list of dependency specs sorted topologically. - This spec is not modified in the process.""" - reverse = kwargs.get('reverse', False) - if not reverse: - parents = lambda s: s.dependents - children = lambda s: s.dependencies - else: - parents = lambda s: s.dependencies - children = lambda s: s.dependents - - spec = self.copy() - nodes = spec.index() - - topo_order = [] - remaining = [name for name in nodes.keys() if not parents(nodes[name])] - heapify(remaining) - - while remaining: - name = heappop(remaining) - topo_order.append(name) - - node = nodes[name] - for dep in children(node).values(): - del parents(dep)[node.name] - if not parents(dep): - heappush(remaining, dep.name) - - if any(parents(s) for s in spec.traverse()): - raise ValueError("Spec has cycles!") - else: - return topo_order - - def __repr__(self): return str(self) -- cgit v1.2.3-70-g09d2 From b4b8339d0d13e508119813877559f33049eefbd1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 3 Jan 2015 17:58:37 -0800 Subject: bugfix for dot graphs of virtual packages. 
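For reference, the Graphviz source that graph_dot() emits has this general shape (a hypothetical two-package DAG; the real output also sets labelloc, rankdir and ranksep, and lists every node and edge in the spec):

    print '\n'.join([
        'digraph G {',
        '  label = "Spack Dependencies"',
        '  "mpileaks" [label="mpileaks"]',
        '  "mpi"      [label="mpi"]',
        '  "mpileaks" -> "mpi"',
        '}'])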
--- lib/spack/spack/cmd/graph.py | 2 +- lib/spack/spack/graph.py | 26 ++++++++++++++++---------- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 13efab5fe5..58a411be71 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -39,7 +39,7 @@ def setup_parser(subparser): '--dot', action='store_true', help="Generate graph in dot format and print to stdout.") - method.add_argument( + subparser.add_argument( '--concretize', action='store_true', help="Concretize specs before graphing.") subparser.add_argument( diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 142c9c5c8f..08bd6f18bb 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -46,6 +46,7 @@ from llnl.util.lang import * from llnl.util.tty.color import * import spack +from spack.spec import Spec def topological_sort(spec, **kwargs): @@ -455,23 +456,28 @@ def graph_dot(*specs, **kwargs): return '"%s"' % string if not specs: - packages = spack.db.all_packages() + specs = [p.name for p in spack.db.all_packages()] else: - packages = [] - for spec in specs: - packages.extend(s.package for s in spec.normalized().traverse()) + roots = specs + specs = set() + for spec in roots: + specs.update(Spec(s.name) for s in spec.normalized().traverse()) deps = [] - for pkg in packages: - out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name)) + for spec in specs: + out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name)) + + # Skip virtual specs (we'll find out about them from concrete ones. + if spec.virtual: + continue # Add edges for each depends_on in the package. - for dep_name, dep in pkg.dependencies.iteritems(): - deps.append((pkg.name, dep_name)) + for dep_name, dep in spec.package.dependencies.iteritems(): + deps.append((spec.name, dep_name)) # If the package provides something, add an edge for that. - for provider in set(p.name for p in pkg.provided): - deps.append((provider, pkg.name)) + for provider in set(s.name for s in spec.package.provided): + deps.append((provider, spec.name)) out.write('\n') -- cgit v1.2.3-70-g09d2 From 5d033fbd0aed96770bd6802dbece6df1a5c8540e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 4 Jan 2015 18:49:22 -0800 Subject: Expansion works properly, simplified graph code. --- lib/spack/spack/cmd/graph.py | 4 +- lib/spack/spack/graph.py | 234 +++++++++++++++++++++++++++---------------- 2 files changed, 148 insertions(+), 90 deletions(-) diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index 58a411be71..f8cd18d91f 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -55,7 +55,7 @@ def graph(parser, args): graph_dot(*specs) elif specs: # ascii is default: user doesn't need to provide it explicitly - graph_ascii(specs[0]) + graph_ascii(specs[0], debug=spack.debug) for spec in specs[1:]: print # extra line bt/w independent graphs - graph_ascii(spec) + graph_ascii(spec, debug=spack.debug) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 08bd6f18bb..c4f5de1ebc 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -29,7 +29,30 @@ This file contains code for graphing DAGs of software packages about: graph_ascii() will output a colored graph of a spec in ascii format, -knd of like the graph git shows with "git log --graph". 
+kind of like the graph git shows with "git log --graph", e.g.:: + + o mpileaks + |\ + | |\ + | o | callpath + |/| | + | |\| + | |\ \ + | | |\ \ + | | | | o adept-utils + | |_|_|/| + |/| | | | + o | | | | mpi + / / / / + | | o | dyninst + | |/| | + |/|/| | + | | |/ + | o | libdwarf + |/ / + o | libelf + / + o boost graph_dot() will output a graph of a spec (or multiple specs) in dot format. @@ -102,11 +125,16 @@ def find(seq, predicate): return -1 +# Names of different graph line states. We Record previous line +# states so that we can easily determine what to do when connecting. +states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge') +NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states + class AsciiGraph(object): def __init__(self): # These can be set after initialization or after a call to # graph() to change behavior. - self.node_character = 'o' + self.node_character = '*' self.debug = False self.indent = 0 @@ -120,6 +148,7 @@ class AsciiGraph(object): self._out = None # Output stream self._frontier = None # frontier self._nodes = None # dict from name -> node + self._prev_state = None # State of previous line def _indent(self): @@ -133,7 +162,7 @@ class AsciiGraph(object): self._out.write(edge) - def _connect_deps(self, i, deps, collapse, label): + def _connect_deps(self, i, deps, label=None): """Connect dependencies to existing edges in the frontier. ``deps`` are to be inserted at position i in the @@ -147,9 +176,6 @@ class AsciiGraph(object): Parameters: - collapse -- whether the frontier is collapsing or staying the - same size. - label -- optional debug label for the connection. Returns: True if the deps were connected to another edge @@ -161,20 +187,25 @@ class AsciiGraph(object): if len(deps) == 1 and deps in self._frontier: j = self._frontier.index(deps) - # connect to the left - if j < i: - if i-j > 1: # two lines if distance > 1 - self._back_edge([], j, i, True, label) - self._back_edge([j], -1, -1, (i-j == 1), label) - - # connect to the right - else: - if i < j: - self._frontier.pop(j) - self._frontier.insert(i, deps) - if j-i > 1: - self._back_edge([], i, j+1, collapse, label) - self._back_edge([i], -1, -1, not (j-i > 1) and collapse, label) + # convert a right connection into a left connection + if i < j: + self._frontier.pop(j) + self._frontier.insert(i, deps) + return self._connect_deps(j, deps, label) + + collapse = True + if self._prev_state == EXPAND_RIGHT: + # Special case for when prev. line expanded (spacing is off by 1) + # Need two lines here even when distance in frontier is 1. + self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i,j))) + collapse = False + + elif i-j > 1: + # We need two lines to connect if distance > 1 + self._back_edge_line([], j, i, True, label + "-1 " + str((i,j))) + collapse = False + + self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j))) return True elif deps: @@ -182,22 +213,20 @@ class AsciiGraph(object): return False - def _add_deps_to_frontier(self, node, i): - """Add dependencies to frontier. - - Adds the dependencies of to the frontier, and connects - them to other open edges if they match. Also deletes parent - pointers in the node to mark edges as covered. 
- - """ - deps = sorted((d for d in node.dependencies), reverse=True) - self._connect_deps(i, deps, True, "add_deps") - for d in deps: - del self._nodes[d].dependents[node.name] + def _set_state(self, state, label=None): + if state not in states: + raise ValueError("Invalid graph state!") + self._prev_state = state + if self.debug: + self._out.write(" " * 20) + self._out.write("%-20s" % ( + str(self._prev_state) if self._prev_state else '')) + self._out.write("%-20s" % (str(label) if label else '')) + self._out.write("%s" % self._frontier) - def _back_edge(self, prev_ends, end, start, collapse, label=None): + def _back_edge_line(self, prev_ends, end, start, collapse, label=None): """Write part of a backwards edge in the graph. Writes single- or multi-line backward edges in an ascii graph. @@ -267,12 +296,64 @@ class AsciiGraph(object): else: advance(flen, lambda: [("| ", self._pos)] ) - if self.debug: - self._out.write(" " * 10) - if label: - self._out.write(label) - self._out.write("%s" % self._frontier) + self._set_state(BACK_EDGE, label) + self._out.write("\n") + + + def _node_line(self, index, name): + """Writes a line with a node at index.""" + self._indent() + for c in range(index): + self._write_edge("| ", c) + + self._out.write("%s " % self.node_character) + + for c in range(index+1, len(self._frontier)): + self._write_edge("| ", c) + + self._out.write(" %s" % name) + self._set_state(NODE) + self._out.write("\n") + + + def _collapse_line(self, index): + """Write a collapsing line after a node was added at index.""" + self._indent() + for c in range(index): + self._write_edge("| ", c) + for c in range(index, len(self._frontier)): + self._write_edge(" /", c) + self._set_state(COLLAPSE) + self._out.write("\n") + + + def _merge_right_line(self, index): + """Edge at index is same as edge to right. Merge directly with '\'""" + self._indent() + for c in range(index): + self._write_edge("| ", c) + self._write_edge("|", index) + self._write_edge("\\", index+1) + for c in range(index+1, len(self._frontier)): + self._write_edge("| ", c ) + + self._set_state(MERGE_RIGHT) + self._out.write("\n") + + + def _expand_right_line(self, index): + self._indent() + for c in range(index): + self._write_edge("| ", c) + + self._write_edge("|", index) + self._write_edge("\\", index+1) + + for c in range(index+2, len(self._frontier)): + self._write_edge(" \\", c) + + self._set_state(EXPAND_RIGHT) self._out.write("\n") @@ -311,27 +392,22 @@ class AsciiGraph(object): self._name_to_color = dict((name, self.colors[i % len(self.colors)]) for i, name in enumerate(topo_order)) - # This array tracks the open edges at the frontier of the - # graph we're writing out. - self._frontier = [] - - self._add_deps_to_frontier(spec, 0) - self._indent() - self._out.write('%s %s\n' % (self.node_character, spec.name)) - topo_order.pop() - + # Frontier tracks open edges of the graph as it's written out. + self._frontier = [[spec.name]] while self._frontier: # Find an unexpanded part of frontier i = find(self._frontier, lambda f: len(f) > 1) - # Expand frontier until there are enough columns for all children. if i >= 0: + # Expand frontier until there are enough columns for all children. + # Figure out how many back connections there are and # sort them so we do them in order back = [] for d in self._frontier[i]: b = find(self._frontier[:i], lambda f: f == [d]) - if b != -1: back.append((b, d)) + if b != -1: + back.append((b, d)) # Do all back connections in sorted order so we can # pipeline them and save space. 
@@ -341,79 +417,61 @@ class AsciiGraph(object): for j, (b, d) in enumerate(back): self._frontier[i].remove(d) if i-b > 1: - self._back_edge(prev_ends, b, i, False) + self._back_edge_line(prev_ends, b, i, False, 'left-1') del prev_ends[:] prev_ends.append(b) - self._back_edge(prev_ends, -1, -1, False) + self._back_edge_line(prev_ends, -1, -1, False, 'left-2') if not self._frontier[i]: self._frontier.pop(i) elif len(self._frontier[i]) > 1: - # Expand forawrd after doing all back connections - self._indent() - for c in range(i): - self._write_edge("| ", c) - self._write_edge("|", i) + # Expand forward after doing all back connections if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1 and self._frontier[i+1][0] in self._frontier[i]): # We need to connect to the element to the right. # Keep lines straight by connecting directly and - # avoiding immediate expand/contract. + # avoiding unnecessary expand/contract. name = self._frontier[i+1][0] self._frontier[i].remove(name) - - self._write_edge("\\", i+1) - for c in range(i+1, len(self._frontier)): - self._write_edge("| ", c ) - self._out.write("\n") + self._merge_right_line(i) else: # Just allow the expansion here. name = self._frontier[i].pop(0) deps = [name] - self._write_edge("\\", i) - for c in range(i+1, len(self._frontier)): - self._write_edge(" \\", c) - self._out.write("\n") - self._connect_deps(i, deps, True, "expansion") + self._frontier.insert(i, deps) + self._expand_right_line(i) + + self._frontier.pop(i) + self._connect_deps(i, deps, "post-expand") + # Handle any remaining back edges to the right j = i+1 while j < len(self._frontier): deps = self._frontier.pop(j) - if not self._connect_deps(j, deps, True, "rem_back"): + if not self._connect_deps(j, deps, "back-from-right"): j += 1 else: + # Nothing to expand; add dependencies for a node. name = topo_order.pop() node = self._nodes[name] - # Find the next node in topo order and remove it from - # the frontier. Since specs are single-rooted DAGs, - # the node is always there. If the graph had multiple - # roots, we'd need to handle that case case of a new root. + # Find the named node in the frontier and draw it. i = find(self._frontier, lambda f: name in f) - self._frontier.pop(i) - - self._indent() - for c in range(i): - self._write_edge("| ", c) - self._out.write("%s " % self.node_character) - for c in range(i, len(self._frontier)): - self._write_edge("| ", c) - self._out.write(" %s\n" % name) + self._node_line(i, name) + # Replace node with its dependencies + self._frontier.pop(i) if node.dependencies: - self._add_deps_to_frontier(node, i) + deps = sorted((d for d in node.dependencies), reverse=True) + self._connect_deps(i, deps, "new-deps") # anywhere. + elif self._frontier: - self._indent() - for c in range(i): - self._write_edge("| ", c) - for c in range(i, len(self._frontier)): - self._write_edge(" /", c) - self._out.write("\n") + self._collapse_line(i) def graph_ascii(spec, **kwargs): -- cgit v1.2.3-70-g09d2 From 935eba23579af1d64e56fea38ab981846e127645 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 5 Jan 2015 02:33:15 -0500 Subject: Allow commands to return error codes. 
--- bin/spack | 10 +++++++++- lib/spack/spack/cmd/graph.py | 5 +++++ lib/spack/spack/cmd/md5.py | 1 + 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/bin/spack b/bin/spack index b345a5079d..626d9d9d11 100755 --- a/bin/spack +++ b/bin/spack @@ -103,7 +103,7 @@ if args.insecure: # Try to load the particular command asked for and run it command = spack.cmd.get_command(args.command) try: - command(parser, args) + return_val = command(parser, args) except SpackError, e: if spack.debug: # In debug mode, raise with a full stack trace. @@ -116,3 +116,11 @@ except SpackError, e: except KeyboardInterrupt: sys.stderr.write('\n') tty.die("Keyboard interrupt.") + +# Allow commands to return values if they want to exit with some ohter code. +if return_val is None: + sys.exit(0) +elif isinstance(return_val, int): + sys.exit(return_val) +else: + tty.die("Bad return value from command %s: %s" % (args.command, return_val)) diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index f8cd18d91f..cb93a1b543 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -31,6 +31,8 @@ from spack.graph import * description = "Generate graphs of package dependency relationships." def setup_parser(subparser): + setup_parser.parser = subparser + method = subparser.add_mutually_exclusive_group() method.add_argument( '--ascii', action='store_true', @@ -50,6 +52,9 @@ def graph(parser, args): specs = spack.cmd.parse_specs( args.specs, normalize=True, concretize=args.concretize) + if not specs: + setup_parser.parser.print_help() + return 1 if args.dot: # Dot graph only if asked for. graph_dot(*specs) diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py index 496835c64b..dfa1be412b 100644 --- a/lib/spack/spack/cmd/md5.py +++ b/lib/spack/spack/cmd/md5.py @@ -41,6 +41,7 @@ def setup_parser(subparser): def md5(parser, args): if not args.files: setup_parser.parser.print_help() + return 1 for f in args.files: if not os.path.isfile(f): -- cgit v1.2.3-70-g09d2 From cd9e4b5b7f1aa13b846e48fbc07a7f847a21f46a Mon Sep 17 00:00:00 2001 From: Justin Too Date: Tue, 6 Jan 2015 11:12:02 -0800 Subject: (Package) Add Oracle JDK package --- var/spack/packages/jdk/package.py | 46 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 var/spack/packages/jdk/package.py diff --git a/var/spack/packages/jdk/package.py b/var/spack/packages/jdk/package.py new file mode 100644 index 0000000000..8f8076dd14 --- /dev/null +++ b/var/spack/packages/jdk/package.py @@ -0,0 +1,46 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +#------------------------------------------------------------------------------ +import distutils +from distutils import dir_util +from subprocess import call + +import spack +from spack import * +import llnl.util.tty as tty + +class Jdk(Package): + """The Java Development Kit (JDK) released by Oracle Corporation + in the form of a binary product aimed at Java developers.""" + homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" + + version('8u25-linux-x64', 'e145c03a7edc845215092786bcfba77e', + url="http://download.oracle.com/otn-pub/java/jdk/8u25-b17/jdk-8u25-linux-x64.tar.gz") + + # Oracle requires that you accept their License Agreement in order + # to access the Java packages in download.oracle.com. In order to + # automate this process, we need to utilize these additional curl + # commandline options. 
+ # + # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux + curl_options=[ + '-j', # junk cookies + '-H', # specify required License Agreement cookie + 'Cookie: oraclelicense=accept-securebackup-cookie'] + + def do_fetch(self): + # Add our custom curl commandline options + tty.msg( + "[Jdk] Adding required commandline options to curl " + + "before performing fetch: %s" % + (self.curl_options)) + + for option in self.curl_options: + spack.curl.add_default_arg(option) + + # Now perform the actual fetch + super(Jdk, self).do_fetch() + + + def install(self, spec, prefix): + distutils.dir_util.copy_tree(".", prefix) -- cgit v1.2.3-70-g09d2 From 3a07ec6c7d28bb38979209d1d32ebf1742a9e570 Mon Sep 17 00:00:00 2001 From: Justin Too Date: Tue, 6 Jan 2015 15:43:32 -0800 Subject: (Package) Add ROSE compiler package --- .../rose/add_spack_compiler_recognition.patch | 13 ++++++++ var/spack/packages/rose/package.py | 39 ++++++++++++++++++++++ 2 files changed, 52 insertions(+) create mode 100644 var/spack/packages/rose/add_spack_compiler_recognition.patch create mode 100644 var/spack/packages/rose/package.py diff --git a/var/spack/packages/rose/add_spack_compiler_recognition.patch b/var/spack/packages/rose/add_spack_compiler_recognition.patch new file mode 100644 index 0000000000..ce61ae4e4c --- /dev/null +++ b/var/spack/packages/rose/add_spack_compiler_recognition.patch @@ -0,0 +1,13 @@ +diff --git a/config/compiler-defs.m4 b/config/compiler-defs.m4 +index d7d85d2..780c8de 100644 +--- a/config/compiler-defs.m4 ++++ b/config/compiler-defs.m4 +@@ -28,7 +28,7 @@ dnl predefined by a specific compiler + # g++|gcc|mpicc|mpic++|mpicxx|mpiCC) + # TOO (2/16/2011): added support for tensilica compilers, assuming they are + # like GCC (they use a GCC front-end) +- g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) ++ cc*|c++*|g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) + BACKEND_GCC_MAJOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f1` + BACKEND_GCC_MINOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f2` + BACKEND_GCC_PATCHLEVEL=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f3` diff --git a/var/spack/packages/rose/package.py b/var/spack/packages/rose/package.py new file mode 100644 index 0000000000..1d7294acab --- /dev/null +++ b/var/spack/packages/rose/package.py @@ -0,0 +1,39 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +#------------------------------------------------------------------------------ + +from spack import * + +class Rose(Package): + """A compiler infrastructure to build source-to-source program + transformation and analysis tools. 
+ (Developed at Lawrence Livermore National Lab)""" + + homepage = "http://rosecompiler.org/" + url = "https://github.com/rose-compiler/edg4x-rose" + + version('master', branch='master', git='https://github.com/rose-compiler/edg4x-rose.git') + + patch('add_spack_compiler_recognition.patch') + + depends_on("autoconf@2.69") + depends_on("automake@1.14") + depends_on("libtool@2.4") + depends_on("boost@1.54.0") + depends_on("jdk@8u25-linux-x64") + + def install(self, spec, prefix): + # Bootstrap with autotools + bash = which('bash') + bash('build') + + # Configure, compile & install + with working_dir('rose-build', create=True): + boost = spec['boost'] + + configure = Executable('../configure') + configure("--prefix=" + prefix, + "--with-boost=" + boost.prefix, + "--disable-boost-version-check") + make("install-core") + -- cgit v1.2.3-70-g09d2 From 011f71a442deb8d78f0e55ef1e502e2d2426f48c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Jan 2015 19:09:03 -0800 Subject: Fix bug in STAT graph --- lib/spack/spack/graph.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index c4f5de1ebc..bebb68d06a 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -149,6 +149,7 @@ class AsciiGraph(object): self._frontier = None # frontier self._nodes = None # dict from name -> node self._prev_state = None # State of previous line + self._prev_index = None # Index of expansion point of prev line def _indent(self): @@ -195,15 +196,19 @@ class AsciiGraph(object): collapse = True if self._prev_state == EXPAND_RIGHT: - # Special case for when prev. line expanded (spacing is off by 1) - # Need two lines here even when distance in frontier is 1. - self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i,j))) + # Special case where previous line expanded and i is off by 1. + self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j))) collapse = False - elif i-j > 1: - # We need two lines to connect if distance > 1 - self._back_edge_line([], j, i, True, label + "-1 " + str((i,j))) - collapse = False + else: + # Previous node also expanded here, so i is off by one. 
+ if self._prev_state == NODE and self._prev_index < i: + i += 1 + + if i-j > 1: + # We need two lines to connect if distance > 1 + self._back_edge_line([], j, i, True, label + "-1 " + str((i,j))) + collapse = False self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j))) return True @@ -213,10 +218,11 @@ class AsciiGraph(object): return False - def _set_state(self, state, label=None): + def _set_state(self, state, index, label=None): if state not in states: raise ValueError("Invalid graph state!") self._prev_state = state + self._prev_index = index if self.debug: self._out.write(" " * 20) @@ -296,7 +302,7 @@ class AsciiGraph(object): else: advance(flen, lambda: [("| ", self._pos)] ) - self._set_state(BACK_EDGE, label) + self._set_state(BACK_EDGE, end, label) self._out.write("\n") @@ -312,7 +318,7 @@ class AsciiGraph(object): self._write_edge("| ", c) self._out.write(" %s" % name) - self._set_state(NODE) + self._set_state(NODE, index) self._out.write("\n") @@ -324,7 +330,7 @@ class AsciiGraph(object): for c in range(index, len(self._frontier)): self._write_edge(" /", c) - self._set_state(COLLAPSE) + self._set_state(COLLAPSE, index) self._out.write("\n") @@ -338,7 +344,7 @@ class AsciiGraph(object): for c in range(index+1, len(self._frontier)): self._write_edge("| ", c ) - self._set_state(MERGE_RIGHT) + self._set_state(MERGE_RIGHT, index) self._out.write("\n") @@ -353,7 +359,7 @@ class AsciiGraph(object): for c in range(index+2, len(self._frontier)): self._write_edge(" \\", c) - self._set_state(EXPAND_RIGHT) + self._set_state(EXPAND_RIGHT, index) self._out.write("\n") -- cgit v1.2.3-70-g09d2 From 9db967be9827d44150a840f52ecd1e0f28b5bd4e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Jan 2015 19:23:07 -0800 Subject: Fix bug when all deps are back edges. - Happened with the graph for SAMRAI --- lib/spack/spack/graph.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index bebb68d06a..5fb6a9cd23 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -426,10 +426,13 @@ class AsciiGraph(object): self._back_edge_line(prev_ends, b, i, False, 'left-1') del prev_ends[:] prev_ends.append(b) - self._back_edge_line(prev_ends, -1, -1, False, 'left-2') - if not self._frontier[i]: - self._frontier.pop(i) + # Check whether we did ALL the deps as back edges, + # in which case we're done. 
+ collapse = not self._frontier[i] + if collapse: + self._frontier.pop(i) + self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2') elif len(self._frontier[i]) > 1: # Expand forward after doing all back connections -- cgit v1.2.3-70-g09d2 From 844c0838487529c0f2edc6f09e6ef86f12364716 Mon Sep 17 00:00:00 2001 From: Luc Jaulmes Date: Mon, 12 Jan 2015 20:38:32 +0100 Subject: Updated versions in OmpSs and Extrae, which resolves version-dependency problems with MPI --- var/spack/packages/extrae/package.py | 30 +++++++++++++++--------------- var/spack/packages/ompss/package.py | 5 +++-- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/var/spack/packages/extrae/package.py b/var/spack/packages/extrae/package.py index b1a3a3e440..3ad4cbaf86 100644 --- a/var/spack/packages/extrae/package.py +++ b/var/spack/packages/extrae/package.py @@ -1,5 +1,8 @@ from spack import * +# typical working line with extrae 3.0.1 +# ./configure --prefix=/usr/local --with-mpi=/usr/lib64/mpi/gcc/openmpi --with-unwind=/usr/local --with-papi=/usr --with-dwarf=/usr --with-elf=/usr --with-dyninst=/usr --with-binutils=/usr --with-xml-prefix=/usr --enable-openmp --enable-nanos --enable-pthread --disable-parallel-merge LDFLAGS=-pthread + class Extrae(Package): """Extrae is the package devoted to generate tracefiles which can be analyzed later by Paraver. Extrae is a tool that uses @@ -10,11 +13,10 @@ class Extrae(Package): programming models either alone or in conjunction with MPI : OpenMP, CUDA, OpenCL, pthread, OmpSs""" homepage = "http://www.bsc.es/computer-sciences/extrae" - url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-2.5.1.tar.bz2" - version('2.5.1', '422376b9c68243bd36a8a73fa62de106') + url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.0.1.tar.bz2" + version('3.0.1', 'a6a8ca96cd877723cd8cc5df6bdb922b') - #depends_on("mpi") - depends_on("openmpi@:1.6") + depends_on("mpi") depends_on("dyninst") depends_on("libunwind") depends_on("boost") @@ -24,22 +26,20 @@ class Extrae(Package): def install(self, spec, prefix): if 'openmpi' in spec: mpi = spec['openmpi'] - #if spec.satisfies('@2.5.1') and spec.satisfies('^openmpi@1.6.5'): - # tty.error("Some headers conflict when using OpenMPI 1.6.5. Please use 1.6 instead.") elif 'mpich' in spec: mpi = spec['mpich'] elif 'mvapich2' in spec: mpi = spec['mvapich2'] - configure("--prefix=%s" % prefix, - "--with-mpi=%s" % mpi.prefix, - "--with-unwind=%s" % spec['libunwind'].prefix, - "--with-dyninst=%s" % spec['dyninst'].prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-dwarf=%s" % spec['libdwarf'].prefix, - "--with-papi=%s" % spec['papi'].prefix, - "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, - "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) + configure("--prefix=%s" % prefix, + "--with-mpi=%s" % mpi.prefix, + "--with-unwind=%s" % spec['libunwind'].prefix, + "--with-dyninst=%s" % spec['dyninst'].prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-dwarf=%s" % spec['libdwarf'].prefix, + "--with-papi=%s" % spec['papi'].prefix, + "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, + "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) make() make("install", parallel=False) diff --git a/var/spack/packages/ompss/package.py b/var/spack/packages/ompss/package.py index 544671de38..d529058036 100644 --- a/var/spack/packages/ompss/package.py +++ b/var/spack/packages/ompss/package.py @@ -16,8 +16,9 @@ class Ompss(Package): APIs like CUDA or OpenCL. 
Our OmpSs environment is built on top of our Mercurium compiler and Nanos++ runtime system.""" homepage = "http://pm.bsc.es/" - url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.06.tar.gz" - version('14.06', '99be5dce74c0d7eea42636d26af47b4181ae2e11') + url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz" + version('14.10', '404d161265748f2f96bb35fd8c7e79ee') + # all dependencies are optional, really depends_on("mpi") -- cgit v1.2.3-70-g09d2 From 917d82be0de261d3d211960749f0dba469253edf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 13 Jan 2015 00:45:12 -0800 Subject: Add list_url for ompss --- var/spack/packages/ompss/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/ompss/package.py b/var/spack/packages/ompss/package.py index d529058036..e09e0a624f 100644 --- a/var/spack/packages/ompss/package.py +++ b/var/spack/packages/ompss/package.py @@ -17,8 +17,9 @@ class Ompss(Package): of our Mercurium compiler and Nanos++ runtime system.""" homepage = "http://pm.bsc.es/" url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz" - version('14.10', '404d161265748f2f96bb35fd8c7e79ee') + list_url = 'http://pm.bsc.es/ompss-downloads' + version('14.10', '404d161265748f2f96bb35fd8c7e79ee') # all dependencies are optional, really depends_on("mpi") @@ -47,4 +48,3 @@ class Ompss(Package): configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options) make() make("install") - -- cgit v1.2.3-70-g09d2 From c6351b5d00708fc3ac08ea330d6710ee45367375 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 14 Jan 2015 00:18:29 -0800 Subject: Fix #11: bug in ProviderIndex - packages that provided same spec (e.g. mpe) were overwritten in the index - Index now has a set of providers instead of a single provider per provided spec. - see https://github.com/scalability-llnl/spack/issues/11 --- lib/spack/spack/virtual.py | 45 +++++++++++++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py index 960212eba9..ff5d7c9035 100644 --- a/lib/spack/spack/virtual.py +++ b/lib/spack/spack/virtual.py @@ -26,20 +26,21 @@ The ``virtual`` module contains utility classes for virtual dependencies. """ import spack.spec +import itertools class ProviderIndex(object): """This is a dict of dicts used for finding providers of particular virtual dependencies. The dict of dicts looks like: { vpkg name : - { full vpkg spec : package providing spec } } + { full vpkg spec : set(packages providing spec) } } Callers can use this to first find which packages provide a vpkg, then find a matching full spec. e.g., in this scenario: { 'mpi' : - { mpi@:1.1 : mpich, - mpi@:2.3 : mpich2@1.9: } } + { mpi@:1.1 : set([mpich]), + mpi@:2.3 : set([mpich2@1.9:]) } } Calling providers_for(spec) will find specs that provide a matching implementation of MPI. @@ -75,15 +76,19 @@ class ProviderIndex(object): if provided_name not in self.providers: self.providers[provided_name] = {} + provider_map = self.providers[provided_name] + if not provided_spec in provider_map: + provider_map[provided_spec] = set() + if self.restrict: - self.providers[provided_name][provided_spec] = spec + provider_map[provided_spec].add(spec) else: # Before putting the spec in the map, constrain it so that # it provides what was asked for. 
constrained = spec.copy() constrained.constrain(provider_spec) - self.providers[provided_name][provided_spec] = constrained + provider_map[provided_spec].add(constrained) def providers_for(self, *vpkg_specs): @@ -97,9 +102,9 @@ class ProviderIndex(object): # Add all the providers that satisfy the vpkg spec. if vspec.name in self.providers: - for provider_spec, spec in self.providers[vspec.name].items(): + for provider_spec, spec_set in self.providers[vspec.name].items(): if provider_spec.satisfies(vspec, deps=False): - providers.add(spec) + providers.update(spec_set) # Return providers in order return sorted(providers) @@ -108,16 +113,22 @@ class ProviderIndex(object): # TODO: this is pretty darned nasty, and inefficient. def _cross_provider_maps(self, lmap, rmap): result = {} - for lspec in lmap: - for rspec in rmap: - try: - constrained = lspec.copy().constrain(rspec) - if lmap[lspec].name != rmap[rspec].name: + for lspec, rspec in itertools.product(lmap, rmap): + try: + constrained = lspec.copy().constrain(rspec) + except spack.spec.UnsatisfiableSpecError: + continue + + # lp and rp are left and right provider specs. + for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]): + if lp_spec.name == rp_spec.name: + try: + const = lp_spec.copy().constrain(rp_spec,deps=False) + if constrained not in result: + result[constrained] = set() + result[constrained].add(const) + except spack.spec.UnsatisfiableSpecError: continue - result[constrained] = lmap[lspec].copy().constrain( - rmap[rspec], deps=False) - except spack.spec.UnsatisfiableSpecError: - continue return result @@ -132,6 +143,8 @@ class ProviderIndex(object): if not common: return True + # This ensures that some provider in other COULD satisfy the + # vpkg constraints on self. result = {} for name in common: crossed = self._cross_provider_maps(self.providers[name], -- cgit v1.2.3-70-g09d2 From 4e3662f3188df8c9b6631caf7802e5ece3960b39 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 17 Jan 2015 17:09:42 -0800 Subject: Dyninst 8.2 works. 
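The interesting part of the package change below is that both build systems stay in one file: the new CMake-based install() is the default, and the old autotools build is kept for 8.1 and earlier via Spack's @when version guard. Schematically (bodies elided; see the diff for the real arguments):

    class Dyninst(Package):
        def install(self, spec, prefix):
            # default implementation: CMake build, used when no @when
            # condition on install() matches the concrete spec
            ...

        @when('@:8.1')
        def install(self, spec, prefix):
            # picked instead of the default for dyninst@:8.1, i.e.
            # versions 8.1.x and older keep the configure/make build
            ...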
--- var/spack/packages/dyninst/package.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/dyninst/package.py b/var/spack/packages/dyninst/package.py index 9fbc6385a9..2e6f3e010a 100644 --- a/var/spack/packages/dyninst/package.py +++ b/var/spack/packages/dyninst/package.py @@ -31,13 +31,34 @@ class Dyninst(Package): url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" list_url = "http://www.dyninst.org/downloads/dyninst-8.x" - version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a') - version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac') + version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', + url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") + version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a', + url="http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz") + version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac', + url="http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz") depends_on("libelf") depends_on("libdwarf") - depends_on("boost@1.42:") + depends_on("boost@1.42:1.43") + # new version uses cmake + def install(self, spec, prefix): + libelf = spec['libelf'].prefix + libdwarf = spec['libdwarf'].prefix + + with working_dir('spack-build', create=True): + cmake('..', + '-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'), + '-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'), + '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include, + '-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'), + *std_cmake_args) + make() + make("install") + + + @when('@:8.1') def install(self, spec, prefix): configure("--prefix=" + prefix) make() -- cgit v1.2.3-70-g09d2 From a4c19eee14103e9944e04917a44edec2b3209f94 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:00:54 -0800 Subject: Qt5 webkit requires gperf --- var/spack/packages/gperf/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 var/spack/packages/gperf/package.py diff --git a/var/spack/packages/gperf/package.py b/var/spack/packages/gperf/package.py new file mode 100644 index 0000000000..32551b67b4 --- /dev/null +++ b/var/spack/packages/gperf/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Gperf(Package): + """GNU gperf is a perfect hash function generator. For a given + list of strings, it produces a hash function and hash table, in + form of C or C++ code, for looking up a value depending on the + input string. The hash function is perfect, which means that the + hash table has no collisions, and the hash table lookup needs a + single string comparison only.""" + + homepage = "https://www.gnu.org/software/gperf/" + url = "http://ftp.gnu.org/pub/gnu/gperf/gperf-3.0.4.tar.gz" + + version('3.0.4', 'c1f1db32fb6598d6a93e6e88796a8632') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From f35b8b8db42332a93168e3f8a707442186359a6c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:05:48 -0800 Subject: Better location error output. 
--- lib/spack/spack/cmd/location.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 509c336b69..e422eaf966 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import sys from external import argparse import llnl.util.tty as tty @@ -86,11 +87,12 @@ def location(parser, args): tty.die("Spec '%s' matches no installed packages." % spec) elif len(matching_specs) > 1: - args = ["%s matches multiple packages." % spec, - "Matching packages:"] - args += [" " + str(s) for s in matching_specs] - args += ["Use a more specific spec."] - tty.die(*args) + tty.error("%s matches multiple packages:" % spec) + for s in matching_specs: + sys.stderr.write(s.tree(color=True)) + sys.stderr.write("\n") + sys.stderr.write("Use a more specific spec.\n") + sys.exit(1) print matching_specs[0].prefix -- cgit v1.2.3-70-g09d2 From 53c8b4249ad5b875678ebe6d784fbc5493aef160 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:06:09 -0800 Subject: Make dbus put a machine id file in the right place. --- var/spack/packages/dbus/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/packages/dbus/package.py b/var/spack/packages/dbus/package.py index 5fee103f03..f7f394498c 100644 --- a/var/spack/packages/dbus/package.py +++ b/var/spack/packages/dbus/package.py @@ -23,3 +23,7 @@ class Dbus(Package): configure("--prefix=%s" % prefix) make() make("install") + + # dbus needs a machine id generated after install + dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen')) + dbus_uuidgen('--ensure') -- cgit v1.2.3-70-g09d2 From 0211adbdb6010b403495644aab619ba8a9bed31e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:06:25 -0800 Subject: version bump libpng --- var/spack/packages/libpng/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/libpng/package.py b/var/spack/packages/libpng/package.py index a6d9bf0b46..affc14ea92 100644 --- a/var/spack/packages/libpng/package.py +++ b/var/spack/packages/libpng/package.py @@ -3,9 +3,9 @@ from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" - url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download" + url = "http://prdownloads.sourceforge.net/libpng/libpng-1.6.16.tar.gz?download" - version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) -- cgit v1.2.3-70-g09d2 From b7dacb427d2d9e80778b6c0eec05e0cc64d60859 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:06:45 -0800 Subject: Qt5 & VTK builds. VTK works with Qt 4 and 5. 
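Since VTK simply builds against whatever Qt it is handed (and only adds -DVTK_QT_VERSION=5 when that Qt is a 5.x), the Qt major version can be chosen per install. Hypothetical command lines, assuming both Qt versions concretize on the target machine:

    spack install vtk ^qt@4.8.6    # VTK on top of Qt 4
    spack install vtk ^qt@5.4.0    # VTK on top of Qt 5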
--- var/spack/packages/qt/package.py | 76 +++++++++++++++++++++++++++++++-------- var/spack/packages/vtk/package.py | 40 +++++++++++++++++++++ 2 files changed, 101 insertions(+), 15 deletions(-) create mode 100644 var/spack/packages/vtk/package.py diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 01f9de7f3c..4f82a9d9d6 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -3,7 +3,16 @@ from spack import * class Qt(Package): """Qt is a comprehensive cross-platform C++ application framework.""" homepage = "http://qt.io" + list_url = 'http://download.qt-project.org/official_releases/qt/' + list_depth = 2 + version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', + url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') + version('5.3.2', 'febb001129927a70174467ecb508a682', + url='http://download.qt.io/archive/qt/5.3/5.3.2/single/qt-everywhere-opensource-src-5.3.2.tar.gz') + + version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8', + url='http://download.qt.io/archive/qt/5.2/5.2.1/single/qt-everywhere-opensource-src-5.2.1.tar.gz') version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") @@ -20,25 +29,62 @@ class Qt(Package): depends_on("libmng") depends_on("jpeg") + depends_on("gperf") # Needed to build Qt with webkit. + def patch(self): + if self.spec.satisfies('@4'): + qmake_conf = 'mkspecs/common/g++-base.conf' + elif self.spec.satisfies('@5'): + qmake_conf = 'qtbase/mkspecs/common/g++-base.conf' + else: + return + # Fix qmake compilers in the default mkspec - qmake_conf = 'mkspecs/common/g++-base.conf' - filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) - filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) + filter_file(r'^QMAKE_COMPILER *=.*$', 'QMAKE_COMPILER = cc', qmake_conf) + filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) + filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) - def install(self, spec, prefix): - configure('-v', - '-confirm-license', - '-opensource', - '-prefix', prefix, - '-openssl-linked', - '-dbus-linked', - '-fast', - '-optimized-qmake', - '-no-pch', - '-no-phonon', + @property + def common_config_args(self): + return [ + '-prefix', self.prefix, + '-v', + '-opensource', + "-release", + '-shared', + '-confirm-license', + '-openssl-linked', + '-dbus-linked', + '-optimized-qmake', + '-no-openvg', + '-no-pch', + # For now, disable all the database drivers + "-no-sql-db2", "-no-sql-ibase", "-no-sql-mysql", "-no-sql-oci", "-no-sql-odbc", + "-no-sql-psql", "-no-sql-sqlite", "-no-sql-sqlite2", "-no-sql-tds", + # NIS is deprecated in more recent glibc + "-no-nis"] + + + @when('@4') + def configure(self): + configure('-no-phonon', '-no-phonon-backend', - '-no-openvg') + '-fast', + *self.common_config_args) + + + @when('@5') + def configure(self): + configure('-no-eglfs', + '-no-directfb', + '-qt-xcb', + # If someone wants to get a webkit build working, be my guest! 
+ '-skip', 'qtwebkit', + *self.common_config_args) + + + def install(self, spec, prefix): + self.configure() make() make("install") diff --git a/var/spack/packages/vtk/package.py b/var/spack/packages/vtk/package.py new file mode 100644 index 0000000000..4a27a8fedb --- /dev/null +++ b/var/spack/packages/vtk/package.py @@ -0,0 +1,40 @@ +from spack import * + +class Vtk(Package): + """The Visualization Toolkit (VTK) is an open-source, freely + available software system for 3D computer graphics, image + processing and visualization. """ + homepage = "http://www.vtk.org" + url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz" + + version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d') + + depends_on("qt") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake_args = [ + "..", + "-DBUILD_SHARED_LIBS=ON", + # Disable wrappers for other languages. + "-DVTK_WRAP_PYTHON=OFF", + "-DVTK_WRAP_JAVA=OFF", + "-DVTK_WRAP_TCL=OFF"] + cmake_args.extend(std_cmake_args) + + # Enable Qt support here. + cmake_args.extend([ + "-DQT_QMAKE_EXECUTABLE:PATH=%s/qmake" % spec['qt'].prefix.bin, + "-DVTK_Group_Qt:BOOL=ON", + # Ignore webkit because it's hard to build w/Qt + "-DVTK_Group_Qt=OFF", + "-DModule_vtkGUISupportQt:BOOL=ON", + "-DModule_vtkGUISupportQtOpenGL:BOOL=ON" + ]) + + if spec['qt'].satisfies('@5'): + cmake_args.append("-DVTK_QT_VERSION:STRING=5") + + cmake(*cmake_args) + make() + make("install") -- cgit v1.2.3-70-g09d2 From d08c0703a06825311a4fabf2c49f7f04cceadbf9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 14:07:41 -0800 Subject: Initial build of MemAxes GUI. --- var/spack/packages/memaxes/package.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 var/spack/packages/memaxes/package.py diff --git a/var/spack/packages/memaxes/package.py b/var/spack/packages/memaxes/package.py new file mode 100644 index 0000000000..afa62009a6 --- /dev/null +++ b/var/spack/packages/memaxes/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Memaxes(Package): + """MemAxes is a visualizer for sampled memory trace data.""" + + homepage = "https://github.com/scalability-llnl/MemAxes" + + version('0.5', 'b0f561d48aa7301e028d074bc4b5751b', + url='https://github.com/scalability-llnl/MemAxes/archive/v0.5.tar.gz') + + depends_on("cmake@2.8.9:") + depends_on("qt@5:") + depends_on("vtk") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") + -- cgit v1.2.3-70-g09d2 From 2a0e33876ec9f6e33a0408228463a021a1809668 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 20:41:45 -0800 Subject: Add PSAPI --- var/spack/packages/psapi/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/packages/psapi/package.py diff --git a/var/spack/packages/psapi/package.py b/var/spack/packages/psapi/package.py new file mode 100644 index 0000000000..b73c75c221 --- /dev/null +++ b/var/spack/packages/psapi/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Psapi(Package): + """PSAPI is a library and a tool for collecting sampled memory + performance data to view with MemAxes""" + + homepage = "https://github.com/scalability-llnl/PSAPI" + url = "http://www.example.com/memaxes-psapi-1.0.tar.gz" + + version('0.5', git='https://github.com/scalability-llnl/PSAPI.git', tag='v0.5') + + depends_on('dyninst') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', 
*std_cmake_args) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 51ed0d3f6f3ba8cf3c7a17db967aead0b691e3a4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 19 Jan 2015 20:59:23 -0800 Subject: Properly set install RPATHS for cmake builds. --- lib/spack/spack/build_environment.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 45353ec640..70b5c89411 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -183,6 +183,10 @@ def set_module_variables_for_package(pkg): if platform.mac_ver()[0]: m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST') + # Set up CMake rpath + m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE') + m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg))) + # Emulate some shell commands for convenience m.pwd = os.getcwd m.cd = os.chdir @@ -202,6 +206,16 @@ def set_module_variables_for_package(pkg): m.prefix = pkg.prefix +def get_rpaths(pkg): + """Get a list of all the rpaths for a package.""" + rpaths = [pkg.prefix.lib, pkg.prefix.lib64] + rpaths.extend(d.prefix.lib for d in pkg.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib)) + rpaths.extend(d.prefix.lib64 for d in pkg.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib64)) + return rpaths + + def setup_package(pkg): """Execute all environment setup routines.""" set_compiler_environment_variables(pkg) -- cgit v1.2.3-70-g09d2 From e97db785d6d2f3a5b54d3a9e6ff0782901aa1b11 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Wed, 21 Jan 2015 20:42:44 -0800 Subject: psapi v0.6 --- var/spack/packages/psapi/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/psapi/package.py b/var/spack/packages/psapi/package.py index b73c75c221..489c4f530e 100644 --- a/var/spack/packages/psapi/package.py +++ b/var/spack/packages/psapi/package.py @@ -7,7 +7,7 @@ class Psapi(Package): homepage = "https://github.com/scalability-llnl/PSAPI" url = "http://www.example.com/memaxes-psapi-1.0.tar.gz" - version('0.5', git='https://github.com/scalability-llnl/PSAPI.git', tag='v0.5') + version('0.6', git='https://github.com/scalability-llnl/PSAPI.git', tag='v0.6') depends_on('dyninst') -- cgit v1.2.3-70-g09d2 From e6b4530234dc9c32fc69a0dae21cf8c28ce33804 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 22 Jan 2015 13:52:28 -0800 Subject: Add is_exe function to filesystem. 
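The helper just wraps the isfile + X_OK check so callers don't have to open-code it; usage is simply (names below are placeholders):

    from llnl.util.filesystem import is_exe

    if is_exe(candidate_path):
        # regular file with the execute bit set for the current user
        handle_executable(candidate_path)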
--- lib/spack/llnl/util/filesystem.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 9f08832598..0578415653 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -24,7 +24,7 @@ ############################################################################## __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', - 'can_access', 'filter_file', 'change_sed_delimiter'] + 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe'] import os import sys @@ -154,6 +154,11 @@ def install(src, dest): os.chmod(dest, dest_mode) +def is_exe(path): + """True if path is an executable file.""" + return os.path.isfile(path) and os.access(path, os.X_OK) + + def expand_user(path): """Find instances of '%u' in a path and replace with the current user's username.""" -- cgit v1.2.3-70-g09d2 From 3e37903ffdbc72410264d1949dde9239e81ec698 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 22 Jan 2015 23:34:39 -0800 Subject: Packages have rpath property. --- lib/spack/spack/package.py | 31 +++++++++++++++++++++++++++---- var/spack/packages/gcc/package.py | 29 +++++++++++++++++++++++------ 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 1a797e88b1..fe4fc748d8 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -713,6 +713,14 @@ class Package(object): tty.msg("Patched %s" % self.name) + def do_fake_install(self): + """Make a fake install directory contaiing a 'fake' file in bin.""" + mkdirp(self.prefix.bin) + touch(join_path(self.prefix.bin, 'fake')) + mkdirp(self.prefix.lib) + mkdirp(self.prefix.man1) + + def do_install(self, **kwargs): """This class should call this version of the install method. Package implementations should override install(). @@ -758,13 +766,11 @@ class Package(object): spack.install_layout.make_path_for_spec(self.spec) # Set up process's build environment before running install. + self.stage.chdir_to_source() build_env.setup_package(self) if fake_install: - mkdirp(self.prefix.bin) - touch(join_path(self.prefix.bin, 'fake')) - mkdirp(self.prefix.lib) - mkdirp(self.prefix.man1) + self.do_fake_install() else: # Subclasses implement install() to do the real work. 
self.install(self.spec, self.prefix) @@ -925,6 +931,23 @@ class Package(object): e.url, e.message) + @property + def rpath(self): + """Get the rpath this package links with, as a list of paths.""" + rpaths = [self.prefix.lib, self.prefix.lib64] + rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib)) + rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib64)) + return rpaths + + + @property + def rpath_args(self): + """Get the rpath args as a string, with -Wl,-rpath= for each element.""" + return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) + + def find_versions_of_archive(*archive_urls, **kwargs): list_url = kwargs.get('list_url', None) list_depth = kwargs.get('list_depth', 1) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index da0debd5dc..3da6c25d47 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -32,29 +32,46 @@ class Gcc(Package): Objective-C, Fortran, and Java.""" homepage = "https://gcc.gnu.org" + url = "http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2" list_url = 'http://open-source-box.org/gcc/' list_depth = 2 - version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43', - url="http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2") - version('4.9.1', 'fddf71348546af523353bd43d34919c1', - url="http://open-source-box.org/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2") + version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43') + version('4.9.1', 'fddf71348546af523353bd43d34919c1') + version('4.8.4', '5a84a30839b2aca22a2d723de2a626ec') + version('4.7.4', '4c696da46297de6ae77a82797d2abe28') + version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') + version('4.5.4', '27e459c2566b8209ab064570e1b378f7') - depends_on("mpc") depends_on("mpfr") depends_on("gmp") + depends_on("mpc") # when @4.5: depends_on("libelf") + # Save these until we can do optional deps. + #depends_on("isl") + #depends_on("ppl") + #depends_on("cloog") def install(self, spec, prefix): # libjava/configure needs a minor fix to install into spack paths. filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True) + enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc')) + if spec.satisfies("@4.7.1:"): + enabled_languages.add('go') + # Rest of install is straightforward. configure("--prefix=%s" % prefix, "--libdir=%s/lib64" % prefix, "--disable-multilib", - "--enable-languages=c,c++,fortran,java,objc,go", + "--enable-languages=" + ','.join(enabled_languages), + "--with-mpc=%s" % spec['mpc'].prefix, + "--with-mpfr=%s" % spec['mpfr'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix, + "--with-libelf=%s" % spec['libelf'].prefix, + "--with-stage1-ldflags=%s" % self.rpath_args, + "--with-boot-ldflags=%s" % self.rpath_args, "--enable-lto", "--with-quad") make() -- cgit v1.2.3-70-g09d2 From 0ac6ffb3ef8aee18b5b559df691b6d81d5a00d35 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 23 Jan 2015 00:05:23 -0800 Subject: Add extra gcc dependencies. - not used until optional/conditional deps are implemented. 
--- var/spack/packages/cloog/package.py | 26 ++++++++++++++++++++++++++ var/spack/packages/isl/package.py | 17 +++++++++++++++++ var/spack/packages/ppl/package.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 71 insertions(+) create mode 100644 var/spack/packages/cloog/package.py create mode 100644 var/spack/packages/isl/package.py create mode 100644 var/spack/packages/ppl/package.py diff --git a/var/spack/packages/cloog/package.py b/var/spack/packages/cloog/package.py new file mode 100644 index 0000000000..814a33c76c --- /dev/null +++ b/var/spack/packages/cloog/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Cloog(Package): + """CLooG is a free software and library to generate code for + scanning Z-polyhedra. That is, it finds a code (e.g. in C, + FORTRAN...) that reaches each integral point of one or more + parameterized polyhedra.""" + + homepage = "http://www.cloog.org" + url = "http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz" + list_url = "http://www.bastoul.net/cloog/pages/download" + + version('0.18.1', 'e34fca0540d840e5d0f6427e98c92252') + version('0.18.0', 'be78a47bd82523250eb3e91646db5b3d') + version('0.17.0', '0aa3302c81f65ca62c114e5264f8a802') + + depends_on("gmp") + depends_on("isl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-osl=no", + "--with-isl=%s" % spec['isl'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix) + make() + make("install") diff --git a/var/spack/packages/isl/package.py b/var/spack/packages/isl/package.py new file mode 100644 index 0000000000..836ef3ea40 --- /dev/null +++ b/var/spack/packages/isl/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Isl(Package): + """isl is a thread-safe C library for manipulating sets and + relations of integer points bounded by affine constraints.""" + homepage = "http://isl.gforge.inria.fr" + url = "http://isl.gforge.inria.fr/isl-0.14.tar.bz2" + + version('0.14', 'acd347243fca5609e3df37dba47fd0bb') + + depends_on("gmp") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-gmp-prefix=%s" % spec['gmp'].prefix) + make() + make("install") diff --git a/var/spack/packages/ppl/package.py b/var/spack/packages/ppl/package.py new file mode 100644 index 0000000000..018d5c523d --- /dev/null +++ b/var/spack/packages/ppl/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Ppl(Package): + """The Parma Polyhedra Library (PPL) provides numerical + abstractions especially targeted at applications in the field of + analysis and verification of complex systems. These abstractions + include convex polyhedra, some special classes of polyhedra shapes + that offer interesting complexity/precision tradeoffs, and grids + which represent regularly spaced points that satisfy a set of + linear congruence relations. 
The library also supports finite + powersets and products of polyhedra and grids, a mixed integer + linear programming problem solver using an exact-arithmetic + version of the simplex algorithm, a parametric integer programming + solver, and primitives for termination analysis via the automatic + synthesis of linear ranking functions.""" + + homepage = "http://bugseng.com/products/ppl/" + url = "http://bugseng.com/products/ppl/download/ftp/releases/1.1/ppl-1.1.tar.gz" + + version('1.1', '4f2422c0ef3f409707af32108deb30a7') + + depends_on("gmp") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-gmp=%s" % spec['gmp'].prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 6a496ef62038f40ad1f4c22abd816f990fdfd4db Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Fri, 23 Jan 2015 16:58:15 -0800 Subject: PSAPI v0.6 -> Mitos v0.7 --- var/spack/packages/Mitos/package.py | 18 ++++++++++++++++++ var/spack/packages/psapi/package.py | 18 ------------------ 2 files changed, 18 insertions(+), 18 deletions(-) create mode 100644 var/spack/packages/Mitos/package.py delete mode 100644 var/spack/packages/psapi/package.py diff --git a/var/spack/packages/Mitos/package.py b/var/spack/packages/Mitos/package.py new file mode 100644 index 0000000000..272360a55c --- /dev/null +++ b/var/spack/packages/Mitos/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Mitos(Package): + """Mitos is a library and a tool for collecting sampled memory + performance data to view with MemAxes""" + + homepage = "https://github.com/scalability-llnl/Mitos" + url = "https://github.com/scalability-llnl/Mitos" + + version('0.7', git='https://github.com/scalability-llnl/Mitos.git', tag='v0.7') + + depends_on('dyninst') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/packages/psapi/package.py b/var/spack/packages/psapi/package.py deleted file mode 100644 index 489c4f530e..0000000000 --- a/var/spack/packages/psapi/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Psapi(Package): - """PSAPI is a library and a tool for collecting sampled memory - performance data to view with MemAxes""" - - homepage = "https://github.com/scalability-llnl/PSAPI" - url = "http://www.example.com/memaxes-psapi-1.0.tar.gz" - - version('0.6', git='https://github.com/scalability-llnl/PSAPI.git', tag='v0.6') - - depends_on('dyninst') - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") -- cgit v1.2.3-70-g09d2 From ba593ccb260e46568fc2620ad5f300f89495a604 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 7 Jan 2015 17:41:24 -0500 Subject: Fix bug in mirror path construction. 
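With this change the mirror layout is derived in one place. For a concrete spec the two helpers compose roughly like this (illustrative values for a libelf spec):

    mirror_archive_filename(spec)   # -> 'libelf-0.8.13.tar.gz'         (name-version.ext)
    mirror_archive_path(spec)       # -> 'libelf/libelf-0.8.13.tar.gz'  (relative to the mirror root)

spack mirror create writes the archive at that relative path under the mirror root, and Stage is now handed the same relative path through its mirror_path argument, so the two sides agree.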
--- lib/spack/spack/mirror.py | 13 ++++++++----- lib/spack/spack/package.py | 2 +- lib/spack/spack/test/mirror.py | 8 ++++---- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 929c514b61..114c7b6a35 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -46,7 +46,7 @@ from spack.util.compression import extension def mirror_archive_filename(spec): - """Get the path that this spec will live at within a mirror.""" + """Get the name of the spec's archive in the mirror.""" if not spec.version.concrete: raise ValueError("mirror.path requires spec with concrete version.") @@ -61,6 +61,11 @@ def mirror_archive_filename(spec): return "%s-%s.%s" % (spec.package.name, spec.version, ext) +def mirror_archive_path(spec): + """Get the relative path to the spec's archive within a mirror.""" + return join_path(spec.name, mirror_archive_filename(spec)) + + def get_matching_versions(specs, **kwargs): """Get a spec for EACH known version matching any spec in the list.""" matching = [] @@ -141,12 +146,10 @@ def create(path, specs, **kwargs): stage = None try: # create a subdirectory for the current package@version - subdir = join_path(mirror_root, pkg.name) + archive_path = join_path(path, mirror_archive_path(spec)) + subdir = os.path.dirname(archive_path) mkdirp(subdir) - archive_file = mirror_archive_filename(spec) - archive_path = join_path(subdir, archive_file) - if os.path.exists(archive_path): tty.msg("Already added %s" % spec.format("$_$@")) present.append(spec) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fe4fc748d8..1dfd3d1c83 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -459,7 +459,7 @@ class Package(object): raise ValueError("Can only get a stage for a concrete package.") if self._stage is None: - mp = spack.mirror.mirror_archive_filename(self.spec) + mp = spack.mirror.mirror_archive_path(self.spec) self._stage = Stage( self.fetcher, mirror_path=mp, name=self.spec.short_spec) return self._stage diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 51334198ec..89ab14359e 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -44,7 +44,7 @@ class MirrorTest(MockPackagesTest): self.repos = {} - def set_up_package(self, name, mock_repo_class, url_attr): + def set_up_package(self, name, MockRepoClass, url_attr): """Use this to set up a mock package to be mirrored. Each package needs us to: 1. Set up a mock repo/archive to fetch from. @@ -56,7 +56,7 @@ class MirrorTest(MockPackagesTest): # Get the package and fix its fetch args to point to a mock repo pkg = spack.db.get(spec) - repo = mock_repo_class() + repo = MockRepoClass() self.repos[name] = repo # change the fetch args of the first (only) version. @@ -71,7 +71,7 @@ class MirrorTest(MockPackagesTest): for name, repo in self.repos.items(): if repo.stage: - repo.stage.destroy() + pass #repo.stage.destroy() self.repos.clear() @@ -129,7 +129,7 @@ class MirrorTest(MockPackagesTest): self.assertTrue(all(l in exclude for l in dcmp.left_only)) finally: - stage.destroy() + pass #stage.destroy() def test_git_mirror(self): -- cgit v1.2.3-70-g09d2 From 88afad3e46326c56aaeaed6adc2058033ad7ad33 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 6 Jan 2015 14:47:51 -0500 Subject: Directory layout can now track installed extensions per package. 
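The bookkeeping lives next to the installed package: each prefix gets an extension file (.extensions by default) listing the specs linked into it. Intended use, sketched with hypothetical concrete specs:

    layout = spack.install_layout
    # python_spec and ext_spec are concrete Specs, e.g. python and py-setuptools
    layout.add_extension(python_spec, ext_spec)      # record ext_spec for python_spec
    layout.get_extensions(python_spec)               # -> set of Specs recorded so far
    layout.remove_extension(python_spec, ext_spec)   # drop the record again

add_extension refuses to record the same extension twice and rejects a second version of a package that is already recorded; remove_extension raises if the extension was never recorded.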
--- lib/spack/spack/directory_layout.py | 97 ++++++++++++++++++++++++++++++++++++- 1 file changed, 96 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 42cac0c9d2..4ab9a515cf 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -71,6 +71,21 @@ class DirectoryLayout(object): raise NotImplementedError() + def get_extensions(self, spec): + """Get a set of currently installed extension packages for a spec.""" + raise NotImplementedError() + + + def add_extension(self, spec, extension_spec): + """Add to the list of currently installed extensions.""" + raise NotImplementedError() + + + def remove_extension(self, spec, extension_spec): + """Remove from the list of currently installed extensions.""" + raise NotImplementedError() + + def path_for_spec(self, spec): """Return an absolute path from the root to a directory for the spec.""" _check_concrete(spec) @@ -134,9 +149,11 @@ class SpecHashDirectoryLayout(DirectoryLayout): """Prefix size is number of characters in the SHA-1 prefix to use to make each hash unique. """ - spec_file_name = kwargs.get('spec_file_name', '.spec') + spec_file_name = kwargs.get('spec_file_name', '.spec') + extension_file_name = kwargs.get('extension_file_name', '.extensions') super(SpecHashDirectoryLayout, self).__init__(root) self.spec_file_name = spec_file_name + self.extension_file_name = extension_file_name def relative_path_for_spec(self, spec): @@ -225,6 +242,55 @@ class SpecHashDirectoryLayout(DirectoryLayout): yield spec + def extension_file_path(self, spec): + """Gets full path to an installed package's extension file""" + _check_concrete(spec) + return join_path(self.path_for_spec(spec), self.extension_file_name) + + + def get_extensions(self, spec): + path = self.extension_file_path(spec) + + extensions = set() + if os.path.exists(path): + with closing(open(path)) as spec_file: + for line in spec_file: + try: + extensions.add(Spec(line)) + except SpecError, e: + raise InvalidExtensionSpecError(str(e)) + return extensions + + + def write_extensions(self, extensions): + path = self.extension_file_path(spec) + with closing(open(path, 'w')) as spec_file: + for extension in sorted(extensions): + spec_file.write("%s\n" % extensions) + + + def add_extension(self, spec, extension_spec): + exts = get_extensions(spec) + if extension_spec in exts: + raise ExtensionAlreadyInstalledError(spec, extension_spec) + else: + for already_installed in exts: + if spec.name == extension_spec.name: + raise ExtensionConflictError(spec, extension_spec, already_installed) + + exts.add(extension_spec) + self.write_extensions(exts) + + + def remove_extension(self, spec, extension_spec): + exts = get_extensions(spec) + if not extension_spec in exts: + raise NoSuchExtensionError(spec, extension_spec) + + exts.remove(extension_spec) + self.write_extensions(exts) + + class DirectoryLayoutError(SpackError): """Superclass for directory layout errors.""" def __init__(self, message): @@ -250,3 +316,32 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): def __init__(self, path): super(InstallDirectoryAlreadyExistsError, self).__init__( "Install path %s already exists!") + + +class InvalidExtensionSpecError(DirectoryLayoutError): + """Raised when an extension file has a bad spec in it.""" + def __init__(self, message): + super(InvalidExtensionSpecError, self).__init__(message) + + +class ExtensionAlreadyInstalledError(DirectoryLayoutError): + """Raised when an extension is 
added to a package that already has it.""" + def __init__(self, spec, extension_spec): + super(ExtensionAlreadyInstalledError, self).__init__( + "%s is already installed in %s" % (extension_spec, spec)) + + +class ExtensionConflictError(DirectoryLayoutError): + """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, extension_spec, conflict): + super(ExtensionConflictError, self).__init__( + "%s cannot be installed in %s because it conflicts with %s."% ( + extension_spec, spec, conflict)) + + +class NoSuchExtensionError(DirectoryLayoutError): + """Raised when an extension isn't there on remove.""" + def __init__(self, spec, extension_spec): + super(NoSuchExtensionError, self).__init__( + "%s cannot be removed from %s beacuse it's not installed."% ( + extension_spec, spec, conflict)) -- cgit v1.2.3-70-g09d2 From ebe0c1d83ac1380a6320a8dadcfa2ad4fc07c279 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 6 Jan 2015 14:49:13 -0500 Subject: New "extends" relation adds another special list to the package class. --- lib/spack/spack/package.py | 3 +++ lib/spack/spack/relations.py | 26 +++++++++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 1dfd3d1c83..c256ea479f 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -314,6 +314,9 @@ class Package(object): """Specs of virtual packages provided by this package, keyed by name.""" provided = {} + """Specs of packages this one extends, keyed by name.""" + extendees = {} + """Specs of conflicting packages, keyed by name. """ conflicted = {} diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index b1f4348945..aaca9c199e 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -107,8 +107,9 @@ def depends_on(*specs): """Adds a dependencies local variable in the locals of the calling class, based on args. """ pkg = get_calling_package_name() + clocals = caller_locals() + dependencies = clocals.setdefault('dependencies', {}) - dependencies = caller_locals().setdefault('dependencies', {}) for string in specs: for spec in spack.spec.parse(string): if pkg == spec.name: @@ -116,6 +117,29 @@ def depends_on(*specs): dependencies[spec.name] = spec +def extends(*specs): + """Same as depends_on, but dependency is symlinked into parent prefix. + + This is for Python and other language modules where the module + needs to be installed into the prefix of the Python installation. + Spack handles this by installing modules into their own prefix, + but allowing ONE module version to be symlinked into a parent + Python install at a time. + + """ + pkg = get_calling_package_name() + clocals = caller_locals() + dependencies = clocals.setdefault('dependencies', {}) + extendees = clocals.setdefault('extendees', {}) + + for string in specs: + for spec in spack.spec.parse(string): + if pkg == spec.name: + raise CircularReferenceError('depends_on', pkg) + dependencies[spec.name] = spec + extendees[spec.name] = spec + + def provides(*specs, **kwargs): """Allows packages to provide a virtual dependency. If a package provides 'mpi', other packages can declare that they depend on "mpi", and spack -- cgit v1.2.3-70-g09d2 From adb7d614e69a0c176c86b3b4aaa1e81d403d0a71 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 6 Jan 2015 14:50:14 -0500 Subject: Add pre-install and pre-uninstall hooks. 
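A hook module under lib/spack/spack/hooks/ just defines functions named after the hook points, and each is called with the package instance. A hypothetical module using the two new points:

    # lib/spack/spack/hooks/example.py   (hypothetical)

    def pre_install(pkg):
        # runs in the build (child) process, after the install path is
        # created but before pkg.install() does any real work
        pass

    def pre_uninstall(pkg):
        # runs just before pkg's prefix is removed
        pass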
--- lib/spack/spack/hooks/__init__.py | 7 ++++++- lib/spack/spack/package.py | 8 ++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 98b7f2323f..1c44e8abaa 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -31,7 +31,9 @@ Currently the following hooks are supported: + * pre_install() * post_install() + * pre_uninstall() * post_uninstall() This can be used to implement support for things like module @@ -70,5 +72,8 @@ class HookRunner(object): # # Define some functions that can be called to fire off hooks. # -post_install = HookRunner('post_install') +pre_install = HookRunner('pre_install') +post_install = HookRunner('post_install') + +pre_uninstall = HookRunner('pre_uninstall') post_uninstall = HookRunner('post_uninstall') diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c256ea479f..aa79721266 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -768,6 +768,10 @@ class Package(object): # package naming scheme it likes. spack.install_layout.make_path_for_spec(self.spec) + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) + # Set up process's build environment before running install. self.stage.chdir_to_source() build_env.setup_package(self) @@ -862,6 +866,10 @@ class Package(object): "The following installed packages depend on it: %s" % ' '.join(formatted_deps)) + # Pre-uninstall hook runs first. + spack.hooks.pre_uninstall(self) + + # Uninstalling in Spack only requires removing the prefix. self.remove_prefix() tty.msg("Successfully uninstalled %s." % self.spec.short_spec) -- cgit v1.2.3-70-g09d2 From 2c1eda66c4d7a0df8f0a05ad16be38942f54dcee Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 6 Jan 2015 14:50:40 -0500 Subject: First python extension package: setuptools --- var/spack/packages/py-setuptools/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 var/spack/packages/py-setuptools/package.py diff --git a/var/spack/packages/py-setuptools/package.py b/var/spack/packages/py-setuptools/package.py new file mode 100644 index 0000000000..e2c4e1a0be --- /dev/null +++ b/var/spack/packages/py-setuptools/package.py @@ -0,0 +1,19 @@ +from spack import * + +class PySetuptools(Package): + """Easily download, build, install, upgrade, and uninstall Python packages.""" + homepage = "https://pypi.python.org/pypi/setuptools" + url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" + + version('11.3.1', '01f69212e019a2420c1693fb43593930') + + extends('python') + + def install(self, spec, prefix): + site_packages_dir = "%s/lib/python2.7/site-packages" % prefix + mkdirp(site_packages_dir) + + env['PYTHONPATH'] = site_packages_dir + + python = which('python') + python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From 7215aee224150d954e8a5bd6b632b6d8f66948d2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 7 Jan 2015 17:45:14 -0500 Subject: do_install() passes kwargs to dependencies. 
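This is a small change, but it makes install options behave consistently across a whole dependency DAG: whatever keyword arguments are given to a package's do_install() are now forwarded when its dependencies are installed. A hedged sketch of the effect (assuming, as the surrounding code suggests, that flags such as fake_install are accepted as keyword arguments):

    # Illustrative only: the same keyword arguments now reach each
    # dependency's do_install() call as well.
    pkg = spack.db.get(spec)
    pkg.do_install(fake_install=True)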
--- lib/spack/spack/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index aa79721266..04f0d842da 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -744,7 +744,7 @@ class Package(object): tty.msg("Installing %s" % self.name) if not ignore_deps: - self.do_install_dependencies() + self.do_install_dependencies(**kwargs) start_time = time.time() if not fake_install: @@ -832,10 +832,10 @@ class Package(object): spack.hooks.post_install(self) - def do_install_dependencies(self): + def do_install_dependencies(self, **kwargs): # Pass along paths of dependencies here for dep in self.spec.dependencies.values(): - dep.package.do_install() + dep.package.do_install(**kwargs) @property -- cgit v1.2.3-70-g09d2 From 99775434785779d223997a9e41972da470214e5d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 7 Jan 2015 11:48:21 -0800 Subject: Added feature: package extensions - packages can be "extended" by others - allows extension to be symlinked into extendee's prefix. - used for python modules. - first module: py-setuptools --- lib/spack/llnl/util/filesystem.py | 82 +++++++++++++++++++++++++++++++++- lib/spack/spack/__init__.py | 2 +- lib/spack/spack/directory_layout.py | 55 ++++++++++++++++------- lib/spack/spack/hooks/extensions.py | 49 +++++++++++++++++++++ lib/spack/spack/package.py | 85 ++++++++++++++++++++++++++++++++++++ lib/spack/spack/relations.py | 4 +- var/spack/packages/python/package.py | 3 ++ 7 files changed, 261 insertions(+), 19 deletions(-) create mode 100644 lib/spack/spack/hooks/extensions.py diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 0578415653..9fb76d3a35 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -24,7 +24,8 @@ ############################################################################## __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', - 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe'] + 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe', + 'check_link_tree', 'merge_link_tree', 'unmerge_link_tree'] import os import sys @@ -222,3 +223,82 @@ def ancestor(dir, n=1): def can_access(file_name): """True if we have read/write access to the file.""" return os.access(file_name, os.R_OK|os.W_OK) + + +def traverse_link_tree(src_root, dest_root, follow_nonexisting=True, **kwargs): + # Yield directories before or after their contents. + order = kwargs.get('order', 'pre') + if order not in ('pre', 'post'): + raise ValueError("Order must be 'pre' or 'post'.") + + # List of relative paths to ignore under the src root. + ignore = kwargs.get('ignore', None) + if isinstance(ignore, basestring): + ignore = (ignore,) + + for dirpath, dirnames, filenames in os.walk(src_root): + rel_path = dirpath[len(src_root):] + rel_path = rel_path.lstrip(os.path.sep) + dest_dirpath = os.path.join(dest_root, rel_path) + + # Don't descend into ignored directories + if ignore and dest_dirpath in ignore: + return + + # Don't descend into dirs in dest that do not exist in src. 
+ if not follow_nonexisting: + dirnames[:] = [ + d for d in dirnames + if os.path.exists(os.path.join(dest_dirpath, d))] + + # preorder yields directories before children + if order == 'pre': + yield (dirpath, dest_dirpath) + + for name in filenames: + src_file = os.path.join(dirpath, name) + dest_file = os.path.join(dest_dirpath, name) + + # Ignore particular paths inside the install root. + src_relpath = src_file[len(src_root):] + src_relpath = src_relpath.lstrip(os.path.sep) + if ignore and src_relpath in ignore: + continue + + yield (src_file, dest_file) + + # postorder yields directories after children + if order == 'post': + yield (dirpath, dest_dirpath) + + + +def check_link_tree(src_root, dest_root, **kwargs): + for src, dest in traverse_link_tree(src_root, dest_root, False, **kwargs): + if os.path.exists(dest) and not os.path.isdir(dest): + return dest + return None + + +def merge_link_tree(src_root, dest_root, **kwargs): + kwargs['order'] = 'pre' + for src, dest in traverse_link_tree(src_root, dest_root, **kwargs): + if os.path.isdir(src): + mkdirp(dest) + else: + assert(not os.path.exists(dest)) + os.symlink(src, dest) + + +def unmerge_link_tree(src_root, dest_root, **kwargs): + kwargs['order'] = 'post' + for src, dest in traverse_link_tree(src_root, dest_root, **kwargs): + if os.path.isdir(dest): + if not os.listdir(dest): + # TODO: what if empty directories were present pre-merge? + shutil.rmtree(dest, ignore_errors=True) + + elif os.path.exists(dest): + if not os.path.islink(dest): + raise ValueError("%s is not a link tree!" % dest) + os.remove(dest) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 6697e00e40..6763411f7d 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -138,7 +138,7 @@ sys_type = None # should live. This file is overloaded for spack core vs. for packages. # __all__ = ['Package', 'Version', 'when', 'ver'] -from spack.package import Package +from spack.package import Package, ExtensionConflictError from spack.version import Version, ver from spack.multimethod import when diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 4ab9a515cf..ff327ed504 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -53,6 +53,19 @@ class DirectoryLayout(object): self.root = root + @property + def hidden_file_paths(self): + """Return a list of hidden files used by the directory layout. + + Paths are relative to the root of an install directory. + + If the directory layout uses no hidden files to maintain + state, this should return an empty container, e.g. [] or (,). + + """ + raise NotImplementedError() + + def all_specs(self): """To be implemented by subclasses to traverse all specs for which there is a directory within the root. 
@@ -156,6 +169,11 @@ class SpecHashDirectoryLayout(DirectoryLayout): self.extension_file_name = extension_file_name + @property + def hidden_file_paths(self): + return ('.spec', '.extensions') + + def relative_path_for_spec(self, spec): _check_concrete(spec) dir_name = spec.format('$_$@$+$#') @@ -249,28 +267,32 @@ class SpecHashDirectoryLayout(DirectoryLayout): def get_extensions(self, spec): - path = self.extension_file_path(spec) + _check_concrete(spec) + path = self.extension_file_path(spec) extensions = set() if os.path.exists(path): - with closing(open(path)) as spec_file: - for line in spec_file: + with closing(open(path)) as ext_file: + for line in ext_file: try: - extensions.add(Spec(line)) - except SpecError, e: + extensions.add(Spec(line.strip())) + except spack.error.SpackError, e: raise InvalidExtensionSpecError(str(e)) return extensions - def write_extensions(self, extensions): + def write_extensions(self, spec, extensions): path = self.extension_file_path(spec) with closing(open(path, 'w')) as spec_file: for extension in sorted(extensions): - spec_file.write("%s\n" % extensions) + spec_file.write("%s\n" % extension) def add_extension(self, spec, extension_spec): - exts = get_extensions(spec) + _check_concrete(spec) + _check_concrete(extension_spec) + + exts = self.get_extensions(spec) if extension_spec in exts: raise ExtensionAlreadyInstalledError(spec, extension_spec) else: @@ -279,16 +301,19 @@ class SpecHashDirectoryLayout(DirectoryLayout): raise ExtensionConflictError(spec, extension_spec, already_installed) exts.add(extension_spec) - self.write_extensions(exts) + self.write_extensions(spec, exts) def remove_extension(self, spec, extension_spec): - exts = get_extensions(spec) + _check_concrete(spec) + _check_concrete(extension_spec) + + exts = self.get_extensions(spec) if not extension_spec in exts: raise NoSuchExtensionError(spec, extension_spec) exts.remove(extension_spec) - self.write_extensions(exts) + self.write_extensions(spec, exts) class DirectoryLayoutError(SpackError): @@ -328,7 +353,7 @@ class ExtensionAlreadyInstalledError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" def __init__(self, spec, extension_spec): super(ExtensionAlreadyInstalledError, self).__init__( - "%s is already installed in %s" % (extension_spec, spec)) + "%s is already installed in %s" % (extension_spec.short_spec, spec.short_spec)) class ExtensionConflictError(DirectoryLayoutError): @@ -336,12 +361,12 @@ class ExtensionConflictError(DirectoryLayoutError): def __init__(self, spec, extension_spec, conflict): super(ExtensionConflictError, self).__init__( "%s cannot be installed in %s because it conflicts with %s."% ( - extension_spec, spec, conflict)) + extension_spec.short_spec, spec.short_spec, conflict.short_spec)) class NoSuchExtensionError(DirectoryLayoutError): """Raised when an extension isn't there on remove.""" def __init__(self, spec, extension_spec): super(NoSuchExtensionError, self).__init__( - "%s cannot be removed from %s beacuse it's not installed."% ( - extension_spec, spec, conflict)) + "%s cannot be removed from %s because it's not installed."% ( + extension_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py new file mode 100644 index 0000000000..444472bffa --- /dev/null +++ b/lib/spack/spack/hooks/extensions.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore 
National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import spack + + +def post_install(pkg): + assert(pkg.spec.concrete) + for name, spec in pkg.extendees.items(): + ext = pkg.spec[name] + epkg = ext.package + if epkg.installed: + epkg.do_activate(pkg) + + +def pre_uninstall(pkg): + assert(pkg.spec.concrete) + + # Need to do this b/c uninstall does not automatically do it. + # TODO: store full graph info in stored .spec file. + pkg.spec.normalize() + + for name, spec in pkg.extendees.items(): + ext = pkg.spec[name] + epkg = ext.package + if epkg.installed: + epkg.do_deactivate(pkg) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 04f0d842da..b7dae552e4 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -329,6 +329,9 @@ class Package(object): """By default we build in parallel. Subclasses can override this.""" parallel = True + """Most packages are NOT extendable. Set to True if you want extensions.""" + extendable = False + def __init__(self, spec): # this determines how the package should be built. @@ -398,6 +401,9 @@ class Package(object): self._fetch_time = 0.0 self._total_time = 0.0 + for name, spec in self.extendees.items(): + spack.db.get(spec)._check_extendable() + @property def version(self): @@ -877,6 +883,79 @@ class Package(object): spack.hooks.post_uninstall(self) + def _check_extendable(self): + if not self.extendable: + raise ValueError("Package %s is not extendable!" % self.name) + + + def _sanity_check_extension(self, extension): + self._check_extendable() + if not self.installed: + raise ValueError("Can only (de)activate extensions for installed packages.") + if not extension.installed: + raise ValueError("Extensions must first be installed.") + if not self.name in extension.extendees: + raise ValueError("%s does not extend %s!" % (extension.name, self.name)) + if not self.spec.satisfies(extension.extendees[self.name]): + raise ValueError("%s does not satisfy %s!" % (self.spec, extension.spec)) + + + def do_activate(self, extension): + self._sanity_check_extension(extension) + + self.activate(extension) + spack.install_layout.add_extension(self.spec, extension.spec) + tty.msg("Activated extension %s for %s." + % (extension.spec.short_spec, self.spec.short_spec)) + + + def activate(self, extension): + """Symlinks all files from the extension into extendee's install dir. + + Package authors can override this method to support other + extension mechanisms. 
Spack internals (commands, hooks, etc.) + should call do_activate() method so that proper checks are + always executed. + + """ + conflict = check_link_tree( + extension.prefix, self.prefix, + ignore=spack.install_layout.hidden_file_paths) + + if conflict: + raise ExtensionConflictError(conflict) + + merge_link_tree(extension.prefix, self.prefix, + ignore=spack.install_layout.hidden_file_paths) + + + def do_deactivate(self, extension): + self._sanity_check_extension(extension) + self.deactivate(extension) + + ext = extension.spec + if ext in spack.install_layout.get_extensions(self.spec): + spack.install_layout.remove_extension(self.spec, ext) + + tty.msg("Deactivated extension %s for %s." + % (extension.spec.short_spec, self.spec.short_spec)) + + + def deactivate(self, extension): + """Unlinks all files from extension out of extendee's install dir. + + Package authors can override this method to support other + extension mechanisms. Spack internals (commands, hooks, etc.) + should call do_deactivate() method so that proper checks are + always executed. + + """ + unmerge_link_tree(extension.prefix, self.prefix, + ignore=spack.install_layout.hidden_file_paths) + tty.msg("Deactivated %s as extension of %s." + % (extension.spec.short_spec, self.spec.short_spec)) + + def do_clean(self): if self.stage.expanded_archive_path: self.stage.chdir_to_source() @@ -1068,3 +1147,9 @@ class NoURLError(PackageError): def __init__(self, cls): super(NoURLError, self).__init__( "Package %s has no version with a URL." % cls.__name__) + + +class ExtensionConflictError(PackageError): + def __init__(self, path): + super(ExtensionConflictError, self).__init__( + "Extension blocked by file: %s" % path) diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index aaca9c199e..17bec1664f 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -68,7 +68,7 @@ provides spack install mpileaks ^mvapich spack install mpileaks ^mpich """ -__all__ = [ 'depends_on', 'provides', 'patch', 'version' ] +__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ] import re import inspect @@ -135,7 +135,7 @@ def extends(*specs): for string in specs: for spec in spack.spec.parse(string): if pkg == spec.name: - raise CircularReferenceError('depends_on', pkg) + raise CircularReferenceError('extends', pkg) dependencies[spec.name] = spec extendees[spec.name] = spec diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index e6c3e28820..953be69cc2 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,10 +1,13 @@ from spack import * + class Python(Package): """The Python programming language.""" homepage = "http://www.python.org" url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz" + extendable = True + version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') depends_on("openssl") -- cgit v1.2.3-70-g09d2 From 82946d29147bbe63855f94b9c2ebd4a21cd0a3d6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 8 Jan 2015 22:46:31 -0800 Subject: Move symlink tree routines to LinkTree class. 
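Behavior is intended to be unchanged; the free functions check_link_tree(), merge_link_tree(), and unmerge_link_tree() simply become methods on a LinkTree object rooted at the source directory, which is what the package.py hunk below switches to. Roughly, the new call pattern looks like this (the prefix variable names are placeholders):

    from llnl.util.link_tree import LinkTree

    tree = LinkTree(ext_prefix)                  # root of the tree to link from
    conflict = tree.find_conflict(dest_prefix)   # first file already present in dest
    if conflict is None:
        tree.merge(dest_prefix)                  # symlink files, creating directories
    # ... and later, to undo the merge:
    tree.unmerge(dest_prefix)                    # remove links, prune empty directories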
--- lib/spack/llnl/util/filesystem.py | 82 +------------------ lib/spack/llnl/util/link_tree.py | 168 ++++++++++++++++++++++++++++++++++++++ lib/spack/spack/package.py | 16 ++-- 3 files changed, 176 insertions(+), 90 deletions(-) create mode 100644 lib/spack/llnl/util/link_tree.py diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 9fb76d3a35..0578415653 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -24,8 +24,7 @@ ############################################################################## __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', - 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe', - 'check_link_tree', 'merge_link_tree', 'unmerge_link_tree'] + 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe'] import os import sys @@ -223,82 +222,3 @@ def ancestor(dir, n=1): def can_access(file_name): """True if we have read/write access to the file.""" return os.access(file_name, os.R_OK|os.W_OK) - - -def traverse_link_tree(src_root, dest_root, follow_nonexisting=True, **kwargs): - # Yield directories before or after their contents. - order = kwargs.get('order', 'pre') - if order not in ('pre', 'post'): - raise ValueError("Order must be 'pre' or 'post'.") - - # List of relative paths to ignore under the src root. - ignore = kwargs.get('ignore', None) - if isinstance(ignore, basestring): - ignore = (ignore,) - - for dirpath, dirnames, filenames in os.walk(src_root): - rel_path = dirpath[len(src_root):] - rel_path = rel_path.lstrip(os.path.sep) - dest_dirpath = os.path.join(dest_root, rel_path) - - # Don't descend into ignored directories - if ignore and dest_dirpath in ignore: - return - - # Don't descend into dirs in dest that do not exist in src. - if not follow_nonexisting: - dirnames[:] = [ - d for d in dirnames - if os.path.exists(os.path.join(dest_dirpath, d))] - - # preorder yields directories before children - if order == 'pre': - yield (dirpath, dest_dirpath) - - for name in filenames: - src_file = os.path.join(dirpath, name) - dest_file = os.path.join(dest_dirpath, name) - - # Ignore particular paths inside the install root. - src_relpath = src_file[len(src_root):] - src_relpath = src_relpath.lstrip(os.path.sep) - if ignore and src_relpath in ignore: - continue - - yield (src_file, dest_file) - - # postorder yields directories after children - if order == 'post': - yield (dirpath, dest_dirpath) - - - -def check_link_tree(src_root, dest_root, **kwargs): - for src, dest in traverse_link_tree(src_root, dest_root, False, **kwargs): - if os.path.exists(dest) and not os.path.isdir(dest): - return dest - return None - - -def merge_link_tree(src_root, dest_root, **kwargs): - kwargs['order'] = 'pre' - for src, dest in traverse_link_tree(src_root, dest_root, **kwargs): - if os.path.isdir(src): - mkdirp(dest) - else: - assert(not os.path.exists(dest)) - os.symlink(src, dest) - - -def unmerge_link_tree(src_root, dest_root, **kwargs): - kwargs['order'] = 'post' - for src, dest in traverse_link_tree(src_root, dest_root, **kwargs): - if os.path.isdir(dest): - if not os.listdir(dest): - # TODO: what if empty directories were present pre-merge? - shutil.rmtree(dest, ignore_errors=True) - - elif os.path.exists(dest): - if not os.path.islink(dest): - raise ValueError("%s is not a link tree!" 
% dest) - os.remove(dest) diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py new file mode 100644 index 0000000000..19c2d46938 --- /dev/null +++ b/lib/spack/llnl/util/link_tree.py @@ -0,0 +1,168 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""LinkTree class for setting up trees of symbolic links.""" +__all__ = ['LinkTree'] + +import os +import shutil +from llnl.util.filesystem import mkdirp + + +class LinkTree(object): + """Class to create trees of symbolic links from a source directory. + + LinkTree objects are constructed with a source root. Their + methods allow you to create and delete trees of symbolic links + back to the source tree in specific destination directories. + Trees comprise symlinks only to files; directries are never + symlinked to, to prevent the source directory from ever being + modified. + + """ + def __init__(self, source_root): + self._root = source_root + + + def traverse(self, dest_root, **kwargs): + """Traverse LinkTree root and dest simultaneously. + + Walks the LinkTree directory in pre or post order. Yields + each file in the source directory with a matching path from + the dest directory. e.g., for this tree:: + + root/ + a/ + file1 + file2 + b/ + file3 + + When called on dest, this yields:: + + ('root', 'dest') + ('root/a', 'dest/a') + ('root/a/file1', 'dest/a/file1') + ('root/a/file2', 'dest/a/file2') + ('root/b', 'dest/b') + ('root/b/file3', 'dest/b/file3') + + Optional args: + + order=[pre|post] -- Whether to do pre- or post-order traveral. + + ignore= -- Optional container of root-relative + paths to ignore. + + follow_nonexisting -- Whether to descend into directories in + src that do not exit in dest. + + """ + # Yield directories before or after their contents. + order = kwargs.get('order', 'pre') + if order not in ('pre', 'post'): + raise ValueError("Order must be 'pre' or 'post'.") + + # List of relative paths to ignore under the src root. + ignore = kwargs.get('ignore', None) + if isinstance(ignore, basestring): + ignore = (ignore,) + + # Whether to descend when dirs dont' exist in dest. 
+ follow_nonexisting = kwargs.get('follow_nonexisting', True) + + for dirpath, dirnames, filenames in os.walk(self._root): + rel_path = dirpath[len(self._root):] + rel_path = rel_path.lstrip(os.path.sep) + dest_dirpath = os.path.join(dest_root, rel_path) + + # Don't descend into ignored directories + if ignore and dest_dirpath in ignore: + return + + # Don't descend into dirs in dest that do not exist in src. + if not follow_nonexisting: + dirnames[:] = [ + d for d in dirnames + if os.path.exists(os.path.join(dest_dirpath, d))] + + # preorder yields directories before children + if order == 'pre': + yield (dirpath, dest_dirpath) + + for name in filenames: + src_file = os.path.join(dirpath, name) + dest_file = os.path.join(dest_dirpath, name) + + # Ignore particular paths inside the install root. + src_relpath = src_file[len(self._root):] + src_relpath = src_relpath.lstrip(os.path.sep) + if ignore and src_relpath in ignore: + continue + + yield (src_file, dest_file) + + # postorder yields directories after children + if order == 'post': + yield (dirpath, dest_dirpath) + + + + def find_conflict(self, dest_root, **kwargs): + """Returns the first file in dest that also exists in src.""" + kwargs['follow_nonexisting'] = False + for src, dest in self.traverse(dest_root, **kwargs): + if os.path.exists(dest) and not os.path.isdir(dest): + return dest + return None + + + def merge(self, dest_root, **kwargs): + """Link all files in src into dest, creating directories if necessary.""" + kwargs['order'] = 'pre' + for src, dest in self.traverse(dest_root, **kwargs): + if os.path.isdir(src): + mkdirp(dest) + else: + assert(not os.path.exists(dest)) + os.symlink(src, dest) + + + def unmerge(self, dest_root, **kwargs): + """Unlink all files in dest that exist in src. + + Unlinks directories in dest if they are empty. + + """ + kwargs['order'] = 'post' + for src, dest in self.traverse(dest_root, **kwargs): + if os.path.isdir(dest): + if not os.listdir(dest): + # TODO: what if empty directories were present pre-merge? + shutil.rmtree(dest, ignore_errors=True) + + elif os.path.exists(dest): + if not os.path.islink(dest): + raise ValueError("%s is not a link tree!" % dest) + os.remove(dest) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b7dae552e4..da251dc4e8 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -45,6 +45,7 @@ import textwrap from StringIO import StringIO import llnl.util.tty as tty +from llnl.util.link_tree import LinkTree from llnl.util.filesystem import * from llnl.util.lang import * @@ -918,15 +919,12 @@ class Package(object): always executed. """ - conflict = check_link_tree( - extension.prefix, self.prefix, - ignore=spack.install_layout.hidden_file_paths) - + tree = LinkTree(extension.prefix) + conflict = tree.find_conflict( + self.prefix, ignore=spack.install_layout.hidden_file_paths) if conflict: raise ExtensionConflictError(conflict) - - merge_link_tree(extension.prefix, self.prefix, - ignore=spack.install_layout.hidden_file_paths) + tree.merge(self.prefix, ignore=spack.install_layout.hidden_file_paths) def do_deactivate(self, extension): @@ -950,8 +948,8 @@ class Package(object): always executed. """ - unmerge_link_tree(extension.prefix, self.prefix, - ignore=spack.install_layout.hidden_file_paths) + tree = LinkTree(extension.prefix) + tree.unmerge(self.prefix, ignore=spack.install_layout.hidden_file_paths) tty.msg("Deactivated %s as extension of %s." 
% (extension.spec.short_spec, self.spec.short_spec)) -- cgit v1.2.3-70-g09d2 From bcccf020204a556e382c0af2897ad9126bb24984 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Jan 2015 19:37:01 -0800 Subject: Add setup_extension_environment() method. - lets packages do some setup before their extensions run install() --- lib/spack/spack/package.py | 30 +++++++++++++++++++++++++++++ var/spack/packages/py-setuptools/package.py | 6 ------ var/spack/packages/python/package.py | 23 +++++++++++++++++++++- 3 files changed, 52 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index da251dc4e8..8504b96fcf 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -783,6 +783,12 @@ class Package(object): self.stage.chdir_to_source() build_env.setup_package(self) + # Allow extendees to further set up the environment. + for ext_name in self.extendees: + ext_spec = self.spec[ext_name] + ext_spec.package.setup_extension_environment( + self.module, ext_spec, self.spec) + if fake_install: self.do_fake_install() else: @@ -854,6 +860,30 @@ class Package(object): fromlist=[self.__class__.__name__]) + def setup_extension_environment(self, module, spec, ext_spec): + """Called before the install() method of extensions. + + Default implementation does nothing, but this can be + overridden by an extendable package to set up the install + environment for its extensions. This is useful if there are + some common steps to installing all extensions for a + certain package. + + Some examples: + + 1. Installing python modules generally requires PYTHONPATH to + point to the lib/pythonX.Y/site-packages directory in the + module's install prefix. This could set that variable. + + 2. Extensions often need to invoke the 'python' interpreter + from the Python installation being extended. This routine can + put a 'python' Execuable object in the module scope for the + extension package to simplify extension installs. + + """ + pass + + def install(self, spec, prefix): """Package implementations override this with their own build configuration.""" raise InstallError("Package %s provides no install method!" % self.name) diff --git a/var/spack/packages/py-setuptools/package.py b/var/spack/packages/py-setuptools/package.py index e2c4e1a0be..755288d55c 100644 --- a/var/spack/packages/py-setuptools/package.py +++ b/var/spack/packages/py-setuptools/package.py @@ -10,10 +10,4 @@ class PySetuptools(Package): extends('python') def install(self, spec, prefix): - site_packages_dir = "%s/lib/python2.7/site-packages" % prefix - mkdirp(site_packages_dir) - - env['PYTHONPATH'] = site_packages_dir - - python = which('python') python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 953be69cc2..9700179ab8 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,5 +1,5 @@ from spack import * - +import os class Python(Package): """The Python programming language.""" @@ -26,3 +26,24 @@ class Python(Package): "--enable-shared") make() make("install") + + + def setup_extension_environment(self, module, spec, ext_spec): + """Called before python modules' install() methods. 
+ + In most cases, extensions will only need to have one line:: + + python('setup.py', 'install', '--prefix=%s' % prefix) + """ + # Python extension builds can have a global python executable function + module.python = Executable(join_path(spec.prefix.bin, 'python')) + + # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. + module.python_lib_dir = join_path(ext_spec.prefix.lib, 'python%d.%d' % self.version[:2]) + module.site_packages_dir = join_path(module.python_lib_dir, 'site-packages') + + # Add site packages directory to the PYTHONPATH + os.environ['PYTHONPATH'] = module.site_packages_dir + + # Make the site packages directory if it does not exist already. + mkdirp(module.site_packages_dir) -- cgit v1.2.3-70-g09d2 From d13bbeb605f56214db919f6f122a8fa6ba67ddbc Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 10 Jan 2015 19:52:07 -0800 Subject: Add PYTHONPATH to modules for python extensions. --- lib/spack/spack/modules.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 755e9ea900..7d2ca97a62 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -49,6 +49,7 @@ import os import re import textwrap import shutil +from glob import glob from contextlib import closing import llnl.util.tty as tty @@ -123,6 +124,13 @@ class EnvModule(object): if os.path.isdir(directory): add_path(var, directory) + # Add python path unless it's an actual python installation + # TODO: is there a better way to do this? + if self.spec.name != 'python': + site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages")) + if site_packages: + add_path('PYTHONPATH', site_packages[0]) + # short description is just the package + version # TODO: maybe packages can optionally provide it. self.short_description = self.spec.format("$_ $@") -- cgit v1.2.3-70-g09d2 From acc62abbd08f6d44c930b0ceed74fb9e47f365cf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 12 Jan 2015 22:39:18 -0800 Subject: Rework do_activate/activate and do_deactivate/deactivate semantics. - packages can now extend only one other package. - do_activate() and do_deactivate() are now called on the extension, and they automatically find the extendee - activate() and deactivate() are still called on the extendee and are passed the extension. --- lib/spack/spack/cmd/__init__.py | 15 ++++++ lib/spack/spack/cmd/location.py | 47 ++++++++---------- lib/spack/spack/cmd/uninstall.py | 1 - lib/spack/spack/hooks/extensions.py | 15 +----- lib/spack/spack/package.py | 97 ++++++++++++++++++++++++------------- lib/spack/spack/packages.py | 2 + lib/spack/spack/relations.py | 2 + 7 files changed, 105 insertions(+), 74 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 537db536dd..b96ac5af51 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10): return line_list[:max_num-1] + ['...'] + line_list[-1:] else: return line_list + + +def disambiguate_spec(spec): + matching_specs = spack.db.get_installed(spec) + if not matching_specs: + tty.die("Spec '%s' matches no installed packages." % spec) + + elif len(matching_specs) > 1: + args = ["%s matches multiple packages."
% spec, + "Matching packages:"] + args += [" " + str(s) for s in matching_specs] + args += ["Use a more specific spec."] + tty.die(*args) + + return matching_specs[0] diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 509c336b69..810c34d0a6 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -77,37 +77,30 @@ def location(parser, args): tty.die("You must supply a spec.") if len(specs) != 1: tty.die("Too many specs. Supply only one.") - spec = specs[0] if args.install_dir: # install_dir command matches against installed specs. - matching_specs = spack.db.get_installed(spec) - if not matching_specs: - tty.die("Spec '%s' matches no installed packages." % spec) + spec = spack.cmd.disambiguate_spec(specs[0]) + print spec.prefix - elif len(matching_specs) > 1: - args = ["%s matches multiple packages." % spec, - "Matching packages:"] - args += [" " + str(s) for s in matching_specs] - args += ["Use a more specific spec."] - tty.die(*args) + else: + spec = specs[0] - print matching_specs[0].prefix + if args.package_dir: + # This one just needs the spec name. + print join_path(spack.db.root, spec.name) - elif args.package_dir: - # This one just needs the spec name. - print join_path(spack.db.root, spec.name) + else: + # These versions need concretized specs. + spec.concretize() + pkg = spack.db.get(spec) + + if args.stage_dir: + print pkg.stage.path + + else: # args.build_dir is the default. + if not pkg.stage.source_path: + tty.die("Build directory does not exist yet. Run this to create it:", + "spack stage " + " ".join(args.spec)) + print pkg.stage.source_path - else: - # These versions need concretized specs. - spec.concretize() - pkg = spack.db.get(spec) - - if args.stage_dir: - print pkg.stage.path - - else: # args.build_dir is the default. - if not pkg.stage.source_path: - tty.die("Build directory does not exist yet. Run this to create it:", - "spack stage " + " ".join(args.spec)) - print pkg.stage.source_path diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index e787c460ad..0962942f43 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -65,7 +65,6 @@ def uninstall(parser, args): " b) use a more specific spec."] tty.die(*args) - if len(matching_specs) == 0: tty.die("%s does not match any installed packages." % spec) diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index 444472bffa..2cf506beed 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -27,23 +27,12 @@ import spack def post_install(pkg): - assert(pkg.spec.concrete) - for name, spec in pkg.extendees.items(): - ext = pkg.spec[name] - epkg = ext.package - if epkg.installed: - epkg.do_activate(pkg) + pkg.do_activate() def pre_uninstall(pkg): - assert(pkg.spec.concrete) - # Need to do this b/c uninstall does not automatically do it. # TODO: store full graph info in stored .spec file. 
pkg.spec.normalize() - for name, spec in pkg.extendees.items(): - ext = pkg.spec[name] - epkg = ext.package - if epkg.installed: - epkg.do_deactivate(pkg) + pkg.do_deactivate() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 8504b96fcf..ae34f8ae45 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -315,15 +315,18 @@ class Package(object): """Specs of virtual packages provided by this package, keyed by name.""" provided = {} - """Specs of packages this one extends, keyed by name.""" - extendees = {} - """Specs of conflicting packages, keyed by name. """ conflicted = {} """Patches to apply to newly expanded source, if any.""" patches = {} + """Specs of package this one extends, or None. + + Currently, ppackages can extend at most one other package. + """ + extendees = {} + # # These are default values for instance variables. # @@ -402,8 +405,8 @@ class Package(object): self._fetch_time = 0.0 self._total_time = 0.0 - for name, spec in self.extendees.items(): - spack.db.get(spec)._check_extendable() + if self.is_extension: + spack.db.get(self.extendee_spec)._check_extendable() @property @@ -491,6 +494,34 @@ class Package(object): self._fetcher = f + @property + def extendee_spec(self): + """Spec of the extendee of this package, or None if it is not an extension.""" + if not self.extendees: return None + + name = next(iter(self.extendees)) + if not name in self.spec: + return self.extendees[name] + + # Need to do this to get the concrete version of the spec + return self.spec[name] + + + @property + def is_extension(self): + return len(self.extendees) > 0 + + + @property + def activated(self): + if not self.spec.concrete: + raise ValueError("Only concrete package extensions can be activated.") + if not self.is_extension: + raise ValueError("is_extension called on package that is not an extension.") + + return self.spec in spack.install_layout.get_extensions(self.extendee_spec) + + def preorder_traversal(self, visited=None, **kwargs): """This does a preorder traversal of the package's dependence DAG.""" virtual = kwargs.get("virtual", False) @@ -784,10 +815,9 @@ class Package(object): build_env.setup_package(self) # Allow extendees to further set up the environment. - for ext_name in self.extendees: - ext_spec = self.spec[ext_name] - ext_spec.package.setup_extension_environment( - self.module, ext_spec, self.spec) + if self.is_extension: + self.extendee_spec.package.setup_extension_environment( + self.module, self.extendee_spec, self.spec) if fake_install: self.do_fake_install() @@ -840,7 +870,6 @@ class Package(object): if returncode != 0: sys.exit(1) - # Once everything else is done, run post install hooks spack.hooks.post_install(self) @@ -919,25 +948,30 @@ class Package(object): raise ValueError("Package %s is not extendable!" % self.name) - def _sanity_check_extension(self, extension): - self._check_extendable() - if not self.installed: + def _sanity_check_extension(self): + extendee_package = self.extendee_spec.package + extendee_package._check_extendable() + + if not extendee_package.installed: raise ValueError("Can only (de)activate extensions for installed packages.") - if not extension.installed: + if not self.installed: raise ValueError("Extensions must first be installed.") - if not self.name in extension.extendees: - raise ValueError("%s does not extend %s!" % (extension.name, self.name)) - if not self.spec.satisfies(extension.extendees[self.name]): - raise ValueError("%s does not satisfy %s!" 
% (self.spec, extension.spec)) + if not self.extendee_spec.name in self.extendees: + raise ValueError("%s does not extend %s!" % (self.name, self.extendee.name)) + + def do_activate(self): + """Called on an etension to invoke the extendee's activate method. - def do_activate(self, extension): - self._sanity_check_extension(extension) + Commands should call this routine, and should not call + activate() directly. + """ + self._sanity_check_extension() + self.extendee_spec.package.activate(self) - self.activate(extension) - spack.install_layout.add_extension(self.spec, extension.spec) + spack.install_layout.add_extension(self.extendee_spec, self.spec) tty.msg("Activated extension %s for %s." - % (extension.spec.short_spec, self.spec.short_spec)) + % (self.spec.short_spec, self.extendee_spec.short_spec)) def activate(self, extension): @@ -957,20 +991,19 @@ class Package(object): tree.merge(self.prefix, ignore=spack.install_layout.hidden_file_paths) - def do_deactivate(self, extension): - self._sanity_check_extension(extension) - self.deactivate(extension) + def do_deactivate(self): + self._sanity_check_extension() + self.extendee_spec.package.deactivate(self) - ext = extension.spec - if ext in spack.install_layout.get_extensions(self.spec): - spack.install_layout.remove_extension(self.spec, ext) + if self.spec in spack.install_layout.get_extensions(self.extendee_spec): + spack.install_layout.remove_extension(self.extendee_spec, self.spec) tty.msg("Deactivated extension %s for %s." - % (extension.spec.short_spec, self.spec.short_spec)) + % (self.spec.short_spec, self.extendee_spec.short_spec)) def deactivate(self, extension): - """Unlinks all files from extension out of extendee's install dir. + """Unlinks all files from extension out of this package's install dir. Package authors can override this method to support other extension mechanisms. Spack internals (commands, hooks, etc.) @@ -980,8 +1013,6 @@ class Package(object): """ tree = LinkTree(extension.prefix) tree.unmerge(self.prefix, ignore=spack.install_layout.hidden_file_paths) - tty.msg("Deactivated %s as extension of %s." - % (extension.spec.short_spec, self.spec.short_spec)) def do_clean(self): diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index db43d3909a..bb5a94bcab 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -77,6 +77,8 @@ class PackageDB(object): copy = spec.copy() self.instances[copy] = package_class(copy) except Exception, e: + if spack.debug: + sys.excepthook(*sys.exc_info()) raise FailedConstructorError(spec.name, e) return self.instances[spec] diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index 17bec1664f..60ff5bef34 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -131,6 +131,8 @@ def extends(*specs): clocals = caller_locals() dependencies = clocals.setdefault('dependencies', {}) extendees = clocals.setdefault('extendees', {}) + if extendees: + raise RelationError("Packages can extend at most one other package.") for string in specs: for spec in spack.spec.parse(string): -- cgit v1.2.3-70-g09d2 From 89ccdf92cd42a52fa42d39e2be580eac110e264a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 12 Jan 2015 22:41:27 -0800 Subject: Add activate and deactivate commands for extensions. 
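Both commands are thin wrappers around the do_activate()/do_deactivate() machinery added earlier: each one parses and concretizes the given spec, disambiguates it against installed packages, and then calls the corresponding method on the extension's package. Typical usage looks like this (the package name is just an example):

    spack install py-setuptools
    spack activate py-setuptools
    spack deactivate py-setuptools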
--- lib/spack/spack/cmd/activate.py | 50 +++++++++++++++++++++++++++++++++++++++ lib/spack/spack/cmd/deactivate.py | 50 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) create mode 100644 lib/spack/spack/cmd/activate.py create mode 100644 lib/spack/spack/cmd/deactivate.py diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py new file mode 100644 index 0000000000..c1e23852d6 --- /dev/null +++ b/lib/spack/spack/cmd/activate.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import llnl.util.tty as tty +import spack +import spack.cmd + +description = "Activate a package extension." + +def setup_parser(subparser): + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.") + + +def activate(parser, args): + specs = spack.cmd.parse_specs(args.spec, concretize=True) + if len(specs) != 1: + tty.die("activate requires one spec. %d given." % len(specs)) + + # TODO: remove this hack when DAG info is stored in dir layout. + # This ensures the ext spec is always normalized properly. + spack.db.get(specs[0]) + + spec = spack.cmd.disambiguate_spec(specs[0]) + if spec.package.activated: + tty.die("Package %s is already activated." % specs[0].short_spec) + + spec.package.do_activate() diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py new file mode 100644 index 0000000000..fd13f051df --- /dev/null +++ b/lib/spack/spack/cmd/deactivate.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import llnl.util.tty as tty +import spack +import spack.cmd + +description = "Deactivate a package extension." + +def setup_parser(subparser): + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") + + +def deactivate(parser, args): + specs = spack.cmd.parse_specs(args.spec, concretize=True) + if len(specs) != 1: + tty.die("deactivate requires one spec. %d given." % len(specs)) + + # TODO: remove this hack when DAG info is stored in dir layout. + # This ensures the ext spec is always normalized properly. + spack.db.get(specs[0]) + + spec = spack.cmd.disambiguate_spec(specs[0]) + if not spec.package.activated: + tty.die("Package %s is not activated." % specs[0].short_spec) + + spec.package.do_deactivate() -- cgit v1.2.3-70-g09d2 From 2ae7f53b8359841fc5d1d7b6c70ff72f38bf2d88 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 15 Jan 2015 09:33:51 -0800 Subject: Bugfix: Extension hooks should only run for extensions. --- lib/spack/spack/hooks/extensions.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index 2cf506beed..718b24b965 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -27,7 +27,8 @@ import spack def post_install(pkg): - pkg.do_activate() + if pkg.is_extension: + pkg.do_activate() def pre_uninstall(pkg): @@ -35,4 +36,5 @@ def pre_uninstall(pkg): # TODO: store full graph info in stored .spec file. pkg.spec.normalize() - pkg.do_deactivate() + if pkg.is_extension: + pkg.do_deactivate() -- cgit v1.2.3-70-g09d2 From 7992f415fe0e50c5e15964f348ed23c1fc1795b5 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Thu, 15 Jan 2015 10:11:40 -0800 Subject: added py-nose --- var/spack/packages/py-nose/package.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 var/spack/packages/py-nose/package.py diff --git a/var/spack/packages/py-nose/package.py b/var/spack/packages/py-nose/package.py new file mode 100644 index 0000000000..7bd7106b8c --- /dev/null +++ b/var/spack/packages/py-nose/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyNose(Package): + """nose extends the test loading and running features of unittest, making it easier to write, find and run tests.""" + homepage = "https://pypi.python.org/pypi/nose" + url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz" + + version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From 9fa489b7f2dab452929729e300f189f8c50b817c Mon Sep 17 00:00:00 2001 From: "Gregory L.
Lee" Date: Tue, 20 Jan 2015 09:39:21 -0800 Subject: added several modules --- var/spack/packages/py-cython/package.py | 13 +++++++++++++ var/spack/packages/py-dateutil/package.py | 13 +++++++++++++ var/spack/packages/py-epydoc/package.py | 13 +++++++++++++ var/spack/packages/py-ipython/package.py | 14 ++++++++++++++ var/spack/packages/py-matplotlib/package.py | 20 ++++++++++++++++++++ var/spack/packages/py-numpy/package.py | 14 ++++++++++++++ var/spack/packages/py-pexpect/package.py | 13 +++++++++++++ var/spack/packages/py-pygments/package.py | 14 ++++++++++++++ var/spack/packages/py-pyparsing/package.py | 13 +++++++++++++ var/spack/packages/py-pyside/package.py | 18 ++++++++++++++++++ var/spack/packages/py-pytz/package.py | 13 +++++++++++++ var/spack/packages/py-scipy/package.py | 15 +++++++++++++++ var/spack/packages/py-six/package.py | 13 +++++++++++++ var/spack/packages/py-virtualenv/package.py | 18 ++++++++++++++++++ 14 files changed, 204 insertions(+) create mode 100644 var/spack/packages/py-cython/package.py create mode 100644 var/spack/packages/py-dateutil/package.py create mode 100644 var/spack/packages/py-epydoc/package.py create mode 100644 var/spack/packages/py-ipython/package.py create mode 100644 var/spack/packages/py-matplotlib/package.py create mode 100644 var/spack/packages/py-numpy/package.py create mode 100644 var/spack/packages/py-pexpect/package.py create mode 100644 var/spack/packages/py-pygments/package.py create mode 100644 var/spack/packages/py-pyparsing/package.py create mode 100644 var/spack/packages/py-pyside/package.py create mode 100644 var/spack/packages/py-pytz/package.py create mode 100644 var/spack/packages/py-scipy/package.py create mode 100644 var/spack/packages/py-six/package.py create mode 100644 var/spack/packages/py-virtualenv/package.py diff --git a/var/spack/packages/py-cython/package.py b/var/spack/packages/py-cython/package.py new file mode 100644 index 0000000000..af67a15526 --- /dev/null +++ b/var/spack/packages/py-cython/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyCython(Package): + """The Cython compiler for writing C extensions for the Python language.""" + homepage = "https://pypi.python.org/pypi/cython" + url = "https://pypi.python.org/packages/source/C/Cython/Cython-0.21.2.tar.gz" + + version('0.21.2', 'd21adb870c75680dc857cd05d41046a4') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-dateutil/package.py b/var/spack/packages/py-dateutil/package.py new file mode 100644 index 0000000000..96e3ecab07 --- /dev/null +++ b/var/spack/packages/py-dateutil/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyDateutil(Package): + """Extensions to the standard Python datetime module.""" + homepage = "https://pypi.python.org/pypi/dateutil" + url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz" + + version('2.4.0', '75714163bb96bedd07685cdb2071b8bc') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-epydoc/package.py b/var/spack/packages/py-epydoc/package.py new file mode 100644 index 0000000000..af05510504 --- /dev/null +++ b/var/spack/packages/py-epydoc/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyEpydoc(Package): + """Epydoc is a tool for generating API documentation documentation for Python modules, based on their docstrings.""" + homepage = 
"https://pypi.python.org/pypi/epydoc" + url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz" + + version('3.0.1', '36407974bd5da2af00bf90ca27feeb44') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-ipython/package.py b/var/spack/packages/py-ipython/package.py new file mode 100644 index 0000000000..731e661dfd --- /dev/null +++ b/var/spack/packages/py-ipython/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyIpython(Package): + """IPython provides a rich toolkit to help you make the most out of using Python interactively.""" + homepage = "https://pypi.python.org/pypi/ipython" + url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz" + + version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf') + + extends('python') + depends_on('py-pygments') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py new file mode 100644 index 0000000000..836273c923 --- /dev/null +++ b/var/spack/packages/py-matplotlib/package.py @@ -0,0 +1,20 @@ +from spack import * + +class PyMatplotlib(Package): + """Python plotting package.""" + homepage = "https://pypi.python.org/pypi/matplotlib" + url = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz" + + version('1.4.2', '7d22efb6cce475025733c50487bd8898') + + extends('python') + depends_on('py-pyside') + depends_on('py-ipython') + depends_on('py-pyparsing') + depends_on('py-six') + depends_on('py-dateutil') + depends_on('py-pytz') + depends_on('py-nose') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py new file mode 100644 index 0000000000..e6cb6a464f --- /dev/null +++ b/var/spack/packages/py-numpy/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyNumpy(Package): + """array processing for numbers, strings, records, and objects.""" + homepage = "https://pypi.python.org/pypi/numpy" + url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz" + + version('1.9.1', '78842b73560ec378142665e712ae4ad9') + + extends('python') + depends_on('py-nose') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pexpect/package.py b/var/spack/packages/py-pexpect/package.py new file mode 100644 index 0000000000..ff5fac84e0 --- /dev/null +++ b/var/spack/packages/py-pexpect/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPexpect(Package): + """Pexpect allows easy control of interactive console applications.""" + homepage = "https://pypi.python.org/pypi/pexpect" + url = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz" + + version('3.3', '0de72541d3f1374b795472fed841dce8') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pygments/package.py b/var/spack/packages/py-pygments/package.py new file mode 100644 index 0000000000..990eebde65 --- /dev/null +++ b/var/spack/packages/py-pygments/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyPygments(Package): + """Pygments is a syntax highlighting package written in Python.""" + homepage = "https://pypi.python.org/pypi/pygments" + url = 
"https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz" + + version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyparsing/package.py b/var/spack/packages/py-pyparsing/package.py new file mode 100644 index 0000000000..a6e50ad139 --- /dev/null +++ b/var/spack/packages/py-pyparsing/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPyparsing(Package): + """A Python Parsing Module.""" + homepage = "https://pypi.python.org/pypi/pyparsing" + url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz" + + version('2.0.3', '0fe479be09fc2cf005f753d3acc35939') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py new file mode 100644 index 0000000000..b01e16d7e6 --- /dev/null +++ b/var/spack/packages/py-pyside/package.py @@ -0,0 +1,18 @@ +from spack import * +import spack.package +import os + +class PyPyside(Package): + """array processing for numbers, strings, records, and objects.""" + homepage = "https://pypi.python.org/pypi/pyside" + url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz" + + version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d') + + extends('python') + + def install(self, spec, prefix): + qmake_path = '/usr/lib64/qt4/bin/qmake' + if not os.path.exists(qmake_path): + raise spack.package.InstallError("Failed to find qmake in %s" % qmake_path) + python('setup.py', 'install', '--prefix=%s' % prefix, '--qmake=%s' % qmake_path) diff --git a/var/spack/packages/py-pytz/package.py b/var/spack/packages/py-pytz/package.py new file mode 100644 index 0000000000..80bcfe82ca --- /dev/null +++ b/var/spack/packages/py-pytz/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPytz(Package): + """World timezone definitions, modern and historical.""" + homepage = "https://pypi.python.org/pypi/pytz" + url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz" + + version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scipy/package.py b/var/spack/packages/py-scipy/package.py new file mode 100644 index 0000000000..b5325b919f --- /dev/null +++ b/var/spack/packages/py-scipy/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyScipy(Package): + """Scientific Library for Python.""" + homepage = "https://pypi.python.org/pypi/scipy" + url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz" + + version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a') + + extends('python') + depends_on('py-nose') + depends_on('py-numpy') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-six/package.py b/var/spack/packages/py-six/package.py new file mode 100644 index 0000000000..04d29adced --- /dev/null +++ b/var/spack/packages/py-six/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PySix(Package): + """Python 2 and 3 compatibility utilities.""" + homepage = "https://pypi.python.org/pypi/six" + url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz" + + version('1.9.0', '476881ef4012262dfc8adc645ee786c4') + + extends('python') + + 
def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-virtualenv/package.py b/var/spack/packages/py-virtualenv/package.py new file mode 100644 index 0000000000..c1b359e164 --- /dev/null +++ b/var/spack/packages/py-virtualenv/package.py @@ -0,0 +1,18 @@ +from spack import * +import os, shutil + +class PyVirtualenv(Package): + """virtualenv is a tool to create isolated Python environments.""" + homepage = "http://virtualenv.readthedocs.org/projects/virtualenv/" + url = "https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.11.6.tar.gz" + + version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49') + + extends('python') + + def clean(self): + if os.path.exists('build'): + shutil.rmtree('build') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From ff9cb94f4f92112739f53881bcb0a9a19811684d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 20 Jan 2015 00:23:16 -0800 Subject: Add arguments to extends() and activate/deactivate. --- lib/spack/spack/package.py | 40 ++++++++++++++++++++++++++++++++++++++---------- lib/spack/spack/relations.py | 17 ++++++++++------- var/spack/packages/python/package.py | 32 +++++++++++++++++++++++++++++++++++++--- 3 files changed, 69 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index ae34f8ae45..bd63c2e0c0 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -497,16 +497,26 @@ class Package(object): @property def extendee_spec(self): """Spec of the extendee of this package, or None if it is not an extension.""" - if not self.extendees: return None - + if not self.extendees: + return None name = next(iter(self.extendees)) if not name in self.spec: - return self.extendees[name] + spec, kwargs = self.extendees[name] + return spec # Need to do this to get the concrete version of the spec return self.spec[name] + @property + def extendee_args(self): + """Keyword arguments this package passed to extends() for its extendee, or None if it is not an extension.""" + if not self.extendees: + return None + name = next(iter(self.extendees)) + return self.extendees[name][1] + + @property def is_extension(self): return len(self.extendees) > 0 @@ -949,6 +959,8 @@ class Package(object): def _sanity_check_extension(self): + if not self.is_extension: + raise ValueError("This package is not an extension.") extendee_package = self.extendee_spec.package extendee_package._check_extendable() @@ -967,14 +979,14 @@ class Package(object): activate() directly. """ self._sanity_check_extension() - self.extendee_spec.package.activate(self) + self.extendee_spec.package.activate(self, **self.extendee_args) spack.install_layout.add_extension(self.extendee_spec, self.spec) tty.msg("Activated extension %s for %s." % (self.spec.short_spec, self.extendee_spec.short_spec)) - def activate(self, extension): + def activate(self, extension, **kwargs): """Symlinks all files from the extension into extendee's install dir. Package authors can override this method to support other @@ -983,17 +995,20 @@ class Package(object): always executed. 
""" + ignore_files = set(spack.install_layout.hidden_file_paths) + ignore_files.update(kwargs.get('ignore', ())) + tree = LinkTree(extension.prefix) - conflict = tree.find_conflict( - self.prefix, ignore=spack.install_layout.hidden_file_paths) + conflict = tree.find_conflict(self.prefix, ignore=ignore_files) if conflict: raise ExtensionConflictError(conflict) - tree.merge(self.prefix, ignore=spack.install_layout.hidden_file_paths) + tree.merge(self.prefix, ignore=ignore_files) def do_deactivate(self): + """Called on the extension to invoke extendee's deactivate() method.""" self._sanity_check_extension() - self.extendee_spec.package.deactivate(self) + self.extendee_spec.package.deactivate(self, **self.extendee_args) if self.spec in spack.install_layout.get_extensions(self.extendee_spec): spack.install_layout.remove_extension(self.extendee_spec, self.spec) @@ -1002,7 +1017,7 @@ class Package(object): % (self.spec.short_spec, self.extendee_spec.short_spec)) - def deactivate(self, extension): + def deactivate(self, extension, **kwargs): """Unlinks all files from extension out of this package's install dir. Package authors can override this method to support other @@ -1011,8 +1026,11 @@ class Package(object): always executed. """ + ignore_files = set(spack.install_layout.hidden_file_paths) + ignore_files.update(kwargs.get('ignore', ())) + tree = LinkTree(extension.prefix) - tree.unmerge(self.prefix, ignore=spack.install_layout.hidden_file_paths) + tree.unmerge(self.prefix, ignore=ignore_files) def do_clean(self): diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index 60ff5bef34..a0c7723473 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -117,7 +117,7 @@ def depends_on(*specs): dependencies[spec.name] = spec -def extends(*specs): +def extends(spec, **kwargs): """Same as depends_on, but dependency is symlinked into parent prefix. This is for Python and other language modules where the module @@ -126,6 +126,10 @@ def extends(*specs): but allowing ONE module version to be symlinked into a parent Python install at a time. + keyword arguments can be passed to extends() so that extension + packages can pass parameters to the extendee's extension + mechanism. + """ pkg = get_calling_package_name() clocals = caller_locals() @@ -134,12 +138,11 @@ def extends(*specs): if extendees: raise RelationError("Packages can extend at most one other package.") - for string in specs: - for spec in spack.spec.parse(string): - if pkg == spec.name: - raise CircularReferenceError('extends', pkg) - dependencies[spec.name] = spec - extendees[spec.name] = spec + spec = Spec(spec) + if pkg == spec.name: + raise CircularReferenceError('extends', pkg) + dependencies[spec.name] = spec + extendees[spec.name] = (spec, kwargs) def provides(*specs, **kwargs): diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 9700179ab8..86b903bc23 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -28,6 +28,16 @@ class Python(Package): make("install") + @property + def python_lib_dir(self): + return os.path.join('lib', 'python%d.%d' % self.version[:2]) + + + @property + def site_packages_dir(self): + return os.path.join(self.python_lib_dir, 'site-packages') + + def setup_extension_environment(self, module, spec, ext_spec): """Called before python modules' install() methods. 
@@ -39,11 +49,29 @@ class Python(Package): module.python = Executable(join_path(spec.prefix.bin, 'python')) # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. - module.python_lib_dir = join_path(ext_spec.prefix.lib, 'python%d.%d' % self.version[:2]) - module.site_packages_dir = join_path(module.python_lib_dir, 'site-packages') + module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) + module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) # Add site packages directory to the PYTHONPATH os.environ['PYTHONPATH'] = module.site_packages_dir # Make the site packages directory if it does not exist already. mkdirp(module.site_packages_dir) + + + def add_ignore_files(self, args): + """Add some ignore files to activate/deactivate args.""" + ignore = set(args.get('ignore', ())) + ignore.add(os.path.join(self.site_packages_dir, 'site.py')) + ignore.add(os.path.join(self.site_packages_dir, 'site.pyc')) + args.update(ignore=ignore) + + + def activate(self, ext_pkg, **args): + self.add_ignore_files(args) + super(Python, self).activate(ext_pkg, **args) + + + def deactivate(self, ext_pkg, **args): + self.add_ignore_files(args) + super(Python, self).deactivate(ext_pkg, **args) -- cgit v1.2.3-70-g09d2 From de91c95e8e45b8ab066ba3dfc8f89c92da761b5a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 20 Jan 2015 15:07:53 -0800 Subject: Ability to ignore files in activate/deactivate for extensions. --- lib/spack/llnl/util/link_tree.py | 11 ++++------- lib/spack/spack/package.py | 16 +++++++++------- var/spack/packages/py-nose/package.py | 4 +++- var/spack/packages/python/package.py | 17 ++++++++++------- 4 files changed, 26 insertions(+), 22 deletions(-) diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py index 19c2d46938..2d7126be2c 100644 --- a/lib/spack/llnl/util/link_tree.py +++ b/lib/spack/llnl/util/link_tree.py @@ -72,8 +72,7 @@ class LinkTree(object): order=[pre|post] -- Whether to do pre- or post-order traveral. - ignore= -- Optional container of root-relative - paths to ignore. + ignore= -- Predicate indicating which files to ignore. follow_nonexisting -- Whether to descend into directories in src that do not exit in dest. @@ -85,9 +84,7 @@ class LinkTree(object): raise ValueError("Order must be 'pre' or 'post'.") # List of relative paths to ignore under the src root. - ignore = kwargs.get('ignore', None) - if isinstance(ignore, basestring): - ignore = (ignore,) + ignore = kwargs.get('ignore', lambda filename: False) # Whether to descend when dirs dont' exist in dest. follow_nonexisting = kwargs.get('follow_nonexisting', True) @@ -98,7 +95,7 @@ class LinkTree(object): dest_dirpath = os.path.join(dest_root, rel_path) # Don't descend into ignored directories - if ignore and dest_dirpath in ignore: + if ignore(dest_dirpath): return # Don't descend into dirs in dest that do not exist in src. @@ -118,7 +115,7 @@ class LinkTree(object): # Ignore particular paths inside the install root. src_relpath = src_file[len(self._root):] src_relpath = src_relpath.lstrip(os.path.sep) - if ignore and src_relpath in ignore: + if ignore(src_relpath): continue yield (src_file, dest_file) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index bd63c2e0c0..43b1fcd9c8 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -995,14 +995,15 @@ class Package(object): always executed. 
""" - ignore_files = set(spack.install_layout.hidden_file_paths) - ignore_files.update(kwargs.get('ignore', ())) + def ignore(filename): + return (filename in spack.install_layout.hidden_file_paths or + kwargs.get('ignore', lambda f: False)(filename)) tree = LinkTree(extension.prefix) - conflict = tree.find_conflict(self.prefix, ignore=ignore_files) + conflict = tree.find_conflict(self.prefix, ignore=ignore) if conflict: raise ExtensionConflictError(conflict) - tree.merge(self.prefix, ignore=ignore_files) + tree.merge(self.prefix, ignore=ignore) def do_deactivate(self): @@ -1026,11 +1027,12 @@ class Package(object): always executed. """ - ignore_files = set(spack.install_layout.hidden_file_paths) - ignore_files.update(kwargs.get('ignore', ())) + def ignore(filename): + return (filename in spack.install_layout.hidden_file_paths or + kwargs.get('ignore', lambda f: False)(filename)) tree = LinkTree(extension.prefix) - tree.unmerge(self.prefix, ignore=ignore_files) + tree.unmerge(self.prefix, ignore=ignore) def do_clean(self): diff --git a/var/spack/packages/py-nose/package.py b/var/spack/packages/py-nose/package.py index 7bd7106b8c..6df84e831d 100644 --- a/var/spack/packages/py-nose/package.py +++ b/var/spack/packages/py-nose/package.py @@ -1,7 +1,9 @@ from spack import * class PyNose(Package): - """nose extends the test loading and running features of unittest, making it easier to write, find and run tests.""" + """nose extends the test loading and running features of unittest, + making it easier to write, find and run tests.""" + homepage = "https://pypi.python.org/pypi/nose" url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz" diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 86b903bc23..a22bd54c82 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,5 +1,6 @@ from spack import * import os +import re class Python(Package): """The Python programming language.""" @@ -59,19 +60,21 @@ class Python(Package): mkdirp(module.site_packages_dir) - def add_ignore_files(self, args): + def make_ignore(self, args): """Add some ignore files to activate/deactivate args.""" - ignore = set(args.get('ignore', ())) - ignore.add(os.path.join(self.site_packages_dir, 'site.py')) - ignore.add(os.path.join(self.site_packages_dir, 'site.pyc')) - args.update(ignore=ignore) + orig_ignore = args.get('ignore', lambda f: False) + def ignore(filename): + return (re.search(r'/site\.pyc?$', filename) or + re.search(r'\.pth$', filename) or + orig_ignore(filename)) + return ignore def activate(self, ext_pkg, **args): - self.add_ignore_files(args) + args.update(ignore=self.make_ignore(args)) super(Python, self).activate(ext_pkg, **args) def deactivate(self, ext_pkg, **args): - self.add_ignore_files(args) + args.update(ignore=self.make_ignore(args)) super(Python, self).deactivate(ext_pkg, **args) -- cgit v1.2.3-70-g09d2 From 2bc3f74df263eb92a99e5477f4ce04972eb76994 Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Thu, 22 Jan 2015 11:50:01 -0800 Subject: added more Python modules --- var/spack/packages/hdf5/package.py | 6 ++++-- var/spack/packages/py-basemap/package.py | 24 +++++++++++++++++++++++ var/spack/packages/py-biopython/package.py | 14 +++++++++++++ var/spack/packages/py-gnuplot/package.py | 13 ++++++++++++ var/spack/packages/py-h5py/package.py | 18 +++++++++++++++++ var/spack/packages/py-matplotlib/package.py | 17 ++++++++++++++++ var/spack/packages/py-mpi4py/package.py | 13 ++++++++++++ var/spack/packages/py-mx/package.py | 13 ++++++++++++ var/spack/packages/py-pil/package.py | 14 +++++++++++++ var/spack/packages/py-pmw/package.py | 13 ++++++++++++ var/spack/packages/py-pylint/package.py | 16 +++++++++++++++ var/spack/packages/py-rpy2/package.py | 14 +++++++++++++ var/spack/packages/py-scientificpython/package.py | 13 ++++++++++++ var/spack/packages/py-scikit-learn/package.py | 13 ++++++++++++ var/spack/packages/py-sympy/package.py | 13 ++++++++++++ 15 files changed, 212 insertions(+), 2 deletions(-) create mode 100644 var/spack/packages/py-basemap/package.py create mode 100644 var/spack/packages/py-biopython/package.py create mode 100644 var/spack/packages/py-gnuplot/package.py create mode 100644 var/spack/packages/py-h5py/package.py create mode 100644 var/spack/packages/py-mpi4py/package.py create mode 100644 var/spack/packages/py-mx/package.py create mode 100644 var/spack/packages/py-pil/package.py create mode 100644 var/spack/packages/py-pmw/package.py create mode 100644 var/spack/packages/py-pylint/package.py create mode 100644 var/spack/packages/py-rpy2/package.py create mode 100644 var/spack/packages/py-scientificpython/package.py create mode 100644 var/spack/packages/py-scikit-learn/package.py create mode 100644 var/spack/packages/py-sympy/package.py diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py index 615c2a7fe4..992dd8ec70 100644 --- a/var/spack/packages/hdf5/package.py +++ b/var/spack/packages/hdf5/package.py @@ -18,12 +18,14 @@ class Hdf5(Package): # TODO: currently hard-coded to use OpenMPI def install(self, spec, prefix): + configure( "--prefix=%s" % prefix, "--with-zlib=%s" % spec['zlib'].prefix, "--enable-parallel", - "CC=%s" % spec['openmpi'].prefix.bin + "/mpicc", - "CXX=%s" % spec['openmpi'].prefix.bin + "/mpic++") + "--enable-shared", + "CC=%s" % spec['mpich'].prefix.bin + "/mpicc", + "CXX=%s" % spec['mpich'].prefix.bin + "/mpic++") make() make("install") diff --git a/var/spack/packages/py-basemap/package.py b/var/spack/packages/py-basemap/package.py new file mode 100644 index 0000000000..8955bf8827 --- /dev/null +++ b/var/spack/packages/py-basemap/package.py @@ -0,0 +1,24 @@ +from spack import * +import os + +class PyBasemap(Package): + """The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python.""" + homepage = "http://matplotlib.org/basemap/" + url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz" + + version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8') + + geos_version = {'1.0.7' : '3.3.3'} + + extends('python') + depends_on('py-numpy') + depends_on('py-matplotlib') + depends_on('py-pil') + + def install(self, spec, prefix): + with working_dir('geos-%s' % self.geos_version[str(self.version)]): + configure("--prefix=" + prefix) + make() + make("install") + os.environ['GEOS_DIR'] = prefix + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-biopython/package.py 
b/var/spack/packages/py-biopython/package.py new file mode 100644 index 0000000000..2ed04c389e --- /dev/null +++ b/var/spack/packages/py-biopython/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyBiopython(Package): + """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics.""" + homepage = "http://biopython.org/wiki/Main_Page" + url = "http://biopython.org/DIST/biopython-1.65.tar.gz" + + version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67') + + extends('python') + depends_on('py-mx') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-gnuplot/package.py b/var/spack/packages/py-gnuplot/package.py new file mode 100644 index 0000000000..0a2c073a49 --- /dev/null +++ b/var/spack/packages/py-gnuplot/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyGnuplot(Package): + """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program.""" + homepage = "http://gnuplot-py.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz" + + version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-h5py/package.py b/var/spack/packages/py-h5py/package.py new file mode 100644 index 0000000000..f72b3ac06e --- /dev/null +++ b/var/spack/packages/py-h5py/package.py @@ -0,0 +1,18 @@ +from spack import * +import re + +class PyH5py(Package): + """The h5py package provides both a high- and low-level interface to the HDF5 library from Python.""" + homepage = "https://pypi.python.org/pypi/h5py" + url = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz" + + version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758') + + extends('python', ignore=lambda f: re.match(r'cy*', f)) + depends_on('hdf5') + depends_on('py-numpy') + depends_on('py-cython') + + def install(self, spec, prefix): + python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix) + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py index 836273c923..270a1ebfe4 100644 --- a/var/spack/packages/py-matplotlib/package.py +++ b/var/spack/packages/py-matplotlib/package.py @@ -1,4 +1,5 @@ from spack import * +import os class PyMatplotlib(Package): """Python plotting package.""" @@ -18,3 +19,19 @@ class PyMatplotlib(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) + if str(self.version) == '1.4.2': + # hack to fix configuration file + config_file = None + for p,d,f in os.walk(prefix.lib): + for file in f: + if file.find('matplotlibrc') != -1: + config_file = join_path(p, 'matplotlibrc') + print config_file + if config_file == None: + raise InstallError('could not find config file') + filter_file(r'backend : pyside', + 'backend : Qt4Agg', + config_file) + filter_file(r'#backend.qt4 : PyQt4', + 'backend.qt4 : PySide', + config_file) diff --git a/var/spack/packages/py-mpi4py/package.py b/var/spack/packages/py-mpi4py/package.py new file mode 100644 index 0000000000..fdea340dc2 --- /dev/null +++ b/var/spack/packages/py-mpi4py/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyMpi4py(Package): + """This package provides Python bindings for the Message 
Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings.""" + homepage = "https://pypi.python.org/pypi/mpi4py" + url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz" + + version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c') + extends('python') + depends_on('mpi') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mx/package.py b/var/spack/packages/py-mx/package.py new file mode 100644 index 0000000000..717ee0562b --- /dev/null +++ b/var/spack/packages/py-mx/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyMx(Package): + """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types.""" + homepage = "http://www.egenix.com/products/python/mxBase/" + url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz" + + version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pil/package.py b/var/spack/packages/py-pil/package.py new file mode 100644 index 0000000000..743b761981 --- /dev/null +++ b/var/spack/packages/py-pil/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyPil(Package): + """The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities.""" + + homepage = "http://www.pythonware.com/products/pil/" + url = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz" + + version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pmw/package.py b/var/spack/packages/py-pmw/package.py new file mode 100644 index 0000000000..56131811e9 --- /dev/null +++ b/var/spack/packages/py-pmw/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPmw(Package): + """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts.""" + homepage = "https://pypi.python.org/pypi/Pmw" + url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz" + + version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pylint/package.py b/var/spack/packages/py-pylint/package.py new file mode 100644 index 0000000000..ebde861f94 --- /dev/null +++ b/var/spack/packages/py-pylint/package.py @@ -0,0 +1,16 @@ +from spack import * +import re + +class PyPylint(Package): + """array processing for numbers, strings, records, and objects.""" + homepage = "https://pypi.python.org/pypi/pylint" + url = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz" + + version('1.4.1', 'df7c679bdcce5019389038847e4de622') + +# extends('python') + extends('python', ignore=lambda f:re.match(r"site.py*", f)) + depends_on('py-nose') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-rpy2/package.py 
b/var/spack/packages/py-rpy2/package.py new file mode 100644 index 0000000000..3817059911 --- /dev/null +++ b/var/spack/packages/py-rpy2/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyRpy2(Package): + """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions.""" + homepage = "https://pypi.python.org/pypi/rpy2" + url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" + + version('2.5.4', '115a20ac30883f096da2bdfcab55196d') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scientificpython/package.py b/var/spack/packages/py-scientificpython/package.py new file mode 100644 index 0000000000..73600e6cb9 --- /dev/null +++ b/var/spack/packages/py-scientificpython/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyScientificpython(Package): + """ScientificPython is a collection of Python modules for scientific computing. It contains support for geometry, mathematical functions, statistics, physical units, IO, visualization, and parallelization.""" + homepage = "https://sourcesup.renater.fr/projects/scientific-py/" + url = "https://sourcesup.renater.fr/frs/download.php/4411/ScientificPython-2.8.1.tar.gz" + + version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scikit-learn/package.py b/var/spack/packages/py-scikit-learn/package.py new file mode 100644 index 0000000000..c59c05a619 --- /dev/null +++ b/var/spack/packages/py-scikit-learn/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyScikitLearn(Package): + """""" + homepage = "https://pypi.python.org/pypi/scikit-learn" + url = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz" + + version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-sympy/package.py b/var/spack/packages/py-sympy/package.py new file mode 100644 index 0000000000..c17e35b95f --- /dev/null +++ b/var/spack/packages/py-sympy/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PySympy(Package): + """SymPy is a Python library for symbolic mathematics.""" + homepage = "https://pypi.python.org/pypi/sympy" + url = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz" + + version('0.7.6', '3d04753974306d8a13830008e17babca') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From 48f1ff87f836f214e72e1d02f47eac4678f0292a Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Fri, 23 Jan 2015 13:53:36 -0800 Subject: added more Python modules --- var/spack/packages/py-dateutil/package.py | 1 + var/spack/packages/py-libxml2/package.py | 13 +++++++++++++ var/spack/packages/py-matplotlib/package.py | 1 + var/spack/packages/py-pychecker/package.py | 13 +++++++++++++ var/spack/packages/py-pyqt4/package.py | 18 ++++++++++++++++++ var/spack/packages/py-sip/package.py | 15 +++++++++++++++ var/spack/packages/qt/package.py | 5 +++-- 7 files changed, 64 insertions(+), 2 deletions(-) create mode 100644 var/spack/packages/py-libxml2/package.py create mode 100644 var/spack/packages/py-pychecker/package.py create mode 100644 var/spack/packages/py-pyqt4/package.py create mode 100644 var/spack/packages/py-sip/package.py diff --git a/var/spack/packages/py-dateutil/package.py b/var/spack/packages/py-dateutil/package.py index 96e3ecab07..11699e07ee 100644 --- a/var/spack/packages/py-dateutil/package.py +++ b/var/spack/packages/py-dateutil/package.py @@ -8,6 +8,7 @@ class PyDateutil(Package): version('2.4.0', '75714163bb96bedd07685cdb2071b8bc') extends('python') + depends_on('py-setuptools') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-libxml2/package.py b/var/spack/packages/py-libxml2/package.py new file mode 100644 index 0000000000..0dcefbd9cf --- /dev/null +++ b/var/spack/packages/py-libxml2/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyLibxml2(Package): + """A Python wrapper around libxml2.""" + homepage = "https://xmlsoft.org/python.html" + url = "ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz" + + version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py index 270a1ebfe4..5979ceeab0 100644 --- a/var/spack/packages/py-matplotlib/package.py +++ b/var/spack/packages/py-matplotlib/package.py @@ -16,6 +16,7 @@ class PyMatplotlib(Package): depends_on('py-dateutil') depends_on('py-pytz') depends_on('py-nose') + depends_on('py-numpy') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pychecker/package.py b/var/spack/packages/py-pychecker/package.py new file mode 100644 index 0000000000..bda5a746aa --- /dev/null +++ b/var/spack/packages/py-pychecker/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPychecker(Package): + """""" + homepage = "http://pychecker.sourceforge.net/" + url = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz" + + version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyqt4/package.py b/var/spack/packages/py-pyqt4/package.py new file mode 100644 index 0000000000..eeb1382560 --- /dev/null +++ b/var/spack/packages/py-pyqt4/package.py @@ -0,0 +1,18 @@ +from spack import * + +class PyPyqt4(Package): + """PyQt is a set of Python v2 and v3 bindings for Digia's Qt application framework and runs on all platforms supported by Qt including Windows, MacOS/X and Linux.""" + homepage = "http://www.riverbankcomputing.com/software/pyqt/intro" + url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz" + + version('4.11.3', 
'997c3e443165a89a559e0d96b061bf70') + + extends('python') + depends_on('qt') + depends_on('py-sip') + + def install(self, spec, prefix): + version_array = str(spec['python'].version).split('.') + python('configure.py', '--confirm-license', '--destdir=%s/python%s.%s/site-packages' %(self.prefix.lib, version_array[0], version_array[1])) + make() + make('install') diff --git a/var/spack/packages/py-sip/package.py b/var/spack/packages/py-sip/package.py new file mode 100644 index 0000000000..06aea35a74 --- /dev/null +++ b/var/spack/packages/py-sip/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PySip(Package): + """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries.""" + homepage = "http://www.riverbankcomputing.com/software/sip/intro" + url = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz" + + version('4.16.5', '6d01ea966a53e4c7ae5c5e48c40e49e5') + + extends('python') + + def install(self, spec, prefix): + python('configure.py') + make() + make('install') diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 01f9de7f3c..6a55c89701 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -37,8 +37,9 @@ class Qt(Package): '-fast', '-optimized-qmake', '-no-pch', - '-no-phonon', - '-no-phonon-backend', +# phonon required for py-pyqt4 +# '-no-phonon', +# '-no-phonon-backend', '-no-openvg') make() make("install") -- cgit v1.2.3-70-g09d2 From 70c8bf44b8d2613c54423562f81e049fbb956780 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 26 Jan 2015 14:47:33 -0800 Subject: Fix for install sanity check -- don't count hidden dir layout files. --- lib/spack/spack/package.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 43b1fcd9c8..0b6bc4ce6c 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -836,10 +836,7 @@ class Package(object): self.install(self.spec, self.prefix) # Ensure that something was actually installed. - if not os.listdir(self.prefix): - raise InstallError( - "Install failed for %s. Nothing was installed!" - % self.name) + self._sanity_check_install() # On successful install, remove the stage. if not keep_stage: @@ -884,6 +881,15 @@ class Package(object): spack.hooks.post_install(self) + + def _sanity_check_install(self): + installed = set(os.listdir(self.prefix)) + installed.difference_update(spack.install_layout.hidden_file_paths) + if not installed: + raise InstallError( + "Install failed for %s. Nothing was installed!" % self.name) + + def do_install_dependencies(self, **kwargs): # Pass along paths of dependencies here for dep in self.spec.dependencies.values(): -- cgit v1.2.3-70-g09d2 From 6400ace90152a08a32684f97490369467ae1e37d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 26 Jan 2015 15:45:19 -0800 Subject: Add "spack extensions" command to list activated extensions. 
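The command takes a single package spec and reuses the display modes of `spack find`. A rough usage sketch, based only on the options defined in the patch below (the `python` spec is just an illustration, not part of the patch):

    spack extensions python        # list the activated extensions of an installed python
    spack extensions -p python     # show paths to the extension install directories
    spack extensions -d python     # show the full dependency DAG of each activated extension

The -l/--long flag additionally prints dependency hashes alongside versions, as in `spack find -l`.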
--- lib/spack/spack/cmd/extensions.py | 75 +++++++++++++++++++++++++++++++++++++++ lib/spack/spack/cmd/find.py | 69 +++++++++++++++++++++-------------- 2 files changed, 117 insertions(+), 27 deletions(-) create mode 100644 lib/spack/spack/cmd/extensions.py diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py new file mode 100644 index 0000000000..961d7e3f24 --- /dev/null +++ b/lib/spack/spack/cmd/extensions.py @@ -0,0 +1,75 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys +from external import argparse + +import llnl.util.tty as tty + +import spack +import spack.cmd +import spack.cmd.find + +description = "List extensions for package." + +def setup_parser(subparser): + format_group = subparser.add_mutually_exclusive_group() + format_group.add_argument( + '-l', '--long', action='store_const', dest='mode', const='long', + help='Show dependency hashes as well as versions.') + format_group.add_argument( + '-p', '--paths', action='store_const', dest='mode', const='paths', + help='Show paths to extension install directories') + format_group.add_argument( + '-d', '--deps', action='store_const', dest='mode', const='deps', + help='Show full dependency DAG of extensions') + + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for') + + +def extensions(parser, args): + if not args.spec: + tty.die("extensions requires a package spec.") + + spec = spack.cmd.parse_specs(args.spec) + if len(spec) > 1: + tty.die("Can only list extensions for one package.") + spec = spack.cmd.disambiguate_spec(spec[0]) + + if not spec.package.extendable: + tty.die("%s does not have extensions." % spec.short_spec) + + if not args.mode: + args.mode = 'short' + + exts = spack.install_layout.get_extensions(spec) + if not exts: + tty.msg("%s has no activated extensions." 
% spec.short_spec) + else: + tty.msg("Showing %d activated extension%s for package:" + % (len(exts), 's' if len(exts) > 1 else ''), + spec.short_spec) + print + spack.cmd.find.display_specs(exts, mode=args.mode) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 1de3413d42..f6f503afe5 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -41,13 +41,13 @@ description ="Find installed spack packages" def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() format_group.add_argument( - '-l', '--long', action='store_true', dest='long', + '-l', '--long', action='store_const', dest='mode', const='long', help='Show dependency hashes as well as versions.') format_group.add_argument( - '-p', '--paths', action='store_true', dest='paths', + '-p', '--paths', action='store_const', dest='mode', const='paths', help='Show paths to package install directories') format_group.add_argument( - '-d', '--deps', action='store_true', dest='full_deps', + '-d', '--deps', action='store_const', dest='mode', const='deps', help='Show full dependency DAG of installed packages') subparser.add_argument( @@ -55,26 +55,8 @@ def setup_parser(subparser): help='optional specs to filter results') -def find(parser, args): - # Filter out specs that don't exist. - query_specs = spack.cmd.parse_specs(args.query_specs) - query_specs, nonexisting = partition_list( - query_specs, lambda s: spack.db.exists(s.name)) - - if nonexisting: - msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '') - msg += ", ".join(s.name for s in nonexisting) - tty.msg(msg) - - if not query_specs: - return - - # Get all the specs the user asked for - if not query_specs: - specs = set(spack.db.installed_package_specs()) - else: - results = [set(spack.db.get_installed(qs)) for qs in query_specs] - specs = set.union(*results) +def display_specs(specs, **kwargs): + mode = kwargs.get('mode', 'short') # Make a dict with specs keyed by architecture and compiler. index = index_by(specs, ('architecture', 'compiler')) @@ -92,7 +74,7 @@ def find(parser, args): specs.sort() abbreviated = [s.format('$_$@$+', color=True) for s in specs] - if args.paths: + if mode == 'paths': # Print one spec per line along with prefix path width = max(len(s) for s in abbreviated) width += 2 @@ -101,11 +83,44 @@ def find(parser, args): for abbrv, spec in zip(abbreviated, specs): print format % (abbrv, spec.prefix) - elif args.full_deps: + elif mode == 'deps': for spec in specs: print spec.tree(indent=4, format='$_$@$+', color=True), - else: + + elif mode in ('short', 'long'): fmt = '$-_$@$+' - if args.long: + if mode == 'long': fmt += '$#' colify(s.format(fmt, color=True) for s in specs) + + else: + raise ValueError( + "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode) + + + +def find(parser, args): + # Filter out specs that don't exist. 
+ query_specs = spack.cmd.parse_specs(args.query_specs) + query_specs, nonexisting = partition_list( + query_specs, lambda s: spack.db.exists(s.name)) + + if nonexisting: + msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '') + msg += ", ".join(s.name for s in nonexisting) + tty.msg(msg) + + if not query_specs: + return + + # Get all the specs the user asked for + if not query_specs: + specs = set(spack.db.installed_package_specs()) + else: + results = [set(spack.db.get_installed(qs)) for qs in query_specs] + specs = set.union(*results) + + if not args.mode: + args.mode = 'short' + display_specs(specs, mode=args.mode) + -- cgit v1.2.3-70-g09d2 From 6b90017efa1f3157fe4be7d0c7b199b6e51b9fa8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 28 Jan 2015 22:05:57 -0800 Subject: Fixed dumb link_tree bug, added test for link tree. --- lib/spack/llnl/util/filesystem.py | 8 +- lib/spack/llnl/util/link_tree.py | 197 ++++++++++++++++++++++---------------- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/link_tree.py | 153 +++++++++++++++++++++++++++++ 4 files changed, 274 insertions(+), 87 deletions(-) create mode 100644 lib/spack/spack/test/link_tree.py diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 0578415653..576aeb16bd 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -23,7 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', - 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', + 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe'] import os @@ -204,6 +204,12 @@ def touch(path): os.utime(path, None) +def touchp(path): + """Like touch, but creates any parent directories needed for the file.""" + mkdirp(os.path.dirname(path)) + touch(path) + + def join_path(prefix, *args): path = str(prefix) for elt in args: diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py index 2d7126be2c..887f6f4d26 100644 --- a/lib/spack/llnl/util/link_tree.py +++ b/lib/spack/llnl/util/link_tree.py @@ -29,108 +29,116 @@ import os import shutil from llnl.util.filesystem import mkdirp +empty_file_name = '.spack-empty' -class LinkTree(object): - """Class to create trees of symbolic links from a source directory. - LinkTree objects are constructed with a source root. Their - methods allow you to create and delete trees of symbolic links - back to the source tree in specific destination directories. - Trees comprise symlinks only to files; directries are never - symlinked to, to prevent the source directory from ever being - modified. +def traverse_tree(source_root, dest_root, rel_path='', **kwargs): + """Traverse two filesystem trees simultaneously. + + Walks the LinkTree directory in pre or post order. Yields each + file in the source directory with a matching path from the dest + directory, along with whether the file is a directory. + e.g., for this tree:: + + root/ + a/ + file1 + file2 + b/ + file3 + + When called on dest, this yields:: + + ('root', 'dest') + ('root/a', 'dest/a') + ('root/a/file1', 'dest/a/file1') + ('root/a/file2', 'dest/a/file2') + ('root/b', 'dest/b') + ('root/b/file3', 'dest/b/file3') + + Optional args: + + order=[pre|post] -- Whether to do pre- or post-order traveral. 
+ + ignore= -- Predicate indicating which files to ignore. + + follow_nonexisting -- Whether to descend into directories in + src that do not exit in dest. Default True. + + follow_links -- Whether to descend into symlinks in src. """ - def __init__(self, source_root): - self._root = source_root + follow_nonexisting = kwargs.get('follow_nonexisting', True) + follow_links = kwargs.get('follow_link', False) + # Yield in pre or post order? + order = kwargs.get('order', 'pre') + if order not in ('pre', 'post'): + raise ValueError("Order must be 'pre' or 'post'.") - def traverse(self, dest_root, **kwargs): - """Traverse LinkTree root and dest simultaneously. + # List of relative paths to ignore under the src root. + ignore = kwargs.get('ignore', lambda filename: False) - Walks the LinkTree directory in pre or post order. Yields - each file in the source directory with a matching path from - the dest directory. e.g., for this tree:: + # Don't descend into ignored directories + if ignore(rel_path): + return - root/ - a/ - file1 - file2 - b/ - file3 + source_path = os.path.join(source_root, rel_path) + dest_path = os.path.join(dest_root, rel_path) - When called on dest, this yields:: + # preorder yields directories before children + if order == 'pre': + yield (source_path, dest_path) - ('root', 'dest') - ('root/a', 'dest/a') - ('root/a/file1', 'dest/a/file1') - ('root/a/file2', 'dest/a/file2') - ('root/b', 'dest/b') - ('root/b/file3', 'dest/b/file3') + for f in os.listdir(source_path): + source_child = os.path.join(source_path, f) + dest_child = os.path.join(dest_path, f) - Optional args: + # Treat as a directory + if os.path.isdir(source_child) and ( + follow_links or not os.path.islink(source_child)): - order=[pre|post] -- Whether to do pre- or post-order traveral. + # When follow_nonexisting isn't set, don't descend into dirs + # in source that do not exist in dest + if follow_nonexisting or os.path.exists(dest_child): + tuples = traverse_tree(source_child, dest_child, rel_path, **kwargs) + for t in tuples: yield t - ignore= -- Predicate indicating which files to ignore. + # Treat as a file. + elif not ignore(os.path.join(rel_path, f)): + yield (source_child, dest_child) + + if order == 'post': + yield (source_path, dest_path) - follow_nonexisting -- Whether to descend into directories in - src that do not exit in dest. - """ - # Yield directories before or after their contents. - order = kwargs.get('order', 'pre') - if order not in ('pre', 'post'): - raise ValueError("Order must be 'pre' or 'post'.") - - # List of relative paths to ignore under the src root. - ignore = kwargs.get('ignore', lambda filename: False) - - # Whether to descend when dirs dont' exist in dest. - follow_nonexisting = kwargs.get('follow_nonexisting', True) - - for dirpath, dirnames, filenames in os.walk(self._root): - rel_path = dirpath[len(self._root):] - rel_path = rel_path.lstrip(os.path.sep) - dest_dirpath = os.path.join(dest_root, rel_path) - - # Don't descend into ignored directories - if ignore(dest_dirpath): - return - - # Don't descend into dirs in dest that do not exist in src. - if not follow_nonexisting: - dirnames[:] = [ - d for d in dirnames - if os.path.exists(os.path.join(dest_dirpath, d))] - - # preorder yields directories before children - if order == 'pre': - yield (dirpath, dest_dirpath) - - for name in filenames: - src_file = os.path.join(dirpath, name) - dest_file = os.path.join(dest_dirpath, name) - - # Ignore particular paths inside the install root. 
- src_relpath = src_file[len(self._root):] - src_relpath = src_relpath.lstrip(os.path.sep) - if ignore(src_relpath): - continue - yield (src_file, dest_file) +class LinkTree(object): + """Class to create trees of symbolic links from a source directory. - # postorder yields directories after children - if order == 'post': - yield (dirpath, dest_dirpath) + LinkTree objects are constructed with a source root. Their + methods allow you to create and delete trees of symbolic links + back to the source tree in specific destination directories. + Trees comprise symlinks only to files; directries are never + symlinked to, to prevent the source directory from ever being + modified. + + """ + def __init__(self, source_root): + if not os.path.exists(source_root): + raise IOError("No such file or directory: '%s'", source_root) + self._root = source_root def find_conflict(self, dest_root, **kwargs): - """Returns the first file in dest that also exists in src.""" + """Returns the first file in dest that conflicts with src""" kwargs['follow_nonexisting'] = False - for src, dest in self.traverse(dest_root, **kwargs): - if os.path.exists(dest) and not os.path.isdir(dest): + for src, dest in traverse_tree(self._root, dest_root, **kwargs): + if os.path.isdir(src): + if os.path.exists(dest) and not os.path.isdir(dest): + return dest + elif os.path.exists(dest): return dest return None @@ -138,9 +146,20 @@ class LinkTree(object): def merge(self, dest_root, **kwargs): """Link all files in src into dest, creating directories if necessary.""" kwargs['order'] = 'pre' - for src, dest in self.traverse(dest_root, **kwargs): + for src, dest in traverse_tree(self._root, dest_root, **kwargs): if os.path.isdir(src): - mkdirp(dest) + if not os.path.exists(dest): + mkdirp(dest) + continue + + if not os.path.isdir(dest): + raise ValueError("File blocks directory: %s" % dest) + + # mark empty directories so they aren't removed on unmerge. + if not os.listdir(dest): + marker = os.path.join(dest, empty_file_name) + touch(marker) + else: assert(not os.path.exists(dest)) os.symlink(src, dest) @@ -153,12 +172,20 @@ class LinkTree(object): """ kwargs['order'] = 'post' - for src, dest in self.traverse(dest_root, **kwargs): - if os.path.isdir(dest): + for src, dest in traverse_tree(self._root, dest_root, **kwargs): + if os.path.isdir(src): + if not os.path.isdir(dest): + raise ValueError("File blocks directory: %s" % dest) + + # remove directory if it is empty. if not os.listdir(dest): - # TODO: what if empty directories were present pre-merge? shutil.rmtree(dest, ignore_errors=True) + # remove empty dir marker if present. + marker = os.path.join(dest, empty_file_name) + if os.path.exists(marker): + os.remove(marker) + elif os.path.exists(dest): if not os.path.islink(dest): raise ValueError("%s is not a link tree!" % dest) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 0eda667abc..c53e6774fc 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -51,7 +51,8 @@ test_names = ['versions', 'hg_fetch', 'mirror', 'url_extrapolate', - 'cc'] + 'cc', + 'link_tree'] def list_tests(): diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py new file mode 100644 index 0000000000..bc7c2c6b5e --- /dev/null +++ b/lib/spack/spack/test/link_tree.py @@ -0,0 +1,153 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import unittest +import shutil +import tempfile +from contextlib import closing + +from llnl.util.filesystem import * +from llnl.util.link_tree import LinkTree + +from spack.stage import Stage + + +class LinkTreeTest(unittest.TestCase): + """Tests Spack's LinkTree class.""" + + def setUp(self): + self.stage = Stage('link-tree-test') + + with working_dir(self.stage.path): + touchp('source/1') + touchp('source/a/b/2') + touchp('source/a/b/3') + touchp('source/c/4') + touchp('source/c/d/5') + touchp('source/c/d/6') + touchp('source/c/d/e/7') + + source_path = os.path.join(self.stage.path, 'source') + self.link_tree = LinkTree(source_path) + + + def tearDown(self): + if self.stage: + self.stage.destroy() + + + def check_file_link(self, filename): + self.assertTrue(os.path.isfile(filename)) + self.assertTrue(os.path.islink(filename)) + + + def check_dir(self, filename): + self.assertTrue(os.path.isdir(filename)) + + + def test_merge_to_new_directory(self): + with working_dir(self.stage.path): + self.link_tree.merge('dest') + + self.check_file_link('dest/1') + self.check_file_link('dest/a/b/2') + self.check_file_link('dest/a/b/3') + self.check_file_link('dest/c/4') + self.check_file_link('dest/c/d/5') + self.check_file_link('dest/c/d/6') + self.check_file_link('dest/c/d/e/7') + + self.link_tree.unmerge('dest') + + self.assertFalse(os.path.exists('dest')) + + + def test_merge_to_existing_directory(self): + with working_dir(self.stage.path): + + touchp('dest/x') + touchp('dest/a/b/y') + + self.link_tree.merge('dest') + + self.check_file_link('dest/1') + self.check_file_link('dest/a/b/2') + self.check_file_link('dest/a/b/3') + self.check_file_link('dest/c/4') + self.check_file_link('dest/c/d/5') + self.check_file_link('dest/c/d/6') + self.check_file_link('dest/c/d/e/7') + + self.assertTrue(os.path.isfile('dest/x')) + self.assertTrue(os.path.isfile('dest/a/b/y')) + + self.link_tree.unmerge('dest') + + self.assertTrue(os.path.isfile('dest/x')) + self.assertTrue(os.path.isfile('dest/a/b/y')) + + self.assertFalse(os.path.isfile('dest/1')) + self.assertFalse(os.path.isfile('dest/a/b/2')) + self.assertFalse(os.path.isfile('dest/a/b/3')) + self.assertFalse(os.path.isfile('dest/c/4')) + self.assertFalse(os.path.isfile('dest/c/d/5')) + self.assertFalse(os.path.isfile('dest/c/d/6')) + self.assertFalse(os.path.isfile('dest/c/d/e/7')) + + + def test_merge_with_empty_directories(self): + with working_dir(self.stage.path): + 
mkdirp('dest/f/g') + mkdirp('dest/a/b/h') + + self.link_tree.merge('dest') + self.link_tree.unmerge('dest') + + self.assertFalse(os.path.exists('dest/1')) + self.assertFalse(os.path.exists('dest/a/b/2')) + self.assertFalse(os.path.exists('dest/a/b/3')) + self.assertFalse(os.path.exists('dest/c/4')) + self.assertFalse(os.path.exists('dest/c/d/5')) + self.assertFalse(os.path.exists('dest/c/d/6')) + self.assertFalse(os.path.exists('dest/c/d/e/7')) + + self.assertTrue(os.path.isdir('dest/a/b/h')) + self.assertTrue(os.path.isdir('dest/f/g')) + + + def test_ignore(self): + with working_dir(self.stage.path): + touchp('source/.spec') + touchp('dest/.spec') + + self.link_tree.merge('dest', ignore=lambda x: x == '.spec') + self.link_tree.unmerge('dest', ignore=lambda x: x == '.spec') + + self.assertFalse(os.path.exists('dest/1')) + self.assertFalse(os.path.exists('dest/a')) + self.assertFalse(os.path.exists('dest/c')) + + self.assertTrue(os.path.isfile('source/.spec')) + self.assertTrue(os.path.isfile('dest/.spec')) -- cgit v1.2.3-70-g09d2 From 2d9190d264dd276853aca41998fffbab1baecdb0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 2 Feb 2015 06:09:35 -0800 Subject: Add extensions command. --- lib/spack/llnl/util/link_tree.py | 5 ++- lib/spack/spack/cmd/extensions.py | 9 ++-- lib/spack/spack/directory_layout.py | 2 +- lib/spack/spack/package.py | 5 +++ lib/spack/spack/packages.py | 5 +++ lib/spack/spack/spec.py | 7 +++ var/spack/packages/py-basemap/package.py | 1 + var/spack/packages/python/package.py | 77 +++++++++++++++++++++++++++++--- 8 files changed, 99 insertions(+), 12 deletions(-) diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py index 887f6f4d26..4e4e48316e 100644 --- a/lib/spack/llnl/util/link_tree.py +++ b/lib/spack/llnl/util/link_tree.py @@ -27,7 +27,7 @@ __all__ = ['LinkTree'] import os import shutil -from llnl.util.filesystem import mkdirp +from llnl.util.filesystem import * empty_file_name = '.spack-empty' @@ -93,6 +93,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): for f in os.listdir(source_path): source_child = os.path.join(source_path, f) dest_child = os.path.join(dest_path, f) + rel_child = os.path.join(rel_path, f) # Treat as a directory if os.path.isdir(source_child) and ( @@ -101,7 +102,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): # When follow_nonexisting isn't set, don't descend into dirs # in source that do not exist in dest if follow_nonexisting or os.path.exists(dest_child): - tuples = traverse_tree(source_child, dest_child, rel_path, **kwargs) + tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) for t in tuples: yield t # Treat as a file. diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 961d7e3f24..f28a388bf2 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -26,6 +26,7 @@ import sys from external import argparse import llnl.util.tty as tty +from llnl.util.tty.colify import colify import spack import spack.cmd @@ -66,10 +67,10 @@ def extensions(parser, args): exts = spack.install_layout.get_extensions(spec) if not exts: - tty.msg("%s has no activated extensions." % spec.short_spec) + tty.msg("%s has no activated extensions." 
% spec.cshort_spec) else: - tty.msg("Showing %d activated extension%s for package:" - % (len(exts), 's' if len(exts) > 1 else ''), - spec.short_spec) + tty.msg("Extensions for package %s:" % spec.cshort_spec) + colify(pkg.name for pkg in spack.db.extensions_for(spec)) print + tty.msg("%d currently activated:" % len(exts)) spack.cmd.find.display_specs(exts, mode=args.mode) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index ff327ed504..efc40a17a4 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -269,8 +269,8 @@ class SpecHashDirectoryLayout(DirectoryLayout): def get_extensions(self, spec): _check_concrete(spec) - path = self.extension_file_path(spec) extensions = set() + path = self.extension_file_path(spec) if os.path.exists(path): with closing(open(path)) as ext_file: for line in ext_file: diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 0b6bc4ce6c..b905968540 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -522,6 +522,11 @@ class Package(object): return len(self.extendees) > 0 + def extends(self, spec): + return (spec.name in self.extendees and + spec.satisfies(self.extendees[spec.name][0])) + + @property def activated(self): if not self.spec.concrete: diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index bb5a94bcab..b3049e812f 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -112,6 +112,11 @@ class PackageDB(object): return providers + @_autospec + def extensions_for(self, extendee_spec): + return [p for p in self.all_packages() if p.extends(extendee_spec)] + + def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 2f4fe9ca24..dffdccaddb 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -552,6 +552,13 @@ class Spec(object): return self.format('$_$@$%@$+$=$#') + @property + def cshort_spec(self): + """Returns a version of the spec with the dependencies hashed + instead of completely enumerated.""" + return self.format('$_$@$%@$+$=$#', color=True) + + @property def prefix(self): return Prefix(spack.install_layout.path_for_spec(self)) diff --git a/var/spack/packages/py-basemap/package.py b/var/spack/packages/py-basemap/package.py index 8955bf8827..7b6d8e7e65 100644 --- a/var/spack/packages/py-basemap/package.py +++ b/var/spack/packages/py-basemap/package.py @@ -11,6 +11,7 @@ class PyBasemap(Package): geos_version = {'1.0.7' : '3.3.3'} extends('python') + depends_on('py-setuptools') depends_on('py-numpy') depends_on('py-matplotlib') depends_on('py-pil') diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index a22bd54c82..8a6d574d9b 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,6 +1,9 @@ from spack import * +import spack import os import re +from contextlib import closing + class Python(Package): """The Python programming language.""" @@ -29,6 +32,10 @@ class Python(Package): make("install") + # ======================================================================== + # Set up environment to make install easy for python extensions. 
+ # ======================================================================== + @property def python_lib_dir(self): return os.path.join('lib', 'python%d.%d' % self.version[:2]) @@ -60,21 +67,81 @@ class Python(Package): mkdirp(module.site_packages_dir) - def make_ignore(self, args): + # ======================================================================== + # Handle specifics of activating and deactivating python modules. + # ======================================================================== + + def python_ignore(self, ext_pkg, args): """Add some ignore files to activate/deactivate args.""" orig_ignore = args.get('ignore', lambda f: False) + def ignore(filename): - return (re.search(r'/site\.pyc?$', filename) or - re.search(r'\.pth$', filename) or + # Always ignore easy-install.pth, as it needs to be merged. + patterns = [r'easy-install\.pth$'] + + # Ignore pieces of setuptools installed by other packages. + if ext_pkg.name != 'py-setuptools': + patterns.append(r'/site\.pyc?$') + patterns.append(r'setuptools\.pth') + patterns.append(r'bin/easy_install[^/]*$') + patterns.append(r'setuptools.*egg$') + + return (any(re.search(p, filename) for p in patterns) or orig_ignore(filename)) + return ignore + def write_easy_install_pth(self, extensions): + paths = [] + for ext in extensions: + ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir) + easy_pth = "%s/easy-install.pth" % ext_site_packages + + if not os.path.isfile(easy_pth): + continue + + with closing(open(easy_pth)) as f: + for line in f: + line = line.rstrip() + + # Skip lines matching these criteria + if not line: continue + if re.search(r'^(import|#)', line): continue + if (ext.name != 'py-setuptools' and + re.search(r'setuptools.*egg$', line)): continue + + paths.append(line) + + site_packages = os.path.join(self.prefix, self.site_packages_dir) + main_pth = "%s/easy-install.pth" % site_packages + + if not paths: + if os.path.isfile(main_pth): + os.remove(main_pth) + + else: + with closing(open(main_pth, 'w')) as f: + f.write("import sys; sys.__plen = len(sys.path)\n") + for path in paths: + f.write("%s\n" % path) + f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; " + "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n") + + def activate(self, ext_pkg, **args): - args.update(ignore=self.make_ignore(args)) + args.update(ignore=self.python_ignore(ext_pkg, args)) super(Python, self).activate(ext_pkg, **args) + extensions = set(spack.install_layout.get_extensions(self.spec)) + extensions.add(ext_pkg.spec) + self.write_easy_install_pth(extensions) + def deactivate(self, ext_pkg, **args): - args.update(ignore=self.make_ignore(args)) + args.update(ignore=self.python_ignore(ext_pkg, args)) super(Python, self).deactivate(ext_pkg, **args) + + extensions = set(spack.install_layout.get_extensions(self.spec)) + extensions.remove(ext_pkg.spec) + self.write_easy_install_pth(extensions) -- cgit v1.2.3-70-g09d2 From 5bde8359e8378bf8595a4bef343d1f50258f663d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 2 Feb 2015 07:58:52 -0800 Subject: More information in extensions command. 
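The command now reports three groups for an extendable package: the
extensions Spack knows about, the ones that are installed, and the ones
that are currently activated.  Roughly, the new flow is (illustrative
sketch only; the names follow the diff below):

    extensions = spack.db.extensions_for(spec)               # known extension packages
    installed  = [s.spec for s in spack.db.installed_extensions_for(spec)]
    activated  = spack.install_layout.get_extensions(spec)
    # each group is printed with colify() / spack.cmd.find.display_specs()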
--- lib/spack/spack/cmd/extensions.py | 39 ++++++++++++++++++++++++++++++--------- lib/spack/spack/packages.py | 6 ++++++ 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index f28a388bf2..f6ccd7b515 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -54,9 +54,14 @@ def extensions(parser, args): if not args.spec: tty.die("extensions requires a package spec.") + # Checks spec = spack.cmd.parse_specs(args.spec) if len(spec) > 1: tty.die("Can only list extensions for one package.") + + if not spec[0].package.extendable: + tty.die("%s is not an extendable package." % spec[0].name) + spec = spack.cmd.disambiguate_spec(spec[0]) if not spec.package.extendable: @@ -65,12 +70,28 @@ def extensions(parser, args): if not args.mode: args.mode = 'short' - exts = spack.install_layout.get_extensions(spec) - if not exts: - tty.msg("%s has no activated extensions." % spec.cshort_spec) - else: - tty.msg("Extensions for package %s:" % spec.cshort_spec) - colify(pkg.name for pkg in spack.db.extensions_for(spec)) - print - tty.msg("%d currently activated:" % len(exts)) - spack.cmd.find.display_specs(exts, mode=args.mode) + # List package names of extensions + extensions = spack.db.extensions_for(spec) + if not extensions: + tty.msg("%s has no extensions." % spec.cshort_spec) + return + tty.msg("%s extensions:" % spec.cshort_spec) + colify(ext.name for ext in extensions) + + # List specs of installed extensions. + installed = [s.spec for s in spack.db.installed_extensions_for(spec)] + print + if not installed: + tty.msg("None activated.") + return + tty.msg("%d installed:" % len(installed)) + spack.cmd.find.display_specs(installed, mode=args.mode) + + # List specs of activated extensions. + activated = spack.install_layout.get_extensions(spec) + print + if not activated: + tty.msg("None activated.") + return + tty.msg("%d currently activated:" % len(exts)) + spack.cmd.find.display_specs(installed, mode=args.mode) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index b3049e812f..7ef8135c1a 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -117,6 +117,12 @@ class PackageDB(object): return [p for p in self.all_packages() if p.extends(extendee_spec)] + @_autospec + def installed_extensions_for(self, extendee_spec): + return [s.package for s in self.installed_package_specs() + if s.package.extends(extendee_spec)] + + def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. 
This is the directory that contains its package.py file.""" -- cgit v1.2.3-70-g09d2 From a9e189972a547fb5f7996f0f5acafc1200702b72 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Feb 2015 15:42:41 -0800 Subject: Bugfix in spack extensions --- lib/spack/spack/cmd/extensions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index f6ccd7b515..3b189895b0 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -93,5 +93,5 @@ def extensions(parser, args): if not activated: tty.msg("None activated.") return - tty.msg("%d currently activated:" % len(exts)) - spack.cmd.find.display_specs(installed, mode=args.mode) + tty.msg("%d currently activated:" % len(activated)) + spack.cmd.find.display_specs(activated, mode=args.mode) -- cgit v1.2.3-70-g09d2 From 3a3e4d4391977327bca18c416205971dd6a85509 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Feb 2015 15:47:03 -0800 Subject: Do not automatically activate extensions on install. --- lib/spack/spack/hooks/extensions.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index 718b24b965..9d6fa23d03 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -26,15 +26,11 @@ import spack -def post_install(pkg): - if pkg.is_extension: - pkg.do_activate() - - def pre_uninstall(pkg): # Need to do this b/c uninstall does not automatically do it. # TODO: store full graph info in stored .spec file. pkg.spec.normalize() if pkg.is_extension: - pkg.do_deactivate() + if pkg.activated: + pkg.do_deactivate() -- cgit v1.2.3-70-g09d2 From 457f2d1d51b2bf008f837bf3ce8ee3cb47d5ba6b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 6 Feb 2015 08:37:22 -0800 Subject: Fix libpng to use a better URL Sourceforge URLs like this eventually die when the libpng version is bumped: http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download But ones like this give you a "permanently moved", which curl -L will follow: http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz --- var/spack/packages/libpng/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/libpng/package.py b/var/spack/packages/libpng/package.py index a6d9bf0b46..c148a3d58c 100644 --- a/var/spack/packages/libpng/package.py +++ b/var/spack/packages/libpng/package.py @@ -3,12 +3,13 @@ from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" - url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download" + url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + version('1.6.15', '829a256f3de9307731d4f52dc071916d') + version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) - make() make("install") -- cgit v1.2.3-70-g09d2 From 5cc369c2b831446f1afaaba41cbf0dbdba75b4ed Mon Sep 17 00:00:00 2001 From: "Gregory L.
Lee" Date: Fri, 6 Feb 2015 16:35:35 -0800 Subject: add dependent packages to PYTHONPATH for build --- var/spack/packages/python/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 8a6d574d9b..23b528b089 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -66,6 +66,11 @@ class Python(Package): # Make the site packages directory if it does not exist already. mkdirp(module.site_packages_dir) + # Add dependent packages' site-packages directory to PYTHONPATH + for d in ext_spec.traverse(): + if d.package.extends(self.spec): + os.environ['PYTHONPATH'] += ':' + os.path.join(d.prefix, self.site_packages_dir) + # ======================================================================== # Handle specifics of activating and deactivating python modules. -- cgit v1.2.3-70-g09d2 From d95d48bbe69eed2772bea8d155ccc4f8d708e4d2 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Fri, 6 Feb 2015 16:41:43 -0800 Subject: py-mako and fix for setup-env.sh --- share/spack/setup-env.sh | 4 ++-- var/spack/packages/py-mako/package.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 var/spack/packages/py-mako/package.py diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 91b1dc4630..b2bcbaf6c7 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -165,8 +165,8 @@ fi # # Set up modules and dotkit search paths in the user environment # -_sp_share_dir="$(dirname $_sp_source_file)" -_sp_prefix="$(dirname $(dirname $_sp_share_dir))" +_sp_share_dir=$(cd "$(dirname $_sp_source_file)" && pwd) +_sp_prefix=$(cd "$(dirname $(dirname $_sp_share_dir))" && pwd) # TODO: fix SYS_TYPE to something non-LLNL-specific _spack_pathadd DK_NODE "$_sp_share_dir/dotkit/$SYS_TYPE" diff --git a/var/spack/packages/py-mako/package.py b/var/spack/packages/py-mako/package.py new file mode 100644 index 0000000000..3e91ffd8e5 --- /dev/null +++ b/var/spack/packages/py-mako/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyMako(Package): + """A super-fast templating language that borrows the best + ideas from the existing templating languages.""" + + homepage = "https://pypi.python.org/pypi/mako" + url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz" + + version('1.0.1', '9f0aafd177b039ef67b90ea350497a54') + + depends_on('py-setuptools') + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From 5fdf5438ea4375dd5d9ceee7c5af1a4cf6896d53 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Fri, 6 Feb 2015 16:55:48 -0800 Subject: flex and bison --- var/spack/packages/bison/package.py | 17 +++++++++++++++++ var/spack/packages/flex/package.py | 15 +++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 var/spack/packages/bison/package.py create mode 100644 var/spack/packages/flex/package.py diff --git a/var/spack/packages/bison/package.py b/var/spack/packages/bison/package.py new file mode 100644 index 0000000000..7c526fb958 --- /dev/null +++ b/var/spack/packages/bison/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Bison(Package): + """Bison is a general-purpose parser generator that converts + an annotated context-free grammar into a deterministic LR or + generalized LR (GLR) parser employing LALR(1) parser tables.""" + + homepage = "http://www.gnu.org/software/bison/" + url = 
"http://ftp.gnu.org/gnu/bison/bison-3.0.tar.gz" + + version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/packages/flex/package.py b/var/spack/packages/flex/package.py new file mode 100644 index 0000000000..b065904912 --- /dev/null +++ b/var/spack/packages/flex/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Flex(Package): + """Flex is a tool for generating scanners.""" + + homepage = "http://flex.sourceforge.net/" + url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz" + + version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") -- cgit v1.2.3-70-g09d2 From 676cc84c9e0a845fab117fde4ff0b1dca0b2d792 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Fri, 6 Feb 2015 17:24:55 -0800 Subject: more mesa dependencies --- var/spack/packages/dri2proto/package.py | 14 ++++++++++++++ var/spack/packages/libdrm/package.py | 22 ++++++++++++++++++++++ var/spack/packages/libxcb/package.py | 21 +++++++++++++++++++++ var/spack/packages/libxshmfence/package.py | 16 ++++++++++++++++ var/spack/packages/xcb-proto/package.py | 15 +++++++++++++++ 5 files changed, 88 insertions(+) create mode 100644 var/spack/packages/dri2proto/package.py create mode 100644 var/spack/packages/libdrm/package.py create mode 100644 var/spack/packages/libxcb/package.py create mode 100644 var/spack/packages/libxshmfence/package.py create mode 100644 var/spack/packages/xcb-proto/package.py diff --git a/var/spack/packages/dri2proto/package.py b/var/spack/packages/dri2proto/package.py new file mode 100644 index 0000000000..11dfa568e2 --- /dev/null +++ b/var/spack/packages/dri2proto/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Dri2proto(Package): + """DRI2 Protocol Headers.""" + homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/" + url = "http://xorg.freedesktop.org/releases/individual/proto/dri2proto-2.8.tar.gz" + + version('2.8', '19ea18f63d8ae8053c9fa84b60365b77') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/packages/libdrm/package.py b/var/spack/packages/libdrm/package.py new file mode 100644 index 0000000000..9164db8141 --- /dev/null +++ b/var/spack/packages/libdrm/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Libdrm(Package): + """A userspace library for accessing the DRM, direct + rendering manager, on Linux, BSD and other operating + systems that support the ioctl interface.""" + + homepage = "http://dri.freedesktop.org/libdrm/" # no real website... + url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" + + version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. 
+ configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/packages/libxcb/package.py b/var/spack/packages/libxcb/package.py new file mode 100644 index 0000000000..521cd0d475 --- /dev/null +++ b/var/spack/packages/libxcb/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Libxcb(Package): + """The X protocol C-language Binding (XCB) is a replacement + for Xlib featuring a small footprint, latency hiding, direct + access to the protocol, improved threading support, and + extensibility.""" + + homepage = "http://xcb.freedesktop.org/" + url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz" + + version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb') + + depends_on("python") + depends_on("xcb-proto") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/packages/libxshmfence/package.py b/var/spack/packages/libxshmfence/package.py new file mode 100644 index 0000000000..3aa2448b46 --- /dev/null +++ b/var/spack/packages/libxshmfence/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libxshmfence(Package): + """This is a tiny library that exposes a event API on top of Linux + futexes.""" + + homepage = "http://keithp.com/blogs/dri3_extension/" # not really... + url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz" + + version('1.2', 'f0b30c0fc568b22ec524859ee28556f1') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/packages/xcb-proto/package.py b/var/spack/packages/xcb-proto/package.py new file mode 100644 index 0000000000..17a94bd892 --- /dev/null +++ b/var/spack/packages/xcb-proto/package.py @@ -0,0 +1,15 @@ +from spack import * + +class XcbProto(Package): + """Protocol for libxcb""" + + homepage = "http://xcb.freedesktop.org/" + url = "http://xcb.freedesktop.org/dist/xcb-proto-1.11.tar.gz" + + version('1.11', 'c8c6cb72c84f58270f4db1f39607f66a') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") -- cgit v1.2.3-70-g09d2 From 932f3930f43e10f22c06ede6bb963a23989443c1 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Sat, 7 Feb 2015 09:18:34 -0800 Subject: util-linux added --- var/spack/packages/libdrm/package.py | 5 ----- var/spack/packages/util-linux/package.py | 20 ++++++++++++++++++++ 2 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 var/spack/packages/util-linux/package.py diff --git a/var/spack/packages/libdrm/package.py b/var/spack/packages/libdrm/package.py index 9164db8141..3a657e8ecc 100644 --- a/var/spack/packages/libdrm/package.py +++ b/var/spack/packages/libdrm/package.py @@ -10,13 +10,8 @@ class Libdrm(Package): version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') - # FIXME: Add dependencies if this package requires them. - # depends_on("foo") - def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. 
configure("--prefix=%s" % prefix) - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/packages/util-linux/package.py b/var/spack/packages/util-linux/package.py new file mode 100644 index 0000000000..cb7ceabf57 --- /dev/null +++ b/var/spack/packages/util-linux/package.py @@ -0,0 +1,20 @@ +from spack import * +import os + +class UtilLinux(Package): + """Util-linux is a suite of essential utilities for any Linux system.""" + + homepage = "http://freecode.com/projects/util-linux" + url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.25/util-linux-2.25.tar.gz" + + version('2.25', 'f6d7fc6952ec69c4dc62c8d7c59c1d57') + + depends_on("python@2.7:") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib, + "--disable-use-tty-group") + + make() + make("install") -- cgit v1.2.3-70-g09d2 From 1605e04d444583284a19462206755ecacb219d3b Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Sat, 7 Feb 2015 22:08:50 -0800 Subject: mesa and systemd (systemd not working yet) --- var/spack/packages/mesa/package.py | 26 ++++++++++++++++++++++++ var/spack/packages/systemd/package.py | 38 +++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) create mode 100644 var/spack/packages/mesa/package.py create mode 100644 var/spack/packages/systemd/package.py diff --git a/var/spack/packages/mesa/package.py b/var/spack/packages/mesa/package.py new file mode 100644 index 0000000000..df28d87700 --- /dev/null +++ b/var/spack/packages/mesa/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Mesa(Package): + """Mesa is an open-source implementation of the OpenGL + specification - a system for rendering interactive 3D graphics.""" + + homepage = "http://www.mesa3d.org" + url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" + + version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') + + depends_on("py-mako") + depends_on("flex") + depends_on("bison") + depends_on("libdrm") + depends_on("dri2proto") + depends_on("libxcb") + depends_on("libxshmfence") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. + configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/packages/systemd/package.py b/var/spack/packages/systemd/package.py new file mode 100644 index 0000000000..647b3bda7b --- /dev/null +++ b/var/spack/packages/systemd/package.py @@ -0,0 +1,38 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install systemd +# +# You can always get back here to change things with: +# +# spack edit systemd +# +# See the spack documentation for more information on building +# packages. 
+# +from spack import * + +class Systemd(Package): + """Systemd is a suite of basic building blocks for a Linux system.""" + + homepage = "http://www.freedesktop.org/wiki/Software/systemd/" + url = "http://www.freedesktop.org/software/systemd/systemd-218.tar.xz" + + version('218', '4e2c511b0a7932d7fc9d79822273aac6') + + depends_on("gperf") + depends_on("coreutils@8.16:") # ln --relative + depends_on("util-linux") # libmount + depends_on("python@2.7:") + depends_on("gcc@4.5:") # pragma gcc diagnostic not allowed inside functions + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "CC=%s/gcc -std=gnu99" % spec['gcc'].prefix.bin) + + make() + make("install") -- cgit v1.2.3-70-g09d2 From cc684a3ebeaa939b9ae87474991e210aab7c3185 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Sun, 8 Feb 2015 13:34:45 -0800 Subject: older mesa for 2.6 kernel (not workin yet) --- var/spack/packages/mesa/package.py | 8 ++++---- var/spack/packages/systemd/package.py | 38 ----------------------------------- 2 files changed, 4 insertions(+), 42 deletions(-) delete mode 100644 var/spack/packages/systemd/package.py diff --git a/var/spack/packages/mesa/package.py b/var/spack/packages/mesa/package.py index df28d87700..6da13e52b2 100644 --- a/var/spack/packages/mesa/package.py +++ b/var/spack/packages/mesa/package.py @@ -5,9 +5,11 @@ class Mesa(Package): specification - a system for rendering interactive 3D graphics.""" homepage = "http://www.mesa3d.org" - url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" + url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/7.x/7.11.2/MesaLib-7.11.2.tar.gz" + # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" - version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') + # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') + version('7.11.2', 'b9e84efee3931c0acbccd1bb5a860554') depends_on("py-mako") depends_on("flex") @@ -18,9 +20,7 @@ class Mesa(Package): depends_on("libxshmfence") def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. configure("--prefix=%s" % prefix) - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/packages/systemd/package.py b/var/spack/packages/systemd/package.py deleted file mode 100644 index 647b3bda7b..0000000000 --- a/var/spack/packages/systemd/package.py +++ /dev/null @@ -1,38 +0,0 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install systemd -# -# You can always get back here to change things with: -# -# spack edit systemd -# -# See the spack documentation for more information on building -# packages. 
-# -from spack import * - -class Systemd(Package): - """Systemd is a suite of basic building blocks for a Linux system.""" - - homepage = "http://www.freedesktop.org/wiki/Software/systemd/" - url = "http://www.freedesktop.org/software/systemd/systemd-218.tar.xz" - - version('218', '4e2c511b0a7932d7fc9d79822273aac6') - - depends_on("gperf") - depends_on("coreutils@8.16:") # ln --relative - depends_on("util-linux") # libmount - depends_on("python@2.7:") - depends_on("gcc@4.5:") # pragma gcc diagnostic not allowed inside functions - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "CC=%s/gcc -std=gnu99" % spec['gcc'].prefix.bin) - - make() - make("install") -- cgit v1.2.3-70-g09d2 From 9e878075aceace1439e85b1d75770a7a6f248566 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Sun, 8 Feb 2015 16:09:13 -0800 Subject: mesa 8.0.5 working --- var/spack/packages/libdrm/package.py | 1 + var/spack/packages/llvm/package.py | 6 ++++++ var/spack/packages/mesa/package.py | 27 +++++++++++++++++---------- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/var/spack/packages/libdrm/package.py b/var/spack/packages/libdrm/package.py index 3a657e8ecc..00736b7811 100644 --- a/var/spack/packages/libdrm/package.py +++ b/var/spack/packages/libdrm/package.py @@ -9,6 +9,7 @@ class Libdrm(Package): url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') + version('2.4.33', '86e4e3debe7087d5404461e0032231c8') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py index c7a10df55a..69354a5c90 100644 --- a/var/spack/packages/llvm/package.py +++ b/var/spack/packages/llvm/package.py @@ -35,6 +35,12 @@ class Llvm(Package): url = "http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz" version('3.4.2', 'a20669f75967440de949ac3b1bad439c') + version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', + url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') + version('2.9', '793138412d2af2c7c7f54615f8943771', + url='http://llvm.org/releases/2.9/llvm-2.9.tgz') + version('2.8', '220d361b4d17051ff4bb21c64abe05ba', + url='http://llvm.org/releases/2.8/llvm-2.8.tgz') def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag diff --git a/var/spack/packages/mesa/package.py b/var/spack/packages/mesa/package.py index 6da13e52b2..2dba878a77 100644 --- a/var/spack/packages/mesa/package.py +++ b/var/spack/packages/mesa/package.py @@ -5,19 +5,26 @@ class Mesa(Package): specification - a system for rendering interactive 3D graphics.""" homepage = "http://www.mesa3d.org" - url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/7.x/7.11.2/MesaLib-7.11.2.tar.gz" + url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz" # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') - version('7.11.2', 'b9e84efee3931c0acbccd1bb5a860554') - - depends_on("py-mako") - depends_on("flex") - depends_on("bison") - depends_on("libdrm") - depends_on("dri2proto") - depends_on("libxcb") - depends_on("libxshmfence") + version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2') + + # mesa 7.x, 8.x, 9.x + depends_on("libdrm@2.4.33") + depends_on("llvm@3.0") + + # patch("llvm-fixes.patch") # using newer llvm + + # mesa 10.x + # depends_on("py-mako") + # depends_on("flex") + # depends_on("bison") + # depends_on("dri2proto") + # depends_on("libxcb") + # 
depends_on("libxshmfence") + def install(self, spec, prefix): configure("--prefix=%s" % prefix) -- cgit v1.2.3-70-g09d2 From befe72b9b9d1f4107eef8f24e6f194f27a15bca4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 19:36:30 -0800 Subject: directory_layout now raises an error when an install fails. --- lib/spack/spack/directory_layout.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index efc40a17a4..37740720a2 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -109,12 +109,17 @@ class DirectoryLayout(object): def remove_path_for_spec(self, spec): - """Removes a prefix and any empty parent directories from the root.""" + """Removes a prefix and any empty parent directories from the root. + Raised RemoveFailedError if something goes wrong. + """ path = self.path_for_spec(spec) assert(path.startswith(self.root)) if os.path.exists(path): - shutil.rmtree(path, True) + try: + shutil.rmtree(path) + except exceptions.OSError, e: + raise RemoveFailedError(spec, path, e) path = os.path.dirname(path) while path != self.root: @@ -330,6 +335,15 @@ class SpecHashCollisionError(DirectoryLayoutError): % installed_spec, new_spec) +class RemoveFailedError(DirectoryLayoutError): + """Raised when a DirectoryLayout cannot remove an install prefix.""" + def __init__(self, installed_spec, prefix, error): + super(RemoveFailedError, self).__init__( + 'Could not remove prefix %s for %s : %s' + % prefix, installed_spec.short_spec, error) + self.cause = error + + class InconsistentInstallDirectoryError(DirectoryLayoutError): """Raised when a package seems to be installed to the wrong place.""" def __init__(self, message): @@ -370,3 +384,5 @@ class NoSuchExtensionError(DirectoryLayoutError): super(NoSuchExtensionError, self).__init__( "%s cannot be removed from %s because it's not installed."% ( extension_spec.short_spec, spec.short_spec)) + + -- cgit v1.2.3-70-g09d2 From e51e01f4f066f9b0f412354cb8abab3642845d43 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 19:39:36 -0800 Subject: Cleaned up python to remove redundant line. --- var/spack/packages/python/package.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 23b528b089..8bffbf393c 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -60,16 +60,16 @@ class Python(Package): module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) - # Add site packages directory to the PYTHONPATH - os.environ['PYTHONPATH'] = module.site_packages_dir - # Make the site packages directory if it does not exist already. mkdirp(module.site_packages_dir) - # Add dependent packages' site-packages directory to PYTHONPATH + # Set PYTHONPATH to include site-packages dir for the + # extension and any other python extensions it depends on. 
+ python_paths = [] for d in ext_spec.traverse(): if d.package.extends(self.spec): - os.environ['PYTHONPATH'] += ':' + os.path.join(d.prefix, self.site_packages_dir) + python_paths.append(os.path.join(d.prefix, self.site_packages_dir)) + os.environ['PYTHONPATH'] = ':'.join(python_paths) # ======================================================================== -- cgit v1.2.3-70-g09d2 From 60a385d4a44b16bce77725dbb4f71a26aa610ffc Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 19:40:28 -0800 Subject: Minor textual error in extensions command. --- lib/spack/spack/cmd/extensions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 3b189895b0..c6b6145321 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -82,7 +82,7 @@ def extensions(parser, args): installed = [s.spec for s in spack.db.installed_extensions_for(spec)] print if not installed: - tty.msg("None activated.") + tty.msg("None installed.") return tty.msg("%d installed:" % len(installed)) spack.cmd.find.display_specs(installed, mode=args.mode) -- cgit v1.2.3-70-g09d2 From 20ec80295dbf2a36b633860c139d113f9c0e6388 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 19:41:17 -0800 Subject: setup_extension_environment is now setup_dependent_environment. - other packages, like Qt, can now use this to set up relevant build variables and env vars for their dependencies. - not just extensions anymore. --- lib/spack/spack/package.py | 14 ++++++++------ var/spack/packages/python/package.py | 2 +- var/spack/packages/qt/package.py | 6 ++++++ 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b905968540..6e319a1f87 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -829,10 +829,10 @@ class Package(object): self.stage.chdir_to_source() build_env.setup_package(self) - # Allow extendees to further set up the environment. - if self.is_extension: - self.extendee_spec.package.setup_extension_environment( - self.module, self.extendee_spec, self.spec) + # Allow dependencies to further set up the environment. + for dep_spec in self.spec.traverse(root=False): + dep_spec.package.setup_dependent_environment( + self.module, dep_spec, self.spec) if fake_install: self.do_fake_install() @@ -910,8 +910,8 @@ class Package(object): fromlist=[self.__class__.__name__]) - def setup_extension_environment(self, module, spec, ext_spec): - """Called before the install() method of extensions. + def setup_dependent_environment(self, module, spec, dependent_spec): + """Called before the install() method of dependents. Default implementation does nothing, but this can be overridden by an extendable package to set up the install @@ -930,6 +930,8 @@ class Package(object): put a 'python' Execuable object in the module scope for the extension package to simplify extension installs. + 3. A lot of Qt extensions need QTDIR set. This can be used to do that. 
+ """ pass diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 8bffbf393c..eed81d095c 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -46,7 +46,7 @@ class Python(Package): return os.path.join(self.python_lib_dir, 'site-packages') - def setup_extension_environment(self, module, spec, ext_spec): + def setup_dependent_environment(self, module, spec, ext_spec): """Called before python modules' install() methods. In most cases, extensions will only need to have one line:: diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 6a55c89701..fcbcd2491a 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -1,3 +1,4 @@ +import os from spack import * class Qt(Package): @@ -20,6 +21,11 @@ class Qt(Package): depends_on("libmng") depends_on("jpeg") + def setup_dependent_environment(self, module, spec, dep_spec): + """Dependencies of Qt find it using the QTDIR environment variable.""" + os.environ['QTDIR'] = self.prefix + + def patch(self): # Fix qmake compilers in the default mkspec qmake_conf = 'mkspecs/common/g++-base.conf' -- cgit v1.2.3-70-g09d2 From f81b136547347997d043bd99adf9d7013f097c69 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 19:43:10 -0800 Subject: import fix in cmd/clean --- lib/spack/spack/cmd/clean.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 79dd91c5bf..ec3b221988 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from external import argparse +import subprocess import llnl.util.tty as tty -- cgit v1.2.3-70-g09d2 From c077f0570531aac76b9182a96a2a37ee0e633989 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 22:01:00 -0800 Subject: Move dpeendency environment setup to build_environemnt. --- lib/spack/spack/build_environment.py | 5 +++++ lib/spack/spack/package.py | 5 ----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 45353ec640..cabde7dc86 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -207,3 +207,8 @@ def setup_package(pkg): set_compiler_environment_variables(pkg) set_build_environment_variables(pkg) set_module_variables_for_package(pkg) + + # Allow dependencies to set up environment as well. + for dep_spec in pkg.spec.traverse(root=False): + dep_spec.package.setup_dependent_environment( + pkg.module, dep_spec, pkg.spec) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 6e319a1f87..b18d054990 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -829,11 +829,6 @@ class Package(object): self.stage.chdir_to_source() build_env.setup_package(self) - # Allow dependencies to further set up the environment. - for dep_spec in self.spec.traverse(root=False): - dep_spec.package.setup_dependent_environment( - self.module, dep_spec, self.spec) - if fake_install: self.do_fake_install() else: -- cgit v1.2.3-70-g09d2 From aae364b4c9ffdf8949a8c7ce5a7e63c9772703c0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 8 Feb 2015 23:26:15 -0800 Subject: "spack extensions" shows total extension count. 
--- lib/spack/spack/cmd/extensions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index c6b6145321..ae73d8ac55 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -75,7 +75,8 @@ def extensions(parser, args): if not extensions: tty.msg("%s has no extensions." % spec.cshort_spec) return - tty.msg("%s extensions:" % spec.cshort_spec) + tty.msg(spec.cshort_spec) + tty.msg("%d extensions:" % len(extensions)) colify(ext.name for ext in extensions) # List specs of installed extensions. -- cgit v1.2.3-70-g09d2 From 27617670f013bd1dbde24079bd7ae38331530695 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Mon, 9 Feb 2015 00:01:07 -0800 Subject: qt with hardware accelerated opengl working --- var/spack/packages/icu4c/package.py | 17 +++++++++++++++++ var/spack/packages/qt/package.py | 20 ++++++++++++++++++-- var/spack/packages/ruby/package.py | 16 ++++++++++++++++ 3 files changed, 51 insertions(+), 2 deletions(-) create mode 100644 var/spack/packages/icu4c/package.py create mode 100644 var/spack/packages/ruby/package.py diff --git a/var/spack/packages/icu4c/package.py b/var/spack/packages/icu4c/package.py new file mode 100644 index 0000000000..55b44463b2 --- /dev/null +++ b/var/spack/packages/icu4c/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Icu4c(Package): + """ICU is a mature, widely used set of C/C++ and Java libraries + providing Unicode and Globalization support for software applications.""" + + homepage = "http://site.icu-project.org/" + url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz" + + version('54_1', 'e844caed8f2ca24c088505b0d6271bc0') + + def install(self, spec, prefix): + cd("source") + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 96e484b995..c8b19d07f5 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -1,4 +1,5 @@ from spack import * +import os class Qt(Package): """Qt is a comprehensive cross-platform C++ application framework.""" @@ -29,7 +30,16 @@ class Qt(Package): depends_on("libmng") depends_on("jpeg") - depends_on("gperf") # Needed to build Qt with webkit. + # Webkit + # depends_on("gperf") + # depends_on("flex") + # depends_on("bison") + # depends_on("ruby") + # depends_on("icu4c") + + # OpenGL hardware acceleration + depends_on("mesa") + depends_on("libxcb") def patch(self): if self.spec.satisfies('@4'): @@ -46,13 +56,19 @@ class Qt(Package): def install(self, spec, prefix): + # Apparently this is the only way to + # "truly" get rid of webkit compiles now... 
+ os.rename("qtwebkit","no-qtwebkit") + os.rename("qtwebkit-examples","no-qtwebkit-examples") configure('-v', '-confirm-license', '-opensource', '-prefix', prefix, '-openssl-linked', '-dbus-linked', - '-fast', + #'-fast', + '-opengl', + '-qt-xcb', '-optimized-qmake', '-no-pch', # phonon required for py-pyqt4 diff --git a/var/spack/packages/ruby/package.py b/var/spack/packages/ruby/package.py new file mode 100644 index 0000000000..718fd0a3be --- /dev/null +++ b/var/spack/packages/ruby/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Ruby(Package): + """A dynamic, open source programming language with a focus on + simplicity and productivity.""" + + homepage = "https://www.ruby-lang.org/" + url = "http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz" + + version('2.2.0', 'cd03b28fd0b555970f5c4fd481700852') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") -- cgit v1.2.3-70-g09d2 From d1d0b85d80a1cf46e6417c0d467da20bbea1eba0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 9 Feb 2015 01:13:56 -0800 Subject: Add Alfredo to contributors. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7a13dba2a8..74a327b517 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,7 @@ people: * David Beckingsale * David Boehme + * Alfredo Gimenez * Luc Jaulmes * Matt Legendre * Greg Lee -- cgit v1.2.3-70-g09d2 From 25af341954fa7c9a88943fe24cad88275ee00795 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 9 Feb 2015 02:54:49 -0800 Subject: Python package improvements. --- var/spack/packages/geos/package.py | 31 +++++++++++++++++++++++++++++ var/spack/packages/py-basemap/package.py | 9 ++------- var/spack/packages/py-biopython/package.py | 1 + var/spack/packages/py-gnuplot/package.py | 1 + var/spack/packages/py-libxml2/package.py | 1 + var/spack/packages/py-matplotlib/package.py | 3 +++ var/spack/packages/py-pyside/package.py | 31 +++++++++++++++++++++++++---- var/spack/packages/py-shiboken/package.py | 21 +++++++++++++++++++ var/spack/packages/python/package.py | 5 +++-- 9 files changed, 90 insertions(+), 13 deletions(-) create mode 100644 var/spack/packages/geos/package.py create mode 100644 var/spack/packages/py-shiboken/package.py diff --git a/var/spack/packages/geos/package.py b/var/spack/packages/geos/package.py new file mode 100644 index 0000000000..4a2657e32f --- /dev/null +++ b/var/spack/packages/geos/package.py @@ -0,0 +1,31 @@ +from spack import * + +class Geos(Package): + """GEOS (Geometry Engine - Open Source) is a C++ port of the Java + Topology Suite (JTS). As such, it aims to contain the complete + functionality of JTS in C++. 
This includes all the OpenGIS + Simple Features for SQL spatial predicate functions and spatial + operators, as well as specific JTS enhanced topology functions.""" + + homepage = "http://trac.osgeo.org/geos/" + url = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2" + + version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae') + version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e') + version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488') + version('3.3.9', '4794c20f07721d5011c93efc6ccb8e4e') + version('3.3.8', '75be476d0831a2d14958fed76ca266de') + version('3.3.7', '95ab996d22672b067d92c7dee2170460') + version('3.3.6', '6fadfb941541875f4976f75fb0bbc800') + version('3.3.5', '2ba61afb7fe2c5ddf642d82d7b16e75b') + version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1') + version('3.3.3', '8454e653d7ecca475153cc88fd1daa26') + + extends('python') + depends_on('swig') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-python") + make() + make("install") diff --git a/var/spack/packages/py-basemap/package.py b/var/spack/packages/py-basemap/package.py index 7b6d8e7e65..45f1085ba1 100644 --- a/var/spack/packages/py-basemap/package.py +++ b/var/spack/packages/py-basemap/package.py @@ -8,18 +8,13 @@ class PyBasemap(Package): version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8') - geos_version = {'1.0.7' : '3.3.3'} - extends('python') depends_on('py-setuptools') depends_on('py-numpy') depends_on('py-matplotlib') depends_on('py-pil') + depends_on("geos") def install(self, spec, prefix): - with working_dir('geos-%s' % self.geos_version[str(self.version)]): - configure("--prefix=" + prefix) - make() - make("install") - os.environ['GEOS_DIR'] = prefix + env['GEOS_DIR'] = spec['geos'].prefix python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-biopython/package.py b/var/spack/packages/py-biopython/package.py index 2ed04c389e..8ecaf48626 100644 --- a/var/spack/packages/py-biopython/package.py +++ b/var/spack/packages/py-biopython/package.py @@ -9,6 +9,7 @@ class PyBiopython(Package): extends('python') depends_on('py-mx') + depends_on('py-numpy') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-gnuplot/package.py b/var/spack/packages/py-gnuplot/package.py index 0a2c073a49..ede4472c03 100644 --- a/var/spack/packages/py-gnuplot/package.py +++ b/var/spack/packages/py-gnuplot/package.py @@ -8,6 +8,7 @@ class PyGnuplot(Package): version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1') extends('python') + depends_on('py-numpy') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-libxml2/package.py b/var/spack/packages/py-libxml2/package.py index 0dcefbd9cf..e645acb5dd 100644 --- a/var/spack/packages/py-libxml2/package.py +++ b/var/spack/packages/py-libxml2/package.py @@ -8,6 +8,7 @@ class PyLibxml2(Package): version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3') extends('python') + depends_on('libxml2') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py index 5979ceeab0..8b8684c563 100644 --- a/var/spack/packages/py-matplotlib/package.py +++ b/var/spack/packages/py-matplotlib/package.py @@ -17,9 +17,12 @@ class PyMatplotlib(Package): depends_on('py-pytz') depends_on('py-nose') depends_on('py-numpy') + depends_on('qt') + def install(self, spec, prefix): python('setup.py', 'install', 
'--prefix=%s' % prefix) + if str(self.version) == '1.4.2': # hack to fix configuration file config_file = None diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py index b01e16d7e6..7528a0bf72 100644 --- a/var/spack/packages/py-pyside/package.py +++ b/var/spack/packages/py-pyside/package.py @@ -10,9 +10,32 @@ class PyPyside(Package): version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d') extends('python') + depends_on('py-setuptools') + depends_on('qt@:4') + + + def patch(Self): + """Undo PySide RPATH handling and add Spack RPATH.""" + # Add Spack's standard CMake args to the sub-builds. + # They're called BY setup.py so we have to patch it. + filter_file( + r'OPTION_CMAKE,', + r'OPTION_CMAKE, ' + ( + '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(self.rpath)), + 'setup.py') + + # PySide tries to patch ELF files to remove RPATHs + # Disable this and go with the one we set. + filter_file( + r'rpath_cmd\(pyside_path, srcpath\)', + r'#rpath_cmd(pyside_path, srcpath)', + 'pyside_postinstall.py') + def install(self, spec, prefix): - qmake_path = '/usr/lib64/qt4/bin/qmake' - if not os.path.exists(qmake_path): - raise spack.package.InstallError("Failed to find qmake in %s" % qmake_path) - python('setup.py', 'install', '--prefix=%s' % prefix, '--qmake=%s' % qmake_path) + python('setup.py', 'install', + '--prefix=%s' % prefix, + '--jobs=%s' % make_jobs) + + diff --git a/var/spack/packages/py-shiboken/package.py b/var/spack/packages/py-shiboken/package.py new file mode 100644 index 0000000000..47abe64e65 --- /dev/null +++ b/var/spack/packages/py-shiboken/package.py @@ -0,0 +1,21 @@ +from spack import * + +class PyShiboken(Package): + """Shiboken generates bindings for C++ libraries using CPython source code.""" + homepage = "https://shiboken.readthedocs.org/" + url = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz" + + version('1.2.2', '345cfebda221f525842e079a6141e555') + + # TODO: make build dependency + # depends_on("cmake") + + extends('python') + depends_on("py-setuptools") + depends_on("libxml2") + depends_on("qt@:4.8") + + def install(self, spec, prefix): + python('setup.py', 'install', + '--prefix=%s' % prefix, + '--jobs=%s' % make_jobs) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index eed81d095c..4b3b31eb6b 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,9 +1,10 @@ -from spack import * -import spack import os import re from contextlib import closing +from spack import * +import spack + class Python(Package): """The Python programming language.""" -- cgit v1.2.3-70-g09d2 From 5c2608b032f04c063c437eca7d8360baaf6a2a16 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Mon, 9 Feb 2015 15:55:18 -0800 Subject: typo: Self -> self --- var/spack/packages/py-pyside/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py index 7528a0bf72..1fd037d75f 100644 --- a/var/spack/packages/py-pyside/package.py +++ b/var/spack/packages/py-pyside/package.py @@ -14,7 +14,7 @@ class PyPyside(Package): depends_on('qt@:4') - def patch(Self): + def patch(self): """Undo PySide RPATH handling and add Spack RPATH.""" # Add Spack's standard CMake args to the sub-builds. # They're called BY setup.py so we have to patch it. 
-- cgit v1.2.3-70-g09d2 From 93067d0d63570425e473862de7ce72dc6091c34c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 01:44:38 -0800 Subject: Add profile option to spack script. --- bin/spack | 89 +++++++++++++++++++++++++++++++++++---------------------------- 1 file changed, 49 insertions(+), 40 deletions(-) diff --git a/bin/spack b/bin/spack index 626d9d9d11..c49caf37f9 100755 --- a/bin/spack +++ b/bin/spack @@ -58,14 +58,16 @@ parser = argparse.ArgumentParser( description='Spack: the Supercomputing PACKage Manager.') parser.add_argument('-V', '--version', action='version', version="%s" % spack.spack_version) -parser.add_argument('-v', '--verbose', action='store_true', dest='verbose', +parser.add_argument('-v', '--verbose', action='store_true', help="Print additional output during builds") -parser.add_argument('-d', '--debug', action='store_true', dest='debug', +parser.add_argument('-d', '--debug', action='store_true', help="Write out debug logs during compile") -parser.add_argument('-k', '--insecure', action='store_true', dest='insecure', +parser.add_argument('-k', '--insecure', action='store_true', help="Do not check ssl certificates when downloading archives.") -parser.add_argument('-m', '--mock', action='store_true', dest='mock', +parser.add_argument('-m', '--mock', action='store_true', help="Use mock packages instead of real ones.") +parser.add_argument('-p', '--profile', action='store_true', + help="Profile execution using cProfile.") # each command module implements a parser() function, to which we pass its # subparser for setup. @@ -85,42 +87,49 @@ if len(sys.argv) == 1: # actually parse the args. args = parser.parse_args() -# Set up environment based on args. -tty.set_verbose(args.verbose) -tty.set_debug(args.debug) -spack.debug = args.debug - -spack.spack_working_dir = working_dir -if args.mock: - from spack.packages import PackageDB - spack.db = PackageDB(spack.mock_packages_path) - -# If the user asked for it, don't check ssl certs. -if args.insecure: - tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.") - spack.curl.add_default_arg('-k') - -# Try to load the particular command asked for and run it -command = spack.cmd.get_command(args.command) -try: - return_val = command(parser, args) -except SpackError, e: - if spack.debug: - # In debug mode, raise with a full stack trace. - raise - elif e.long_message: - tty.die(e.message, e.long_message) +def main(): + # Set up environment based on args. + tty.set_verbose(args.verbose) + tty.set_debug(args.debug) + spack.debug = args.debug + + spack.spack_working_dir = working_dir + if args.mock: + from spack.packages import PackageDB + spack.db = PackageDB(spack.mock_packages_path) + + # If the user asked for it, don't check ssl certs. + if args.insecure: + tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.") + spack.curl.add_default_arg('-k') + + # Try to load the particular command asked for and run it + command = spack.cmd.get_command(args.command) + try: + return_val = command(parser, args) + except SpackError, e: + if spack.debug: + # In debug mode, raise with a full stack trace. + raise + elif e.long_message: + tty.die(e.message, e.long_message) + else: + tty.die(e.message) + + except KeyboardInterrupt: + sys.stderr.write('\n') + tty.die("Keyboard interrupt.") + + # Allow commands to return values if they want to exit with some ohter code. 
+ if return_val is None: + sys.exit(0) + elif isinstance(return_val, int): + sys.exit(return_val) else: - tty.die(e.message) - -except KeyboardInterrupt: - sys.stderr.write('\n') - tty.die("Keyboard interrupt.") + tty.die("Bad return value from command %s: %s" % (args.command, return_val)) -# Allow commands to return values if they want to exit with some ohter code. -if return_val is None: - sys.exit(0) -elif isinstance(return_val, int): - sys.exit(return_val) +if args.profile: + import cProfile + cProfile.run('main()', sort='tottime') else: - tty.die("Bad return value from command %s: %s" % (args.command, return_val)) + main() -- cgit v1.2.3-70-g09d2 From 82dc935a50874e899380f32a9a35b7cc4f76df87 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 01:49:50 -0800 Subject: installed_extensions_for no longer fails when nothing known about pkg --- lib/spack/spack/packages.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 7ef8135c1a..3c81863c11 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -119,8 +119,15 @@ class PackageDB(object): @_autospec def installed_extensions_for(self, extendee_spec): - return [s.package for s in self.installed_package_specs() - if s.package.extends(extendee_spec)] + for s in self.installed_package_specs(): + try: + if s.package.extends(extendee_spec): + yield s.package + except UnknownPackageError, e: + # Skip packages we know nothing about + continue + # TODO: add some conditional way to do this instead of + # catching exceptions. def dirname_for_package_name(self, pkg_name): -- cgit v1.2.3-70-g09d2 From c0c08799249fb56c281f62b3659e7cf7d7080188 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 01:58:35 -0800 Subject: Better extension activation/deactivation --- lib/spack/spack/cmd/extensions.py | 4 +- lib/spack/spack/directory_layout.py | 150 +++++++++++++++++++++++++---------- lib/spack/spack/package.py | 19 ++++- var/spack/packages/python/package.py | 16 ++-- 4 files changed, 133 insertions(+), 56 deletions(-) diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index ae73d8ac55..fc8e6842c3 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -89,10 +89,10 @@ def extensions(parser, args): spack.cmd.find.display_specs(installed, mode=args.mode) # List specs of activated extensions. - activated = spack.install_layout.get_extensions(spec) + activated = spack.install_layout.extension_map(spec) print if not activated: tty.msg("None activated.") return tty.msg("%d currently activated:" % len(activated)) - spack.cmd.find.display_specs(activated, mode=args.mode) + spack.cmd.find.display_specs(activated.values(), mode=args.mode) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 37740720a2..562c0bd3ed 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -27,6 +27,7 @@ import os import exceptions import hashlib import shutil +import tempfile from contextlib import closing import llnl.util.tty as tty @@ -84,17 +85,38 @@ class DirectoryLayout(object): raise NotImplementedError() - def get_extensions(self, spec): - """Get a set of currently installed extension packages for a spec.""" + def extension_map(self, spec): + """Get a dict of currently installed extension packages for a spec. 
+ + Dict maps { name : extension_spec } + Modifying dict does not affect internals of this layout. + """ + raise NotImplementedError() + + + def check_extension_conflict(self, spec, ext_spec): + """Ensure that ext_spec can be activated in spec. + + If not, raise ExtensionAlreadyInstalledError or + ExtensionConflictError. + """ + raise NotImplementedError() + + + def check_activated(self, spec, ext_spec): + """Ensure that ext_spec can be removed from spec. + + If not, raise NoSuchExtensionError. + """ raise NotImplementedError() - def add_extension(self, spec, extension_spec): + def add_extension(self, spec, ext_spec): """Add to the list of currently installed extensions.""" raise NotImplementedError() - def remove_extension(self, spec, extension_spec): + def remove_extension(self, spec, ext_spec): """Remove from the list of currently installed extensions.""" raise NotImplementedError() @@ -173,6 +195,8 @@ class SpecHashDirectoryLayout(DirectoryLayout): self.spec_file_name = spec_file_name self.extension_file_name = extension_file_name + # Cache of already written/read extension maps. + self._extension_maps = {} @property def hidden_file_paths(self): @@ -271,54 +295,94 @@ class SpecHashDirectoryLayout(DirectoryLayout): return join_path(self.path_for_spec(spec), self.extension_file_name) - def get_extensions(self, spec): + def _extension_map(self, spec): + """Get a dict spec> for all extensions currnetly + installed for this package.""" _check_concrete(spec) - extensions = set() - path = self.extension_file_path(spec) - if os.path.exists(path): - with closing(open(path)) as ext_file: - for line in ext_file: - try: - extensions.add(Spec(line.strip())) - except spack.error.SpackError, e: - raise InvalidExtensionSpecError(str(e)) - return extensions + if not spec in self._extension_maps: + path = self.extension_file_path(spec) + if not os.path.exists(path): + self._extension_maps[spec] = {} + + else: + exts = {} + with closing(open(path)) as ext_file: + for line in ext_file: + try: + spec = Spec(line.strip()) + exts[spec.name] = spec + except spack.error.SpackError, e: + # TODO: do something better here -- should be + # resilient to corrupt files. + raise InvalidExtensionSpecError(str(e)) + self._extension_maps[spec] = exts + + return self._extension_maps[spec] + + + def extension_map(self, spec): + """Defensive copying version of _extension_map() for external API.""" + return self._extension_map(spec).copy() + + + def check_extension_conflict(self, spec, ext_spec): + exts = self._extension_map(spec) + if ext_spec.name in exts: + installed_spec = exts[ext_spec.name] + if ext_spec == installed_spec: + raise ExtensionAlreadyInstalledError(spec, ext_spec) + else: + raise ExtensionConflictError(spec, ext_spec, installed_spec) + + def check_activated(self, spec, ext_spec): + exts = self._extension_map(spec) + if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]): + raise NoSuchExtensionError(spec, ext_spec) - def write_extensions(self, spec, extensions): + + def _write_extensions(self, spec, extensions): path = self.extension_file_path(spec) - with closing(open(path, 'w')) as spec_file: - for extension in sorted(extensions): - spec_file.write("%s\n" % extension) + + # Create a temp file in the same directory as the actual file. + dirname, basename = os.path.split(path) + tmp = tempfile.NamedTemporaryFile( + prefix=basename, dir=dirname, delete=False) + + # Write temp file. 
+ with closing(tmp): + for extension in sorted(extensions.values()): + tmp.write("%s\n" % extension) + + # Atomic update by moving tmpfile on top of old one. + os.rename(tmp.name, path) - def add_extension(self, spec, extension_spec): + def add_extension(self, spec, ext_spec): _check_concrete(spec) - _check_concrete(extension_spec) + _check_concrete(ext_spec) - exts = self.get_extensions(spec) - if extension_spec in exts: - raise ExtensionAlreadyInstalledError(spec, extension_spec) - else: - for already_installed in exts: - if spec.name == extension_spec.name: - raise ExtensionConflictError(spec, extension_spec, already_installed) + # Check whether it's already installed or if it's a conflict. + exts = self.extension_map(spec) + self.check_extension_conflict(spec, ext_spec) - exts.add(extension_spec) - self.write_extensions(spec, exts) + # do the actual adding. + exts[ext_spec.name] = ext_spec + self._write_extensions(spec, exts) - def remove_extension(self, spec, extension_spec): + def remove_extension(self, spec, ext_spec): _check_concrete(spec) - _check_concrete(extension_spec) + _check_concrete(ext_spec) - exts = self.get_extensions(spec) - if not extension_spec in exts: - raise NoSuchExtensionError(spec, extension_spec) + # Make sure it's installed before removing. + exts = self.extension_map(spec) + self.check_activated(spec, ext_spec) - exts.remove(extension_spec) - self.write_extensions(spec, exts) + # do the actual removing. + del exts[ext_spec.name] + self._write_extensions(spec, exts) class DirectoryLayoutError(SpackError): @@ -365,24 +429,24 @@ class InvalidExtensionSpecError(DirectoryLayoutError): class ExtensionAlreadyInstalledError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" - def __init__(self, spec, extension_spec): + def __init__(self, spec, ext_spec): super(ExtensionAlreadyInstalledError, self).__init__( - "%s is already installed in %s" % (extension_spec.short_spec, spec.short_spec)) + "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec)) class ExtensionConflictError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" - def __init__(self, spec, extension_spec, conflict): + def __init__(self, spec, ext_spec, conflict): super(ExtensionConflictError, self).__init__( "%s cannot be installed in %s because it conflicts with %s."% ( - extension_spec.short_spec, spec.short_spec, conflict.short_spec)) + ext_spec.short_spec, spec.short_spec, conflict.short_spec)) class NoSuchExtensionError(DirectoryLayoutError): """Raised when an extension isn't there on remove.""" - def __init__(self, spec, extension_spec): + def __init__(self, spec, ext_spec): super(NoSuchExtensionError, self).__init__( "%s cannot be removed from %s because it's not installed."% ( - extension_spec.short_spec, spec.short_spec)) + ext_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b18d054990..a624c1ebf5 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -534,7 +534,8 @@ class Package(object): if not self.is_extension: raise ValueError("is_extension called on package that is not an extension.") - return self.spec in spack.install_layout.get_extensions(self.extendee_spec) + exts = spack.install_layout.extension_map(self.extendee_spec) + return (self.name in exts) and (exts[self.name] == self.spec) def preorder_traversal(self, visited=None, **kwargs): @@ -987,6 +988,8 @@ class Package(object): activate() directly. 
""" self._sanity_check_extension() + spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec) + self.extendee_spec.package.activate(self, **self.extendee_args) spack.install_layout.add_extension(self.extendee_spec, self.spec) @@ -1014,12 +1017,22 @@ class Package(object): tree.merge(self.prefix, ignore=ignore) - def do_deactivate(self): + def do_deactivate(self, **kwargs): """Called on the extension to invoke extendee's deactivate() method.""" + force = kwargs.get('force', False) + self._sanity_check_extension() + + # Allow a force deactivate to happen. This can unlink + # spurious files if something was corrupted. + if not force: + spack.install_layout.check_activated(self.extendee_spec, self.spec) + self.extendee_spec.package.deactivate(self, **self.extendee_args) - if self.spec in spack.install_layout.get_extensions(self.extendee_spec): + # redundant activation check -- makes SURE the spec is not + # still activated even if something was wrong above. + if self.activated: spack.install_layout.remove_extension(self.extendee_spec, self.spec) tty.msg("Deactivated extension %s for %s." diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 4b3b31eb6b..de7f412b52 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -98,9 +98,9 @@ class Python(Package): return ignore - def write_easy_install_pth(self, extensions): + def write_easy_install_pth(self, exts): paths = [] - for ext in extensions: + for ext in sorted(exts.values()): ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir) easy_pth = "%s/easy-install.pth" % ext_site_packages @@ -139,15 +139,15 @@ class Python(Package): args.update(ignore=self.python_ignore(ext_pkg, args)) super(Python, self).activate(ext_pkg, **args) - extensions = set(spack.install_layout.get_extensions(self.spec)) - extensions.add(ext_pkg.spec) - self.write_easy_install_pth(extensions) + exts = spack.install_layout.extension_map(self.spec) + exts[ext_pkg.name] = ext_pkg.spec + self.write_easy_install_pth(exts) def deactivate(self, ext_pkg, **args): args.update(ignore=self.python_ignore(ext_pkg, args)) super(Python, self).deactivate(ext_pkg, **args) - extensions = set(spack.install_layout.get_extensions(self.spec)) - extensions.remove(ext_pkg.spec) - self.write_easy_install_pth(extensions) + exts = spack.install_layout.extension_map(self.spec) + del exts[ext_pkg.name] + self.write_easy_install_pth(exts) -- cgit v1.2.3-70-g09d2 From 3c0048dd89e4d18ac95afd19b65d7ce54a48d862 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 01:59:36 -0800 Subject: py-sip installs properly into a prefix --- var/spack/packages/py-sip/package.py | 7 ++++++- var/spack/packages/python/package.py | 10 ++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/py-sip/package.py b/var/spack/packages/py-sip/package.py index 06aea35a74..6753bdd2a5 100644 --- a/var/spack/packages/py-sip/package.py +++ b/var/spack/packages/py-sip/package.py @@ -1,4 +1,5 @@ from spack import * +import os class PySip(Package): """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries.""" @@ -10,6 +11,10 @@ class PySip(Package): extends('python') def install(self, spec, prefix): - python('configure.py') + python('configure.py', + '--destdir=%s' % site_packages_dir, + '--bindir=%s' % spec.prefix.bin, + '--incdir=%s' % python_include_dir, + '--sipdir=%s' % os.path.join(spec.prefix.share, 'sip')) make() make('install') diff 
--git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index de7f412b52..fb875a7eeb 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -42,6 +42,11 @@ class Python(Package): return os.path.join('lib', 'python%d.%d' % self.version[:2]) + @property + def python_include_dir(self): + return os.path.join('include', 'python%d.%d' % self.version[:2]) + + @property def site_packages_dir(self): return os.path.join(self.python_lib_dir, 'site-packages') @@ -58,8 +63,9 @@ class Python(Package): module.python = Executable(join_path(spec.prefix.bin, 'python')) # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. - module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) - module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) + module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) + module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir) + module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) # Make the site packages directory if it does not exist already. mkdirp(module.site_packages_dir) -- cgit v1.2.3-70-g09d2 From d1e03329c5c16ba38b082c3473e7c6970f168990 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 11:50:13 -0800 Subject: Memoize all_specs() and exists() for better performance. - Real bottleneck is calling normalize() for every spec when we read it. - Need to store graph information in spec files to avoid the need for this. - Also, normalizing old specs isn't always possible, so we need to do this anyway. --- lib/spack/spack/directory_layout.py | 11 +++++++++-- lib/spack/spack/packages.py | 1 + 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 562c0bd3ed..5b80e93d6b 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -31,6 +31,7 @@ import tempfile from contextlib import closing import llnl.util.tty as tty +from llnl.util.lang import memoized from llnl.util.filesystem import join_path, mkdirp import spack @@ -223,6 +224,9 @@ class SpecHashDirectoryLayout(DirectoryLayout): if all(spack.db.exists(s.name) for s in spec.traverse()): copy = spec.copy() + + # TODO: It takes a lot of time to normalize every spec on read. + # TODO: Storing graph info with spec files would fix this. copy.normalize() if copy.concrete: return copy # These are specs spack still understands. 
@@ -276,17 +280,20 @@ class SpecHashDirectoryLayout(DirectoryLayout): self.write_spec(spec, spec_file_path) + @memoized def all_specs(self): if not os.path.isdir(self.root): - return + return [] + specs = [] for path in traverse_dirs_at_depth(self.root, 3): arch, compiler, last_dir = path spec_file_path = join_path( self.root, arch, compiler, last_dir, self.spec_file_name) if os.path.exists(spec_file_path): spec = self.read_spec(spec_file_path) - yield spec + specs.append(spec) + return specs def extension_file_path(self, spec): diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 3c81863c11..43c4c191c1 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -192,6 +192,7 @@ class PackageDB(object): yield self.get(name) + @memoized def exists(self, pkg_name): """Whether a package with the supplied name exists .""" return os.path.exists(self.filename_for_package_name(pkg_name)) -- cgit v1.2.3-70-g09d2 From 2f67cdaf10a3e64474f7ac242518fdfe93e9c87a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 12:39:10 -0800 Subject: Better time output on build completion. --- lib/spack/spack/package.py | 16 ++++++++++++++-- var/spack/packages/py-pyqt/package.py | 21 +++++++++++++++++++++ var/spack/packages/py-pyqt4/package.py | 18 ------------------ 3 files changed, 35 insertions(+), 20 deletions(-) create mode 100644 var/spack/packages/py-pyqt/package.py delete mode 100644 var/spack/packages/py-pyqt4/package.py diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index a624c1ebf5..c48816cb5b 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -848,8 +848,8 @@ class Package(object): build_time = self._total_time - self._fetch_time tty.msg("Successfully installed %s." % self.name, - "Fetch: %.2f sec. Build: %.2f sec. Total: %.2f sec." - % (self._fetch_time, build_time, self._total_time)) + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) print_pkg(self.prefix) # Use os._exit here to avoid raising a SystemExit exception, @@ -1201,6 +1201,18 @@ def print_pkg(message): print message +def _hms(seconds): + """Convert time in seconds to hours, minutes, seconds.""" + m, s = divmod(seconds, 60) + h, m = divmod(m, 60) + + parts = [] + if h: parts.append("%dh" % h) + if m: parts.append("%dm" % m) + if s: parts.append("%.2fs" % s) + return ' '.join(parts) + + class FetchError(spack.error.SpackError): """Raised when something goes wrong during fetch.""" def __init__(self, message, long_msg=None): diff --git a/var/spack/packages/py-pyqt/package.py b/var/spack/packages/py-pyqt/package.py new file mode 100644 index 0000000000..cb40af351a --- /dev/null +++ b/var/spack/packages/py-pyqt/package.py @@ -0,0 +1,21 @@ +from spack import * + +class PyPyqt(Package): + """PyQt is a set of Python v2 and v3 bindings for Digia's Qt + application framework and runs on all platforms supported by Qt + including Windows, MacOS/X and Linux.""" + homepage = "http://www.riverbankcomputing.com/software/pyqt/intro" + url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz" + + version('4.11.3', '997c3e443165a89a559e0d96b061bf70') + + extends('python') + depends_on('qt@4') # TODO: allow qt5 when conditional deps are supported. 
+ depends_on('py-sip') + + def install(self, spec, prefix): + python('configure.py', + '--confirm-license', + '--destdir=%s' % site_packages_dir) + make() + make('install') diff --git a/var/spack/packages/py-pyqt4/package.py b/var/spack/packages/py-pyqt4/package.py deleted file mode 100644 index eeb1382560..0000000000 --- a/var/spack/packages/py-pyqt4/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class PyPyqt4(Package): - """PyQt is a set of Python v2 and v3 bindings for Digia's Qt application framework and runs on all platforms supported by Qt including Windows, MacOS/X and Linux.""" - homepage = "http://www.riverbankcomputing.com/software/pyqt/intro" - url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz" - - version('4.11.3', '997c3e443165a89a559e0d96b061bf70') - - extends('python') - depends_on('qt') - depends_on('py-sip') - - def install(self, spec, prefix): - version_array = str(spec['python'].version).split('.') - python('configure.py', '--confirm-license', '--destdir=%s/python%s.%s/site-packages' %(self.prefix.lib, version_array[0], version_array[1])) - make() - make('install') -- cgit v1.2.3-70-g09d2 From b11061f99d9e501f69d6d39bae56f0b69d17eaa1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 12:40:02 -0800 Subject: Rename py-pyqt4 to py-pyqt. --- var/spack/packages/py-pyqt/package.py | 5 ++++- var/spack/packages/qt/package.py | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/var/spack/packages/py-pyqt/package.py b/var/spack/packages/py-pyqt/package.py index cb40af351a..8edca105bb 100644 --- a/var/spack/packages/py-pyqt/package.py +++ b/var/spack/packages/py-pyqt/package.py @@ -10,9 +10,12 @@ class PyPyqt(Package): version('4.11.3', '997c3e443165a89a559e0d96b061bf70') extends('python') - depends_on('qt@4') # TODO: allow qt5 when conditional deps are supported. depends_on('py-sip') + # TODO: allow qt5 when conditional deps are supported. + # TODO: Fix version matching so that @4 works like @:4 + depends_on('qt@:4') + def install(self, spec, prefix): python('configure.py', '--confirm-license', diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index fcbcd2491a..3b5096c4f0 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -43,9 +43,9 @@ class Qt(Package): '-fast', '-optimized-qmake', '-no-pch', -# phonon required for py-pyqt4 -# '-no-phonon', -# '-no-phonon-backend', + # phonon required for py-pyqt + # '-no-phonon', + # '-no-phonon-backend', '-no-openvg') make() make("install") -- cgit v1.2.3-70-g09d2 From ce011501f9b0184888ba2b6648fcf6d360f7c404 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:02:21 -0800 Subject: Add R package. --- var/spack/packages/R/package.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 var/spack/packages/R/package.py diff --git a/var/spack/packages/R/package.py b/var/spack/packages/R/package.py new file mode 100644 index 0000000000..2e6f65a742 --- /dev/null +++ b/var/spack/packages/R/package.py @@ -0,0 +1,33 @@ +from spack import * + +class R(Package): + """R is 'GNU S', a freely available language and environment for + statistical computing and graphics which provides a wide va + riety of statistical and graphical techniques: linear and + nonlinear modelling, statistical tests, time series analysis, + classification, clustering, etc. 
Please consult the R project + homepage for further information.""" + homepage = "http://www.example.com" + url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz" + + version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74') + + depends_on("readline") + depends_on("ncurses") + depends_on("icu") + depends_on("glib") + depends_on("zlib") + depends_on("libtiff") + depends_on("jpeg") + depends_on("cairo") + depends_on("pango") + depends_on("freetype") + depends_on("tcl") + depends_on("tk") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-R-shlib", + "--enable-BLAS-shlib") + make() + make("install") -- cgit v1.2.3-70-g09d2 From 36579844d9f105b5c9182beed83a65bf0bb556a9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:02:36 -0800 Subject: Add Tcl/Tk packages. --- var/spack/packages/tcl/package.py | 22 ++++++++++++++++++++++ var/spack/packages/tk/package.py | 22 ++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 var/spack/packages/tcl/package.py create mode 100644 var/spack/packages/tk/package.py diff --git a/var/spack/packages/tcl/package.py b/var/spack/packages/tcl/package.py new file mode 100644 index 0000000000..529adf7788 --- /dev/null +++ b/var/spack/packages/tcl/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Tcl(Package): + """Tcl (Tool Command Language) is a very powerful but easy to + learn dynamic programming language, suitable for a very wide + range of uses, including web and desktop applications, + networking, administration, testing and many more. Open source + and business-friendly, Tcl is a mature yet evolving language + that is truly cross platform, easily deployed and highly + extensible.""" + homepage = "http://www.tcl.tk" + + version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f', + url="http://prdownloads.sourceforge.net/tcl/tcl8.6.3-src.tar.gz") + + depends_on('zlib') + + def install(self, spec, prefix): + with working_dir('unix'): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/tk/package.py b/var/spack/packages/tk/package.py new file mode 100644 index 0000000000..96736f6f95 --- /dev/null +++ b/var/spack/packages/tk/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Tk(Package): + """Tk is a graphical user interface toolkit that takes developing + desktop applications to a higher level than conventional + approaches. Tk is the standard GUI not only for Tcl, but for + many other dynamic languages, and can produce rich, native + applications that run unchanged across Windows, Mac OS X, Linux + and more.""" + homepage = "http://www.tcl.tk" + url = "http://prdownloads.sourceforge.net/tcl/tk8.6.3-src.tar.gz" + + version('src', '85ca4dbf4dcc19777fd456f6ee5d0221') + + depends_on("tcl") + + def install(self, spec, prefix): + with working_dir('unix'): + configure("--prefix=%s" % prefix, + "--with-tcl=%s" % spec['tcl'].prefix.lib) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 65d60f92f5c479bdcf8b4cbfa244135a131d08f9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:02:51 -0800 Subject: qhull package. 
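The Tcl, Tk, and qhull packages in this series all run their build steps inside a with working_dir(...) block, and qhull's install() below passes create=True to build out of source in a spack-build directory. The following is only a minimal sketch of such a context manager, written to show the behavior these packages assume; Spack's real helper lives in its utility library and may differ in details.

import os
from contextlib import contextmanager

@contextmanager
def working_dir(dirname, create=False):
    """Run the body of a with-block inside dirname, then restore the old cwd."""
    if create and not os.path.isdir(dirname):
        os.makedirs(dirname)        # assumption: the real helper may use mkdirp()
    original = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(original)

# Usage, mirroring the qhull install() below:
#   with working_dir('spack-build', create=True):
#       cmake('..', *std_cmake_args)
#       make()
#       make("install")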
--- var/spack/packages/qhull/package.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/packages/qhull/package.py diff --git a/var/spack/packages/qhull/package.py b/var/spack/packages/qhull/package.py new file mode 100644 index 0000000000..9da4078a70 --- /dev/null +++ b/var/spack/packages/qhull/package.py @@ -0,0 +1,27 @@ +from spack import * + +class Qhull(Package): + """Qhull computes the convex hull, Delaunay triangulation, Voronoi + diagram, halfspace intersection about a point, furt hest-site + Delaunay triangulation, and furthest-site Voronoi diagram. The + source code runs in 2-d, 3-d, 4-d, and higher dimensions. Qhull + implements the Quickhull algorithm for computing the convex + hull. It handles roundoff errors from floating point + arithmetic. It computes volumes, surface areas, and + approximations to the convex hull. + + Qhull does not support triangulation of non-convex surfaces, + mesh generation of non-convex objects, medium-sized inputs in + 9-D and higher, alpha shapes, weighted Voronoi diagrams, + Voronoi volumes, or constrained Delaunay triangulations.""" + + homepage = "http://www.qhull.org" + + version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c', + url="http://www.qhull.org/download/qhull-2012.1-src.tgz") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") -- cgit v1.2.3-70-g09d2 From b86eb695523c235371169d0139486bcae15a7948 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:03:33 -0800 Subject: libgcrypt and libgpg-error packages. --- var/spack/packages/libgcrypt/package.py | 19 +++++++++++++++++++ var/spack/packages/libgpg-error/package.py | 17 +++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 var/spack/packages/libgcrypt/package.py create mode 100644 var/spack/packages/libgpg-error/package.py diff --git a/var/spack/packages/libgcrypt/package.py b/var/spack/packages/libgcrypt/package.py new file mode 100644 index 0000000000..1d0a57f317 --- /dev/null +++ b/var/spack/packages/libgcrypt/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Libgcrypt(Package): + """Libgcrypt is a general purpose cryptographic library based on + the code from GnuPG. It provides functions for all cryptographic + building blocks: symmetric ciphers, hash algorithms, MACs, public + key algorithms, large integer functions, random numbers and a lot + of supporting functions. """ + homepage = "http://www.gnu.org/software/libgcrypt/" + url = "ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2" + + version('1.6.2', 'b54395a93cb1e57619943c082da09d5f') + + depends_on("libgpg-error") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/libgpg-error/package.py b/var/spack/packages/libgpg-error/package.py new file mode 100644 index 0000000000..6c1d1a10a7 --- /dev/null +++ b/var/spack/packages/libgpg-error/package.py @@ -0,0 +1,17 @@ +from spack import * + +class LibgpgError(Package): + """Libgpg-error is a small library that defines common error + values for all GnuPG components. Among these are GPG, GPGSM, + GPGME, GPG-Agent, libgcrypt, Libksba, DirMngr, Pinentry, + SmartCard Daemon and possibly more in the future. 
""" + + homepage = "https://www.gnupg.org/related_software/libgpg-error" + url = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.18.tar.bz2" + + version('1.18', '12312802d2065774b787cbfc22cc04e9') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 847ed8ad399973477a7889b6367911f10c56f6bf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:04:04 -0800 Subject: Add libxslt, cleanup libxml2. --- var/spack/packages/libxml2/package.py | 3 +++ var/spack/packages/libxslt/package.py | 24 ++++++++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 var/spack/packages/libxslt/package.py diff --git a/var/spack/packages/libxml2/package.py b/var/spack/packages/libxml2/package.py index 5eaed36d94..72199d8def 100644 --- a/var/spack/packages/libxml2/package.py +++ b/var/spack/packages/libxml2/package.py @@ -9,6 +9,9 @@ class Libxml2(Package): version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788') + depends_on('zlib') + depends_on('xz') + def install(self, spec, prefix): configure("--prefix=%s" % prefix, "--without-python") diff --git a/var/spack/packages/libxslt/package.py b/var/spack/packages/libxslt/package.py new file mode 100644 index 0000000000..f97332d020 --- /dev/null +++ b/var/spack/packages/libxslt/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Libxslt(Package): + """Libxslt is the XSLT C library developed for the GNOME + project. XSLT itself is a an XML language to define + transformation for XML. Libxslt is based on libxml2 the XML C + library developed for the GNOME project. It also implements + most of the EXSLT set of processor-portable extensions + functions and some of Saxon's evaluate and expressions + extensions.""" + homepage = "http://www.xmlsoft.org/XSLT/index.html" + url = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz" + + version('1.1.28', '9667bf6f9310b957254fdcf6596600b7') + + depends_on("libxml2") + depends_on("xz") + depends_on("zlib") + depends_on("libgcrypt") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") -- cgit v1.2.3-70-g09d2 From 8aa3afcfde9cebd34fdb534141c258c214ae4132 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Feb 2015 23:04:20 -0800 Subject: Python package cleanup. - Added a number of dependencies to python packages. - Python packages may still not build without some OS support. - Example: Numpy needs ATLAS, and will use a system ATLAS install. - Atlas requires turning off CPU throttling to build. - can't do this as a regular user -- how to build ATLAS with Spack - currnetly relying on a system ATLAS install. 
--- var/spack/packages/py-libxml2/package.py | 1 + var/spack/packages/py-matplotlib/package.py | 6 +++++- var/spack/packages/py-pyside/package.py | 1 - var/spack/packages/py-rpy2/package.py | 2 ++ var/spack/packages/py-scientificpython/package.py | 6 +++++- var/spack/packages/py-shiboken/package.py | 19 +++++++++++++++++++ 6 files changed, 32 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/py-libxml2/package.py b/var/spack/packages/py-libxml2/package.py index e645acb5dd..59005428e4 100644 --- a/var/spack/packages/py-libxml2/package.py +++ b/var/spack/packages/py-libxml2/package.py @@ -9,6 +9,7 @@ class PyLibxml2(Package): extends('python') depends_on('libxml2') + depends_on('libxslt') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py index 8b8684c563..f6b9c587fd 100644 --- a/var/spack/packages/py-matplotlib/package.py +++ b/var/spack/packages/py-matplotlib/package.py @@ -17,8 +17,12 @@ class PyMatplotlib(Package): depends_on('py-pytz') depends_on('py-nose') depends_on('py-numpy') - depends_on('qt') + depends_on('qt') + depends_on('bzip2') + depends_on('tcl') + depends_on('tk') + depends_on('qhull') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py index 1fd037d75f..c165d9b3bf 100644 --- a/var/spack/packages/py-pyside/package.py +++ b/var/spack/packages/py-pyside/package.py @@ -13,7 +13,6 @@ class PyPyside(Package): depends_on('py-setuptools') depends_on('qt@:4') - def patch(self): """Undo PySide RPATH handling and add Spack RPATH.""" # Add Spack's standard CMake args to the sub-builds. diff --git a/var/spack/packages/py-rpy2/package.py b/var/spack/packages/py-rpy2/package.py index 3817059911..dd0c0672af 100644 --- a/var/spack/packages/py-rpy2/package.py +++ b/var/spack/packages/py-rpy2/package.py @@ -10,5 +10,7 @@ class PyRpy2(Package): extends('python') depends_on('py-setuptools') + depends_on('R') + def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scientificpython/package.py b/var/spack/packages/py-scientificpython/package.py index 73600e6cb9..020d830703 100644 --- a/var/spack/packages/py-scientificpython/package.py +++ b/var/spack/packages/py-scientificpython/package.py @@ -1,7 +1,11 @@ from spack import * class PyScientificpython(Package): - """ScientificPython is a collection of Python modules for scientific computing. It contains support for geometry, mathematical functions, statistics, physical units, IO, visualization, and parallelization.""" + """ScientificPython is a collection of Python modules for + scientific computing. 
It contains support for geometry, + mathematical functions, statistics, physical units, IO, + visualization, and parallelization.""" + homepage = "https://sourcesup.renater.fr/projects/scientific-py/" url = "https://sourcesup.renater.fr/frs/download.php/4411/ScientificPython-2.8.1.tar.gz" diff --git a/var/spack/packages/py-shiboken/package.py b/var/spack/packages/py-shiboken/package.py index 47abe64e65..e900947939 100644 --- a/var/spack/packages/py-shiboken/package.py +++ b/var/spack/packages/py-shiboken/package.py @@ -15,6 +15,25 @@ class PyShiboken(Package): depends_on("libxml2") depends_on("qt@:4.8") + def patch(self): + """Undo Shiboken RPATH handling and add Spack RPATH.""" + # Add Spack's standard CMake args to the sub-builds. + # They're called BY setup.py so we have to patch it. + filter_file( + r'OPTION_CMAKE,', + r'OPTION_CMAKE, ' + ( + '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(self.rpath)), + 'setup.py') + + # Shiboken tries to patch ELF files to remove RPATHs + # Disable this and go with the one we set. + filter_file( + r'^\s*rpath_cmd\(shiboken_path, srcpath\)', + r'#rpath_cmd(shiboken_path, srcpath)', + 'shiboken_postinstall.py') + + def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix, -- cgit v1.2.3-70-g09d2 From 614c22fc1b1ac10c85ed9e27a1e59eeb88de0898 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 16 Feb 2015 12:41:22 -0800 Subject: Allow forced deactivation -- best effort unlinking spack deactivate -f will unlink even if Spack thinks the package isn't enabled. Made deactivate routines idempotent. --- lib/spack/llnl/util/link_tree.py | 4 ++++ lib/spack/spack/cmd/deactivate.py | 7 +++++-- lib/spack/spack/cmd/find.py | 3 +++ var/spack/packages/python/package.py | 5 +++-- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py index 4e4e48316e..4d778eca1e 100644 --- a/lib/spack/llnl/util/link_tree.py +++ b/lib/spack/llnl/util/link_tree.py @@ -175,6 +175,10 @@ class LinkTree(object): kwargs['order'] = 'post' for src, dest in traverse_tree(self._root, dest_root, **kwargs): if os.path.isdir(src): + # Skip non-existing links. + if not os.path.exists(dest): + continue + if not os.path.isdir(dest): raise ValueError("File blocks directory: %s" % dest) diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index fd13f051df..f37dfd79ed 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -30,6 +30,9 @@ import spack.cmd description = "Deactivate a package extension." def setup_parser(subparser): + subparser.add_argument( + '-f', '--force', action='store_true', + help="Run deactivation even if spec is NOT currently activated.") subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") @@ -44,7 +47,7 @@ def deactivate(parser, args): spack.db.get(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0]) - if not spec.package.activated: + if not args.force and not spec.package.activated: tty.die("Package %s is not activated." 
% specs[0].short_spec) - spec.package.do_deactivate() + spec.package.do_deactivate(force=args.force) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index f6f503afe5..dee1dfece7 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -122,5 +122,8 @@ def find(parser, args): if not args.mode: args.mode = 'short' + + if sys.stdout.isatty(): + tty.msg("%d installed packages." % len(specs)) display_specs(specs, mode=args.mode) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index fb875a7eeb..705d002e80 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -155,5 +155,6 @@ class Python(Package): super(Python, self).deactivate(ext_pkg, **args) exts = spack.install_layout.extension_map(self.spec) - del exts[ext_pkg.name] - self.write_easy_install_pth(exts) + if ext_pkg.name in exts: # Make deactivate idempotent. + del exts[ext_pkg.name] + self.write_easy_install_pth(exts) -- cgit v1.2.3-70-g09d2 From e6b2c2701184dd9128b729e11fc1f142272caa1d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 16 Feb 2015 20:38:22 -0800 Subject: Factor out forking logic to build_environment.py. --- lib/spack/spack/build_environment.py | 56 ++++++++++++++++++++++++++++++++++++ lib/spack/spack/package.py | 39 +++++-------------------- 2 files changed, 63 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index cabde7dc86..84d2bd77ef 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -28,6 +28,7 @@ Skimming this module is a nice way to get acquainted with the types of calls you can make from within the install() function. """ import os +import sys import shutil import multiprocessing import platform @@ -212,3 +213,58 @@ def setup_package(pkg): for dep_spec in pkg.spec.traverse(root=False): dep_spec.package.setup_dependent_environment( pkg.module, dep_spec, pkg.spec) + + +def fork(pkg, function): + """Fork a child process to do part of a spack build. + + Arguments: + + pkg -- pkg whose environemnt we should set up the + forked process for. + function -- arg-less function to run in the child process. + + Usage: + def child_fun(): + # do stuff + build_env.fork(pkg, child_fun) + + Forked processes are run with the build environemnt set up by + spack.build_environment. This allows package authors to have + full control over the environment, etc. without offecting + other builds that might be executed in the same spack call. + + If something goes wrong, the child process is expected toprint + the error and the parent process will exit with error as + well. If things go well, the child exits and the parent + carries on. + """ + try: + pid = os.fork() + except OSError, e: + raise InstallError("Unable to fork build process: %s" % e) + + if pid == 0: + # Give the child process the package's build environemnt. + setup_package(pkg) + + try: + # call the forked function. + function() + + # Use os._exit here to avoid raising a SystemExit exception, + # which interferes with unit tests. + os._exit(0) + except: + # Child doesn't raise or return to main spack code. + # Just runs default exception handler and exits. + sys.excepthook(*sys.exc_info()) + os._exit(1) + + else: + # Parent process just waits for the child to complete. If the + # child exited badly, assume it already printed an appropriate + # message. Just make the parent exit with an error code. 
+ pid, returncode = os.waitpid(pid, 0) + if returncode != 0: + sys.exit(1) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c48816cb5b..5d04fed8ff 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -804,32 +804,21 @@ class Package(object): if not fake_install: self.do_patch() - # Fork a child process to do the build. This allows each - # package authors to have full control over their environment, - # etc. without offecting other builds that might be executed - # in the same spack call. - try: - pid = os.fork() - except OSError, e: - raise InstallError("Unable to fork build process: %s" % e) + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. + spack.install_layout.make_path_for_spec(self.spec) - if pid == 0: + def real_work(): try: tty.msg("Building %s." % self.name) - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.make_path_for_spec(self.spec) - # Run the pre-install hook in the child process after # the directory is created. spack.hooks.pre_install(self) # Set up process's build environment before running install. self.stage.chdir_to_source() - build_env.setup_package(self) - if fake_install: self.do_fake_install() else: @@ -852,10 +841,6 @@ class Package(object): % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) print_pkg(self.prefix) - # Use os._exit here to avoid raising a SystemExit exception, - # which interferes with unit tests. - os._exit(0) - except: if not keep_prefix: # If anything goes wrong, remove the install prefix @@ -865,24 +850,14 @@ class Package(object): "Spack will think this package is installed." + "Manually remove this directory to fix:", self.prefix) + raise - # Child doesn't raise or return to main spack code. - # Just runs default exception handler and exits. - sys.excepthook(*sys.exc_info()) - os._exit(1) - - # Parent process just waits for the child to complete. If the - # child exited badly, assume it already printed an appropriate - # message. Just make the parent exit with an error code. - pid, returncode = os.waitpid(pid, 0) - if returncode != 0: - sys.exit(1) + build_env.fork(self, real_work) # Once everything else is done, run post install hooks spack.hooks.post_install(self) - def _sanity_check_install(self): installed = set(os.listdir(self.prefix)) installed.difference_update(spack.install_layout.hidden_file_paths) -- cgit v1.2.3-70-g09d2 From 13376efafc42c6eeb1bf0ad3b35f509924f1a6df Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 16 Feb 2015 21:53:34 -0800 Subject: Add package-specific rpath back to shiboken and pyside. 
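The patch() methods in this commit splice Spack's RPATH settings into the PySide and Shiboken sub-builds by rewriting setup.py and the post-install scripts with filter_file(). The snippet below is a simplified, self-contained stand-in for that idiom, not Spack's actual filter_file implementation.

import re

def filter_file_sketch(regex, repl, filename):
    """Apply a regex substitution to every line of filename, rewriting it in place."""
    with open(filename) as f:
        lines = f.readlines()
    with open(filename, 'w') as f:
        for line in lines:
            f.write(re.sub(regex, repl, line))

# The diff below uses the same idea twice: it extends the OPTION_CMAKE argument
# list in setup.py with "-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE" plus a
# package-specific "-DCMAKE_INSTALL_RPATH=..." entry, and it comments out the
# rpath_cmd() call in pyside_postinstall.py / shiboken_postinstall.py so those
# tools stop stripping the RPATHs Spack just set.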
--- var/spack/packages/py-pyside/package.py | 13 ++++++++++--- var/spack/packages/py-shiboken/package.py | 7 ++++++- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py index c165d9b3bf..6583431124 100644 --- a/var/spack/packages/py-pyside/package.py +++ b/var/spack/packages/py-pyside/package.py @@ -1,5 +1,4 @@ from spack import * -import spack.package import os class PyPyside(Package): @@ -9,25 +8,33 @@ class PyPyside(Package): version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d') + # TODO: make build dependency + # depends_on("cmake") + extends('python') depends_on('py-setuptools') depends_on('qt@:4') def patch(self): """Undo PySide RPATH handling and add Spack RPATH.""" + # Figure out the special RPATH + pypkg = self.spec['python'].package + rpath = self.rpath + rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide')) + # Add Spack's standard CMake args to the sub-builds. # They're called BY setup.py so we have to patch it. filter_file( r'OPTION_CMAKE,', r'OPTION_CMAKE, ' + ( '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' - '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(self.rpath)), + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), 'setup.py') # PySide tries to patch ELF files to remove RPATHs # Disable this and go with the one we set. filter_file( - r'rpath_cmd\(pyside_path, srcpath\)', + r'^\s*rpath_cmd\(pyside_path, srcpath\)', r'#rpath_cmd(pyside_path, srcpath)', 'pyside_postinstall.py') diff --git a/var/spack/packages/py-shiboken/package.py b/var/spack/packages/py-shiboken/package.py index e900947939..e4bf4ce07e 100644 --- a/var/spack/packages/py-shiboken/package.py +++ b/var/spack/packages/py-shiboken/package.py @@ -1,4 +1,5 @@ from spack import * +import os class PyShiboken(Package): """Shiboken generates bindings for C++ libraries using CPython source code.""" @@ -19,11 +20,15 @@ class PyShiboken(Package): """Undo Shiboken RPATH handling and add Spack RPATH.""" # Add Spack's standard CMake args to the sub-builds. # They're called BY setup.py so we have to patch it. + pypkg = self.spec['python'].package + rpath = self.rpath + rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken')) + filter_file( r'OPTION_CMAKE,', r'OPTION_CMAKE, ' + ( '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' - '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(self.rpath)), + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), 'setup.py') # Shiboken tries to patch ELF files to remove RPATHs -- cgit v1.2.3-70-g09d2 From 06d6b0b205095f849ace216aa51393b907cf821b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 16 Feb 2015 21:53:55 -0800 Subject: More py-setuptools dependencies added. 
--- var/spack/packages/py-ipython/package.py | 1 + var/spack/packages/py-mpi4py/package.py | 1 + var/spack/packages/py-nose/package.py | 1 + var/spack/packages/py-pylint/package.py | 4 ++-- var/spack/packages/py-virtualenv/package.py | 1 + 5 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/py-ipython/package.py b/var/spack/packages/py-ipython/package.py index 731e661dfd..907ea9edcd 100644 --- a/var/spack/packages/py-ipython/package.py +++ b/var/spack/packages/py-ipython/package.py @@ -9,6 +9,7 @@ class PyIpython(Package): extends('python') depends_on('py-pygments') + depends_on('py-setuptools') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mpi4py/package.py b/var/spack/packages/py-mpi4py/package.py index fdea340dc2..8001689a18 100644 --- a/var/spack/packages/py-mpi4py/package.py +++ b/var/spack/packages/py-mpi4py/package.py @@ -7,6 +7,7 @@ class PyMpi4py(Package): version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c') extends('python') + depends_on('py-setuptools') depends_on('mpi') def install(self, spec, prefix): diff --git a/var/spack/packages/py-nose/package.py b/var/spack/packages/py-nose/package.py index 6df84e831d..b902a35fbb 100644 --- a/var/spack/packages/py-nose/package.py +++ b/var/spack/packages/py-nose/package.py @@ -10,6 +10,7 @@ class PyNose(Package): version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') extends('python') + depends_on('py-setuptools') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pylint/package.py b/var/spack/packages/py-pylint/package.py index ebde861f94..7a6ee7dbbc 100644 --- a/var/spack/packages/py-pylint/package.py +++ b/var/spack/packages/py-pylint/package.py @@ -8,9 +8,9 @@ class PyPylint(Package): version('1.4.1', 'df7c679bdcce5019389038847e4de622') -# extends('python') - extends('python', ignore=lambda f:re.match(r"site.py*", f)) + extends('python') depends_on('py-nose') + depends_on('py-setuptools') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-virtualenv/package.py b/var/spack/packages/py-virtualenv/package.py index c1b359e164..9d94c2dcda 100644 --- a/var/spack/packages/py-virtualenv/package.py +++ b/var/spack/packages/py-virtualenv/package.py @@ -9,6 +9,7 @@ class PyVirtualenv(Package): version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49') extends('python') + depends_on('py-setuptools') def clean(self): if os.path.exists('build'): -- cgit v1.2.3-70-g09d2 From 67db8ddca8ac7ab9adeb827a7dadd34a385b2b6b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 17 Feb 2015 00:21:15 -0800 Subject: Factor ignore logic into a predicate builder. --- lib/spack/llnl/util/lang.py | 31 +++++++++++++++++++++++++++++++ var/spack/packages/python/package.py | 25 +++++++++++-------------- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index db15da0506..332367f537 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -291,6 +291,37 @@ def check_kwargs(kwargs, fun): % (next(kwargs.iterkeys()), fun.__name__)) +def match_predicate(*args): + """Utility function for making string matching predicates. + + Each arg can be a: + - regex + - list or tuple of regexes + - predicate that takes a string. + + This returns a predicate that is true if: + - any arg regex matches + - any regex in a list or tuple of regexes matches. 
+ - any predicate in args matches. + """ + def match(string): + for arg in args: + if isinstance(arg, basestring): + if re.search(arg, string): + return True + elif isinstance(arg, list) or isinstance(arg, tuple): + if any(re.search(i, string) for i in arg): + return True + elif callable(arg): + if arg(string): + return True + else: + raise ValueError("args to match_predicate must be regex, " + "list of regexes, or callable.") + return False + return match + + class RequiredAttributeError(ValueError): def __init__(self, message): super(RequiredAttributeError, self).__init__(message) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 705d002e80..31a12ea653 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -1,6 +1,7 @@ import os import re from contextlib import closing +from llnl.util.lang import match_predicate from spack import * import spack @@ -85,23 +86,19 @@ class Python(Package): def python_ignore(self, ext_pkg, args): """Add some ignore files to activate/deactivate args.""" - orig_ignore = args.get('ignore', lambda f: False) + ignore_arg = args.get('ignore', lambda f: False) - def ignore(filename): - # Always ignore easy-install.pth, as it needs to be merged. - patterns = [r'easy-install\.pth$'] + # Always ignore easy-install.pth, as it needs to be merged. + patterns = [r'easy-install\.pth$'] - # Ignore pieces of setuptools installed by other packages. - if ext_pkg.name != 'py-setuptools': - patterns.append(r'/site\.pyc?$') - patterns.append(r'setuptools\.pth') - patterns.append(r'bin/easy_install[^/]*$') - patterns.append(r'setuptools.*egg$') + # Ignore pieces of setuptools installed by other packages. + if ext_pkg.name != 'py-setuptools': + patterns.append(r'/site\.pyc?$') + patterns.append(r'setuptools\.pth') + patterns.append(r'bin/easy_install[^/]*$') + patterns.append(r'setuptools.*egg$') - return (any(re.search(p, filename) for p in patterns) or - orig_ignore(filename)) - - return ignore + return match_predicate(ignore_arg, patterns) def write_easy_install_pth(self, exts): -- cgit v1.2.3-70-g09d2 From 57f331e2acf75c5bf4c464d0df888fd882295a68 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 17 Feb 2015 00:22:18 -0800 Subject: Ignore conflicting nose tests in py-nose and py-matplotlib. 
--- var/spack/packages/py-matplotlib/package.py | 3 ++- var/spack/packages/py-nose/package.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py index f6b9c587fd..04037f004e 100644 --- a/var/spack/packages/py-matplotlib/package.py +++ b/var/spack/packages/py-matplotlib/package.py @@ -8,7 +8,8 @@ class PyMatplotlib(Package): version('1.4.2', '7d22efb6cce475025733c50487bd8898') - extends('python') + extends('python', ignore=r'bin/nosetests.*$') + depends_on('py-pyside') depends_on('py-ipython') depends_on('py-pyparsing') diff --git a/var/spack/packages/py-nose/package.py b/var/spack/packages/py-nose/package.py index b902a35fbb..155019289d 100644 --- a/var/spack/packages/py-nose/package.py +++ b/var/spack/packages/py-nose/package.py @@ -9,7 +9,7 @@ class PyNose(Package): version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') - extends('python') + extends('python', ignore=r'bin/nosetests.*$') depends_on('py-setuptools') def install(self, spec, prefix): -- cgit v1.2.3-70-g09d2 From d800c23cecd9c87b17991411512e3aa42855815d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 17 Feb 2015 00:24:58 -0800 Subject: Better activate/deactivate logic. spack activate - now activates dependency extensions - ensures dependencies are activated in the python installation. - -f/--force option still allows the old activate behavior. spack deactivate - checks for dependents before deactivating (like uninstall) - deactivate -a/--all will deactviate a package and ALL of its dependency extensions. - deactivate -a/--all activates all extensions of e.g.: spack deactivate -a python - deactivate -f/--force option allows removing regardless of dependents. - deactivate -f can be run EVEN if a package is not activated. - allows for clenup of activations gone wrong. --- lib/spack/spack/cmd/activate.py | 6 +++++ lib/spack/spack/cmd/deactivate.py | 54 ++++++++++++++++++++++++++++++++++--- lib/spack/spack/cmd/find.py | 2 +- lib/spack/spack/directory_layout.py | 8 +++--- lib/spack/spack/hooks/extensions.py | 2 +- lib/spack/spack/package.py | 42 ++++++++++++++++++++++------- 6 files changed, 95 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py index c1e23852d6..71eca4f453 100644 --- a/lib/spack/spack/cmd/activate.py +++ b/lib/spack/spack/cmd/activate.py @@ -30,6 +30,9 @@ import spack.cmd description = "Activate a package extension." def setup_parser(subparser): + subparser.add_argument( + '-f', '--force', action='store_true', + help="Activate without first activating dependencies.") subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.") @@ -44,6 +47,9 @@ def activate(parser, args): spack.db.get(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0]) + if not spec.package.is_extension: + tty.die("%s is not an extension." % spec.name) + if spec.package.activated: tty.die("Package %s is already activated." % specs[0].short_spec) diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index f37dfd79ed..bfec618c8e 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -24,8 +24,10 @@ ############################################################################## from external import argparse import llnl.util.tty as tty + import spack import spack.cmd +from spack.graph import topological_sort description = "Deactivate a package extension." 
@@ -33,6 +35,10 @@ def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', help="Run deactivation even if spec is NOT currently activated.") + subparser.add_argument( + '-a', '--all', action='store_true', + help="Deactivate all extensions of an extendable pacakge, or " + "deactivate an extension AND its dependencies.") subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") @@ -42,12 +48,52 @@ def deactivate(parser, args): if len(specs) != 1: tty.die("deactivate requires one spec. %d given." % len(specs)) - # TODO: remove this hack when DAG info is stored in dir layout. + # TODO: remove this hack when DAG info is stored properly. # This ensures the ext spec is always normalized properly. spack.db.get(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0]) - if not args.force and not spec.package.activated: - tty.die("Package %s is not activated." % specs[0].short_spec) + pkg = spec.package + + if args.all: + if pkg.extendable: + tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) + ext_pkgs = spack.db.installed_extensions_for(spec) + for ext_pkg in ext_pkgs: + ext_pkg.spec.normalize() + if ext_pkg.activated: + ext_pkg.do_deactivate(force=True) + + elif pkg.is_extension: + # TODO: store DAG info properly (see above) + spec.normalize() + + tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec) + + topo_order = topological_sort(spec) + index = spec.index() + + for name in topo_order: + espec = index[name] + epkg = espec.package + + # TODO: store DAG info properly (see above) + epkg.spec.normalize() + + if epkg.extends(pkg.extendee_spec): + if epkg.activated or args.force: + + epkg.do_deactivate(force=args.force) + + else: + tty.die("spack deactivate --all requires an extendable package or an extension.") + + else: + if not pkg.is_extension: + tty.die("spack deactivate requires an extension.", + "Did you mean 'spack deactivate --all'?") + + if not args.force and not spec.package.activated: + tty.die("Package %s is not activated." % specs[0].short_spec) - spec.package.do_deactivate(force=args.force) + spec.package.do_deactivate(force=args.force) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index dee1dfece7..70b10edb4e 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -85,7 +85,7 @@ def display_specs(specs, **kwargs): elif mode == 'deps': for spec in specs: - print spec.tree(indent=4, format='$_$@$+', color=True), + print spec.tree(indent=4, format='$_$@$+$#', color=True), elif mode in ('short', 'long'): fmt = '$-_$@$+' diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 5b80e93d6b..b2cf5dc801 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -371,7 +371,7 @@ class SpecHashDirectoryLayout(DirectoryLayout): _check_concrete(ext_spec) # Check whether it's already installed or if it's a conflict. - exts = self.extension_map(spec) + exts = self._extension_map(spec) self.check_extension_conflict(spec, ext_spec) # do the actual adding. @@ -384,7 +384,7 @@ class SpecHashDirectoryLayout(DirectoryLayout): _check_concrete(ext_spec) # Make sure it's installed before removing. - exts = self.extension_map(spec) + exts = self._extension_map(spec) self.check_activated(spec, ext_spec) # do the actual removing. 
@@ -450,10 +450,10 @@ class ExtensionConflictError(DirectoryLayoutError): class NoSuchExtensionError(DirectoryLayoutError): - """Raised when an extension isn't there on remove.""" + """Raised when an extension isn't there on deactivate.""" def __init__(self, spec, ext_spec): super(NoSuchExtensionError, self).__init__( - "%s cannot be removed from %s because it's not installed."% ( + "%s cannot be removed from %s because it's not activated."% ( ext_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index 9d6fa23d03..cf87a78c8c 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -33,4 +33,4 @@ def pre_uninstall(pkg): if pkg.is_extension: if pkg.activated: - pkg.do_deactivate() + pkg.do_deactivate(force=True) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 5d04fed8ff..bc8541a184 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -529,11 +529,8 @@ class Package(object): @property def activated(self): - if not self.spec.concrete: - raise ValueError("Only concrete package extensions can be activated.") if not self.is_extension: raise ValueError("is_extension called on package that is not an extension.") - exts = spack.install_layout.extension_map(self.extendee_spec) return (self.name in exts) and (exts[self.name] == self.spec) @@ -956,20 +953,33 @@ class Package(object): raise ValueError("%s does not extend %s!" % (self.name, self.extendee.name)) - def do_activate(self): + def do_activate(self, **kwargs): """Called on an etension to invoke the extendee's activate method. Commands should call this routine, and should not call activate() directly. """ self._sanity_check_extension() + force = kwargs.get('force', False) + + # TODO: get rid of this normalize - DAG handling. + self.spec.normalize() + spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec) + if not force: + for spec in self.spec.traverse(root=False): + if spec.package.extends(self.extendee_spec): + # TODO: fix this normalize() requirement -- revisit DAG handling. + spec.package.spec.normalize() + if not spec.package.activated: + spec.package.do_activate(**kwargs) + self.extendee_spec.package.activate(self, **self.extendee_args) spack.install_layout.add_extension(self.extendee_spec, self.spec) tty.msg("Activated extension %s for %s." - % (self.spec.short_spec, self.extendee_spec.short_spec)) + % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@"))) def activate(self, extension, **kwargs): @@ -994,15 +1004,21 @@ class Package(object): def do_deactivate(self, **kwargs): """Called on the extension to invoke extendee's deactivate() method.""" - force = kwargs.get('force', False) - self._sanity_check_extension() + force = kwargs.get('force', False) # Allow a force deactivate to happen. This can unlink # spurious files if something was corrupted. if not force: spack.install_layout.check_activated(self.extendee_spec, self.spec) + activated = spack.install_layout.extension_map(self.extendee_spec) + for name, aspec in activated.items(): + if aspec != self.spec and self.spec in aspec: + raise ActivationError( + "Cannot deactivate %s beacuse %s is activated and depends on it." 
+ % (self.spec.short_spec, aspec.short_spec)) + self.extendee_spec.package.deactivate(self, **self.extendee_args) # redundant activation check -- makes SURE the spec is not @@ -1011,7 +1027,7 @@ class Package(object): spack.install_layout.remove_extension(self.extendee_spec, self.spec) tty.msg("Deactivated extension %s for %s." - % (self.spec.short_spec, self.extendee_spec.short_spec)) + % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@"))) def deactivate(self, extension, **kwargs): @@ -1236,7 +1252,15 @@ class NoURLError(PackageError): "Package %s has no version with a URL." % cls.__name__) -class ExtensionConflictError(PackageError): +class ExtensionError(PackageError): pass + + +class ExtensionConflictError(ExtensionError): def __init__(self, path): super(ExtensionConflictError, self).__init__( "Extension blocked by file: %s" % path) + + +class ActivationError(ExtensionError): + def __init__(self, msg, long_msg=None): + super(ActivationError, self).__init__(msg, long_msg) -- cgit v1.2.3-70-g09d2 From 724b72bdaf5c7c569e502e383e2aadcb8105983c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 17 Feb 2015 00:47:35 -0800 Subject: take out dyninst 8.2 for now. - doesn't build correctly with boost 1.55 --- var/spack/packages/dyninst/package.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/dyninst/package.py b/var/spack/packages/dyninst/package.py index 2e6f3e010a..f3d661f9a9 100644 --- a/var/spack/packages/dyninst/package.py +++ b/var/spack/packages/dyninst/package.py @@ -31,8 +31,9 @@ class Dyninst(Package): url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" list_url = "http://www.dyninst.org/downloads/dyninst-8.x" - version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', - url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") +# Doesn't build right with boost@1.55.0 +# version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', +# url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a', url="http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz") version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac', @@ -40,7 +41,7 @@ class Dyninst(Package): depends_on("libelf") depends_on("libdwarf") - depends_on("boost@1.42:1.43") + depends_on("boost@1.42:") # new version uses cmake def install(self, spec, prefix): -- cgit v1.2.3-70-g09d2 From 44003449d594e9ba8ea1adaed84c7c938b439972 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Tue, 17 Feb 2015 16:26:00 -0800 Subject: fixed install steps for version 4 --- var/spack/packages/qt/package.py | 55 ++++++++++++++++++++++++++-------------- 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 30f46c08dc..1535bd5948 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -63,24 +63,41 @@ class Qt(Package): def install(self, spec, prefix): - # Apparently this is the only way to - # "truly" get rid of webkit compiles now... 
- os.rename("qtwebkit","no-qtwebkit") - os.rename("qtwebkit-examples","no-qtwebkit-examples") - configure('-v', - '-confirm-license', - '-opensource', - '-prefix', prefix, - '-openssl-linked', - '-dbus-linked', - #'-fast', - '-opengl', - '-qt-xcb', - '-optimized-qmake', - '-no-pch', - # phonon required for py-pyqt - # '-no-phonon', - # '-no-phonon-backend', - '-no-openvg') + if self.spec.satisfies('@4'): + configure('-v', + '-confirm-license', + '-opensource', + '-prefix', prefix, + '-openssl-linked', + '-dbus-linked', + #'-fast', + '-opengl', + '-optimized-qmake', + '-no-pch', + # phonon required for py-pyqt + # '-no-phonon', + # '-no-phonon-backend', + '-no-openvg') + elif self.spec.satisfies('@5'): + # Apparently this is the only way to + # "truly" get rid of webkit compiles now... + os.rename("qtwebkit","no-qtwebkit") + os.rename("qtwebkit-examples","no-qtwebkit-examples") + + configure('-v', + '-confirm-license', + '-opensource', + '-prefix', prefix, + '-openssl-linked', + '-dbus-linked', + #'-fast', + '-opengl', + '-qt-xcb', + '-optimized-qmake', + '-no-pch', + # phonon required for py-pyqt + # '-no-phonon', + # '-no-phonon-backend', + '-no-openvg') make() make("install") -- cgit v1.2.3-70-g09d2 From 14097e39ccc77e03574119eac59524ad2bb60c6f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 17 Feb 2015 01:28:47 -0800 Subject: Suppress download status meter when routing I/O to a file. --- lib/spack/spack/fetch_strategy.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index a71f3a1531..48313e2b37 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -41,6 +41,7 @@ in order to build it. They need to define the following methods: Archive a source directory, e.g. for creating a mirror. """ import os +import sys import re import shutil from functools import wraps @@ -141,13 +142,19 @@ class URLFetchStrategy(FetchStrategy): tty.msg("Trying to fetch from %s" % self.url) + curl_args = ['-O', # save file to disk + '-f', # fail on >400 errors + '-D', '-', # print out HTML headers + '-L', self.url,] + + if sys.stdout.isatty(): + curl_args.append('-#') # status bar when using a tty + else: + curl_args.append('-sS') # just errors when not. + # Run curl but grab the mime type from the http headers - headers = spack.curl('-#', # status bar - '-O', # save file to disk - '-f', # fail on >400 errors - '-D', '-', # print out HTML headers - '-L', self.url, - return_output=True, fail_on_error=False) + headers = spack.curl( + *curl_args, return_output=True, fail_on_error=False) if spack.curl.returncode != 0: # clean up archive on failure. -- cgit v1.2.3-70-g09d2 From 959ce4f98577c872c69a29a30a2e0659e12cff75 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 10:43:29 -0800 Subject: Downgrade standard version of ImageMagick to a non-changing URL. - bleeding edge still available but commented by default. 
--- var/spack/packages/ImageMagick/package.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/var/spack/packages/ImageMagick/package.py b/var/spack/packages/ImageMagick/package.py index 657b9255a3..753ea80ca6 100644 --- a/var/spack/packages/ImageMagick/package.py +++ b/var/spack/packages/ImageMagick/package.py @@ -3,18 +3,32 @@ from spack import * class Imagemagick(Package): """ImageMagick is a image processing library""" homepage = "http://www.imagemagic.org" - url = "http://www.imagemagick.org/download/ImageMagick-6.8.9-10.tar.gz" - version('6.9.0-0', '2cf094cb86ec518fa5bc669ce2d21613') - version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c') - version('6.8.9-9', 'e63fed3e3550851328352c708f800676') + #------------------------------------------------------------------------- + # ImageMagick does not keep around anything but *-10 versions, so + # this URL may change. If you want the bleeding edge, you can + # uncomment it and see if it works but you may need to try to + # fetch a newer version (-6, -7, -8, -9, etc.) or you can stick + # wtih the older, stable, archived -10 versions below. + # + # TODO: would be nice if spack had a way to recommend avoiding a + # TODO: bleeding edge version, but not comment it out. + # ------------------------------------------------------------------------- + # version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b', + # url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2") + + #------------------------------------------------------------------------- + # *-10 versions are archived, so these versions should fetch reliably. + # ------------------------------------------------------------------------- + version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c', + url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download") depends_on('libtool') depends_on('jpeg') depends_on('libpng') depends_on('freetype') depends_on('fontconfig') -# depends_on('libtiff') + depends_on('libtiff') def install(self, spec, prefix): configure("--prefix=%s" % prefix) -- cgit v1.2.3-70-g09d2 From 3e5aa4b0f5c2b1fba3a81abb465298de93009532 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 10:58:10 -0800 Subject: llvm/clang version bump --- var/spack/packages/clang/package.py | 5 ++--- var/spack/packages/llvm/package.py | 14 ++++++-------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/var/spack/packages/clang/package.py b/var/spack/packages/clang/package.py index b0097bd126..4f10385dbd 100644 --- a/var/spack/packages/clang/package.py +++ b/var/spack/packages/clang/package.py @@ -29,11 +29,10 @@ class Clang(Package): Objective C and Objective C++ front-end for the LLVM compiler. """ homepage = "http://clang.llvm.org" - url = "http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.gz" + list_url = "http://llvm.org/releases/download.html" depends_on("llvm") - - version('3.4.2', '87945973b7c73038871c5f849a818588') + version('3.4.2', '87945973b7c73038871c5f849a818588', url='http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.xz') def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py index 69354a5c90..9d2be690bb 100644 --- a/var/spack/packages/llvm/package.py +++ b/var/spack/packages/llvm/package.py @@ -32,15 +32,13 @@ class Llvm(Package): it is the full name of the project. 
""" homepage = "http://llvm.org/" - url = "http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz" + list_url = "http://llvm.org/releases/download.html" - version('3.4.2', 'a20669f75967440de949ac3b1bad439c') - version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', - url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') - version('2.9', '793138412d2af2c7c7f54615f8943771', - url='http://llvm.org/releases/2.9/llvm-2.9.tgz') - version('2.8', '220d361b4d17051ff4bb21c64abe05ba', - url='http://llvm.org/releases/2.8/llvm-2.8.tgz') + version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz') + version('3.4.2', 'a20669f75967440de949ac3b1bad439c', url='http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz') + version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') + version('2.9', '793138412d2af2c7c7f54615f8943771', url='http://llvm.org/releases/2.9/llvm-2.9.tgz') + version('2.8', '220d361b4d17051ff4bb21c64abe05ba', url='http://llvm.org/releases/2.8/llvm-2.8.tgz') def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag -- cgit v1.2.3-70-g09d2 From 6e13d0985cf457b3d2b64908c6652ec4fc2349f0 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Wed, 18 Feb 2015 13:13:19 -0800 Subject: fixed deps for python packages --- var/spack/packages/py-dateutil/package.py | 1 + var/spack/packages/py-six/package.py | 1 + 2 files changed, 2 insertions(+) diff --git a/var/spack/packages/py-dateutil/package.py b/var/spack/packages/py-dateutil/package.py index 11699e07ee..3bd2f2ca13 100644 --- a/var/spack/packages/py-dateutil/package.py +++ b/var/spack/packages/py-dateutil/package.py @@ -9,6 +9,7 @@ class PyDateutil(Package): extends('python') depends_on('py-setuptools') + depends_on('py-six') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-six/package.py b/var/spack/packages/py-six/package.py index 04d29adced..05c5bd00a9 100644 --- a/var/spack/packages/py-six/package.py +++ b/var/spack/packages/py-six/package.py @@ -8,6 +8,7 @@ class PySix(Package): version('1.9.0', '476881ef4012262dfc8adc645ee786c4') extends('python') + depends_on('py-setuptools') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From db113733513345059b0a6d36aae8cddc56e6560a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 11:08:25 -0800 Subject: Resurrect combined qt4/5 package from b7dacb --- var/spack/packages/qt/package.py | 77 +++++++++++++++++++++------------------- 1 file changed, 41 insertions(+), 36 deletions(-) diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 1535bd5948..1dc3e1e51d 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -62,42 +62,47 @@ class Qt(Package): filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) + + @property + def common_config_args(self): + return [ + '-prefix', self.prefix, + '-v', + '-opensource', + '-opengl', + "-release", + '-shared', + '-confirm-license', + '-openssl-linked', + '-dbus-linked', + '-optimized-qmake', + '-no-openvg', + '-no-pch', + # NIS is deprecated in more recent glibc + "-no-nis", + # For now, disable all the database drivers + "-no-sql-db2", "-no-sql-ibase", "-no-sql-mysql", "-no-sql-oci", "-no-sql-odbc", + "-no-sql-psql", "-no-sql-sqlite", "-no-sql-sqlite2", "-no-sql-tds"] + + + @when('@4') + def configure(self): + configure('-fast', + 
'-no-webkit', + *self.common_config_args) + + + @when('@5') + def configure(self): + configure('-no-eglfs', + '-no-directfb', + '-qt-xcb', + # If someone wants to get a webkit build working, be my guest! + '-skip', 'qtwebkit', + *self.common_config_args) + + def install(self, spec, prefix): - if self.spec.satisfies('@4'): - configure('-v', - '-confirm-license', - '-opensource', - '-prefix', prefix, - '-openssl-linked', - '-dbus-linked', - #'-fast', - '-opengl', - '-optimized-qmake', - '-no-pch', - # phonon required for py-pyqt - # '-no-phonon', - # '-no-phonon-backend', - '-no-openvg') - elif self.spec.satisfies('@5'): - # Apparently this is the only way to - # "truly" get rid of webkit compiles now... - os.rename("qtwebkit","no-qtwebkit") - os.rename("qtwebkit-examples","no-qtwebkit-examples") - - configure('-v', - '-confirm-license', - '-opensource', - '-prefix', prefix, - '-openssl-linked', - '-dbus-linked', - #'-fast', - '-opengl', - '-qt-xcb', - '-optimized-qmake', - '-no-pch', - # phonon required for py-pyqt - # '-no-phonon', - # '-no-phonon-backend', - '-no-openvg') + self.configure() make() make("install") -- cgit v1.2.3-70-g09d2 From c7b8a4e25cde8c2f32e514fdfe5280feca42e4ea Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 14:00:37 -0800 Subject: Fix for SPACK-46: cleanup spack clean, spack restage. --- lib/spack/spack/cmd/clean.py | 29 +++--------------- lib/spack/spack/cmd/restage.py | 46 +++++++++++++++++++++++++++++ lib/spack/spack/package.py | 21 +++---------- var/spack/packages/libdwarf/package.py | 7 ----- var/spack/packages/py-virtualenv/package.py | 4 --- 5 files changed, 54 insertions(+), 53 deletions(-) create mode 100644 lib/spack/spack/cmd/restage.py diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index ec3b221988..c20136ebe5 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -23,45 +23,24 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from external import argparse -import subprocess import llnl.util.tty as tty import spack import spack.cmd -import spack.stage as stage -description = "Remove staged files for packages" +description = "Remove build stage and source tarball for packages." 
def setup_parser(subparser): - subparser.add_argument('-c', "--clean", action="store_true", dest='clean', - help="run make clean in the build directory (default)") - subparser.add_argument('-w', "--work", action="store_true", dest='work', - help="delete the build directory and re-expand it from its archive.") - subparser.add_argument('-d', "--dist", action="store_true", dest='dist', - help="delete the downloaded archive.") subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to clean") def clean(parser, args): if not args.packages: - tty.die("spack clean requires at least one package argument") + tty.die("spack clean requires at least one package spec.") specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.db.get(spec) - if args.dist: - package.do_clean_dist() - tty.msg("Cleaned %s" % package.name) - - elif args.work: - package.do_clean_work() - tty.msg("Restaged %s" % package.name) - - else: - try: - package.do_clean() - except subprocess.CalledProcessError, e: - tty.warn("Warning: 'make clean' didn't work. Consider 'spack clean --work'.") - tty.msg("Made clean for %s" % package.name) + package.do_clean() diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py new file mode 100644 index 0000000000..e735a12c32 --- /dev/null +++ b/lib/spack/spack/cmd/restage.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse + +import llnl.util.tty as tty + +import spack +import spack.cmd + +description = "Revert checked out package source code." 
+ +def setup_parser(subparser): + subparser.add_argument('packages', nargs=argparse.REMAINDER, + help="specs of packages to restage") + + +def restage(parser, args): + if not args.packages: + tty.die("spack restage requires at least one package spec.") + + specs = spack.cmd.parse_specs(args.packages, concretize=True) + for spec in specs: + package = spack.db.get(spec) + package.do_restage() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index bc8541a184..492af12053 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1047,26 +1047,13 @@ class Package(object): tree.unmerge(self.prefix, ignore=ignore) - def do_clean(self): - if self.stage.expanded_archive_path: - self.stage.chdir_to_source() - self.clean() - - - def clean(self): - """By default just runs make clean. Override if this isn't good.""" - # TODO: should we really call make clean, ro just blow away the directory? - make = build_env.MakeExecutable('make', self.parallel) - make('clean') - - - def do_clean_work(self): - """By default just blows away the stage directory and re-stages.""" + def do_restage(self): + """Reverts expanded/checked out source to a pristine state.""" self.stage.restage() - def do_clean_dist(self): - """Removes the stage directory where this package was built.""" + def do_clean(self): + """Removes the package's build stage and source tarball.""" if os.path.exists(self.stage.path): self.stage.destroy() diff --git a/var/spack/packages/libdwarf/package.py b/var/spack/packages/libdwarf/package.py index c4d71ebc01..099a974e93 100644 --- a/var/spack/packages/libdwarf/package.py +++ b/var/spack/packages/libdwarf/package.py @@ -53,13 +53,6 @@ class Libdwarf(Package): parallel = False - def clean(self): - for dir in dwarf_dirs: - with working_dir(dir): - if os.path.exists('Makefile'): - make('clean') - - def install(self, spec, prefix): # dwarf build does not set arguments for ar properly make.add_default_arg('ARFLAGS=rcs') diff --git a/var/spack/packages/py-virtualenv/package.py b/var/spack/packages/py-virtualenv/package.py index 9d94c2dcda..2d10d440a6 100644 --- a/var/spack/packages/py-virtualenv/package.py +++ b/var/spack/packages/py-virtualenv/package.py @@ -11,9 +11,5 @@ class PyVirtualenv(Package): extends('python') depends_on('py-setuptools') - def clean(self): - if os.path.exists('build'): - shutil.rmtree('build') - def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) -- cgit v1.2.3-70-g09d2 From e67655c31a9d98a65b3c9cd43ca329d8b97ba95b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 14:29:55 -0800 Subject: docs autodetect version. --- lib/spack/docs/conf.py | 14 ++++++++++---- lib/spack/spack/__init__.py | 2 +- lib/spack/spack/cmd/package-list.py | 9 +++++---- 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index b4d49c594d..b01f33d4b8 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -35,7 +35,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import sys +import os +import subprocess # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -43,9 +45,13 @@ import sys, os sys.path.insert(0, os.path.abspath('exts')) # Add the Spack bin directory to the path so that we can use its output in docs. -os.environ['SPACK_ROOT'] = '../../..' +spack_root = '../../..' 
+os.environ['SPACK_ROOT'] = spack_root os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin' +spack_version = subprocess.Popen( + ['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.') + # Set an environment variable so that colify will print output like it would to # a terminal. os.environ['COLIFY_TTY'] = 'true' @@ -97,9 +103,9 @@ copyright = u'2013-2014, Lawrence Livermore National Laboratory' # built documents. # # The short X.Y version. -version = '1.0' +version = '.'.join(spack_version[:2]) # The full version, including alpha/beta/rc tags. -release = '1.0' +release = '.'.join(spack_version[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 6763411f7d..eb891e3d57 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -78,7 +78,7 @@ concretizer = DefaultConcretizer() # Version information from spack.version import Version -spack_version = Version("0.8") +spack_version = Version("0.8.15") # # Executables used by Spack diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index 87c528881e..809c64a5b9 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import re +import cgi from StringIO import StringIO import llnl.util.tty as tty from llnl.util.tty.colify import * @@ -70,9 +71,9 @@ def print_rst_package_list(): print print pkg.name print "-" * len(pkg.name) - print "Links" - print " * `Homepage <%s>`__" % pkg.homepage - print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg)) + print "Links:" + print " * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage) + print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg)) print if pkg.versions: print "Versions:" @@ -82,7 +83,7 @@ def print_rst_package_list(): print " " + ", ".join("`%s`_" % d if d != "mpi" else d for d in pkg.dependencies) print - print "Description" + print "Description:" print pkg.format_doc(indent=2) print print "-----" -- cgit v1.2.3-70-g09d2 From 2755171e08777a2b2e8449166c792956b7e8304c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 14:46:00 -0800 Subject: Update documentation to reflect new restage/clean behavior. --- lib/spack/docs/packaging_guide.rst | 40 ++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ec2ca4d099..076d3ca0e6 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1964,35 +1964,33 @@ apply cleanly on some previous run, then it will restage the entire package before patching. -``spack clean`` +``spack restage`` ~~~~~~~~~~~~~~~~~ +Restores the source code to pristine state, as it was before building. -There are several variations of ``spack clean``. With no arguments, -``spack clean`` runs ``make clean`` in the expanded archive directory. -This is useful if an attempted build failed, and something needs to be -changed to get a package to build. If a particular package does not -have a ``make clean`` target, this will do nothing. +Does this in one of two ways: + + 1. If the source was fetched as a tarball, deletes the entire build + directory and re-expands the tarball. + + 2. 
If the source was checked out from a repository, this deletes the + build directory and checks it out again. -``spack clean -w / --work`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Deletes the entire build directory and re-expands it from the downloaded -archive. This is useful if a package does not support a proper ``make clean`` -target. -``spack clean -d / --dist`` +``spack clean`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Deletes the build directory *and* the downloaded archive. If -``fetch``, ``stage``, or ``install`` are run again after this, the -process will start from scratch, and the archive archive will be -downloaded again. Useful if somehow a bad archive is downloaded -accidentally and needs to be cleaned out of the staging area. +Cleans up temporary files for a particular package, by deleting the +expanded/checked out source code *and* any downloaded archive. If +``fetch``, ``stage``, or ``install`` are run again after this, Spack's +build process will start from scratch. + ``spack purge`` ~~~~~~~~~~~~~~~~~ - -Cleans up *everything* in the build directory. You can use this to -recover disk space if temporary files from interrupted or failed -installs accumulate in the staging area. +Cleans up all of Spack's temporary files. Use this to recover disk +space if temporary files from interrupted or failed installs +accumulate in the staging area. This is equivalent to running ``spack +clean`` for every package you have fetched or staged. Keeping the stage directory on success -- cgit v1.2.3-70-g09d2 From 2eda01c703cbd1692130ffc8ed48a9163fda87b6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 16:21:15 -0800 Subject: uninstall -f ignores nonexisting packages. --- lib/spack/spack/cmd/uninstall.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 0962942f43..6ded455390 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -66,6 +66,7 @@ def uninstall(parser, args): tty.die(*args) if len(matching_specs) == 0: + if args.force: continue tty.die("%s does not match any installed packages." % spec) for s in matching_specs: -- cgit v1.2.3-70-g09d2 From 2374eb4dca2a3bb4db5586251964d5ca9166b45a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 16:45:12 -0800 Subject: Fix for SPACK-62 - deactivate -a errors if arg is not activated - deactivate -af does not. --- lib/spack/spack/cmd/activate.py | 2 ++ lib/spack/spack/cmd/deactivate.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py index 71eca4f453..4070baaa70 100644 --- a/lib/spack/spack/cmd/activate.py +++ b/lib/spack/spack/cmd/activate.py @@ -38,6 +38,7 @@ def setup_parser(subparser): def activate(parser, args): + # TODO: shouldn't have to concretize here. Fix DAG issues. specs = spack.cmd.parse_specs(args.spec, concretize=True) if len(specs) != 1: tty.die("activate requires one spec. %d given." % len(specs)) @@ -47,6 +48,7 @@ def activate(parser, args): spack.db.get(specs[0]) spec = spack.cmd.disambiguate_spec(specs[0]) + if not spec.package.is_extension: tty.die("%s is not an extension." 
% spec.name) diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index bfec618c8e..c9a4d4b2f6 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -44,6 +44,7 @@ def setup_parser(subparser): def deactivate(parser, args): + # TODO: shouldn't have to concretize here. Fix DAG issues. specs = spack.cmd.parse_specs(args.spec, concretize=True) if len(specs) != 1: tty.die("deactivate requires one spec. %d given." % len(specs)) @@ -59,6 +60,7 @@ def deactivate(parser, args): if pkg.extendable: tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) ext_pkgs = spack.db.installed_extensions_for(spec) + for ext_pkg in ext_pkgs: ext_pkg.spec.normalize() if ext_pkg.activated: @@ -68,6 +70,9 @@ def deactivate(parser, args): # TODO: store DAG info properly (see above) spec.normalize() + if not args.force and not spec.package.activated: + tty.die("%s is not activated." % pkg.spec.short_spec) + tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec) topo_order = topological_sort(spec) -- cgit v1.2.3-70-g09d2 From 02e316e7724e8f39e69306f03bcbff1eaa12827c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 16:45:54 -0800 Subject: Convert ValueErrors to SpackError subclass. --- lib/spack/spack/package.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 492af12053..fed62f6cb7 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -941,16 +941,17 @@ class Package(object): def _sanity_check_extension(self): if not self.is_extension: - raise ValueError("This package is not an extension.") + raise ActivationError("This package is not an extension.") + extendee_package = self.extendee_spec.package extendee_package._check_extendable() if not extendee_package.installed: - raise ValueError("Can only (de)activate extensions for installed packages.") + raise ActivationError("Can only (de)activate extensions for installed packages.") if not self.installed: - raise ValueError("Extensions must first be installed.") + raise ActivationError("Extensions must first be installed.") if not self.extendee_spec.name in self.extendees: - raise ValueError("%s does not extend %s!" % (self.name, self.extendee.name)) + raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name)) def do_activate(self, **kwargs): -- cgit v1.2.3-70-g09d2 From 14e70ad68959097bcfa4bda38cbafe8d1e71127b Mon Sep 17 00:00:00 2001 From: Saravan Pantham Date: Wed, 18 Feb 2015 18:05:57 -0800 Subject: Added netcdf package support. 
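NetCDF builds with its stock configure script; the package links it
against Spack's hdf5 by passing explicit CPPFLAGS/LDFLAGS and disables
DAP support and shared libraries for now.  A typical build then looks
roughly like this (installed versions will vary by site):

    $ spack install netcdf     # installs hdf5 first, then netcdf
    $ spack find -p netcdf     # print the installation prefix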
--- var/spack/packages/netcdf/package.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 var/spack/packages/netcdf/package.py diff --git a/var/spack/packages/netcdf/package.py b/var/spack/packages/netcdf/package.py new file mode 100644 index 0000000000..34284ea725 --- /dev/null +++ b/var/spack/packages/netcdf/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Netcdf(Package): + """NetCDF is a set of software libraries and self-describing, machine-independent + data formats that support the creation, access, and sharing of array-oriented + scientific data.""" + + homepage = "http://www.unidata.ucar.edu/software/netcdf/" + url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" + + version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') + + # Dependencies: + # >HDF5 + depends_on("hdf5") + + def install(self, spec, prefix): + configure( + "--prefix=%s" % prefix, + "--disable-dap", # Disable DAP. + "--disable-shared", # Don't build shared libraries (use static libs). + "CPPFLAGS=-I%s/include" % spec['hdf5'].prefix, # Link HDF5's include dir. + "LDFLAGS=-L%s/lib" % spec['hdf5'].prefix) # Link HDF5's lib dir. + + make("install") + + # Check the newly installed netcdf package. Currently disabled. + # make("check") -- cgit v1.2.3-70-g09d2 From 065e5ccd1a09cdb24db78c871cd512b4d305e0e6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 18 Feb 2015 20:51:50 -0800 Subject: Update contributors list. --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 74a327b517..69632b8933 100644 --- a/README.md +++ b/README.md @@ -36,8 +36,7 @@ Authors ---------------- Spack was written by Todd Gamblin, tgamblin@llnl.gov. -Significant contributions were also made by the following awesome -people: +Significant contributions were also made by: * David Beckingsale * David Boehme @@ -46,7 +45,10 @@ people: * Matt Legendre * Greg Lee * Adam Moody + * Saravan Pantham + * Joachim Protze * Bob Robey + * Justin Too Release ---------------- -- cgit v1.2.3-70-g09d2 From d49c98188a68d4aad0410ac1d0df759117773937 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 22 Feb 2015 21:17:18 -0800 Subject: Add an override to colify so we can set terminal dimensions. --- lib/spack/docs/conf.py | 4 +--- lib/spack/llnl/util/tty/colify.py | 9 +++++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index b4d49c594d..7b350f73b5 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -48,9 +48,7 @@ os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin' # Set an environment variable so that colify will print output like it would to # a terminal. -os.environ['COLIFY_TTY'] = 'true' -os.environ['COLUMNS'] = '80' -os.environ['LINES'] = '25' +os.environ['COLIFY_SIZE'] = '25x80' # Enable todo items todo_include_todos = True diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 6b2909990c..66c52c3968 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -169,6 +169,15 @@ def colify(elts, **options): if not elts: return (0, ()) + # environment size is of the form "x" + env_size = os.environ.get('COLIFY_SIZE') + if env_size: + try: + r, c = env_size.split('x') + console_rows, console_cols = int(r), int(c) + tty = True + except: pass + # Use only one column if not a tty. 
if not tty: if tty is False or not output.isatty(): -- cgit v1.2.3-70-g09d2 From 6dab133d9f001d5e03fde25bdd3d412adf2a7b56 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 22 Feb 2015 23:00:14 -0800 Subject: Same package add icon on mac and linux. --- lib/spack/spack/package.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fed62f6cb7..137e8f8837 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1171,12 +1171,8 @@ def validate_package_url(url_string): def print_pkg(message): """Outputs a message with a package icon.""" - mac_ver = py_platform.mac_ver()[0] - if mac_ver and Version(mac_ver) >= Version('10.7'): - print u"\U0001F4E6" + tty.indent, - else: - from llnl.util.tty.color import cwrite - cwrite('@*g{[+]} ') + from llnl.util.tty.color import cwrite + cwrite('@*g{[+]} ') print message -- cgit v1.2.3-70-g09d2 From 5699cbb597468911c0b34988512ddea8b4c62ecc Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 23 Feb 2015 01:22:49 -0800 Subject: Fix SPACK-60: 0.8.15 basic docs. --- lib/spack/docs/basic_usage.rst | 716 +++++++++++++++++++++++++----------- lib/spack/docs/packaging_guide.rst | 74 ++++ lib/spack/spack/cmd/package-list.py | 2 + 3 files changed, 583 insertions(+), 209 deletions(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 196b7077f9..bd25d739ea 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -4,18 +4,17 @@ Basic usage ===================== Spack is implemented as a single command (``spack``) with many -*subcommands*, much like ``git``, ``svn``, ``yum``, or ``apt-get``. -Only a small subset of commands are needed for typical usage. - -This section covers a small set of subcommands that should cover most -general use cases for Spack. +*subcommands*. Only a small subset of commands is needed for typical +usage. Listing available packages ------------------------------ -The first thing you will likely want to do with spack is find out what -software is available to install. There are a few relevant commands. +The first thing you likely want to do with spack is to install some +software. Before that, you need to know what's available. You can +see avaialble package names either using the :ref:`package-list`, or +using the commands below. ``spack list`` ~~~~~~~~~~~~~~~~ @@ -36,33 +35,273 @@ do wildcard searches using ``*``: ``spack info`` ~~~~~~~~~~~~~~~~ -To get information on a particular package from the full list, run -``spack info ``. For example, for ``mpich`` the output -looks like this: +To get more information on a particular package from `spack list`, use +`spack info`. Just supply the name of a package: .. command-output:: spack info mpich -This includes basic information about the package: where to download -it, its dependencies, virtual packages it provides (e.g. an MPI -implementation will provide the MPI interface), and a text -description, if one is available. :ref:`Dependencies -` and :ref:`virtual dependencies -` are described in more detail later. +Most of the information is self-explanatory. *Safe versions* are +versions that Spack has a checksum for, and Spack will use the +checksum to ensure they downloaded without any errors or malicious +attacks. :ref:`Dependencies ` and :ref:`virtual +dependencies `, are described in more detail +later. 
``spack versions`` ~~~~~~~~~~~~~~~~~~~~~~~~ -To see available versions of a package, run ``spack versions``, for -example: +To see *more* available versions of a package, run ``spack versions``, +for example: .. command-output:: spack versions libelf -Since it has to manage many different software packages, Spack doesn't -place many restrictions on what a package version has to look like. -Packages like ``mpich`` use traditional version numbers like -``3.0.4``. Other packages, like ``libdwarf`` use date-stamp versions -like ``20130729``. Versions can contain numbers, letters, dashes, -underscores, and periods. +There are two sections in the output. *Safe versions* are ones that +have already been checksummed. Spack goes a step further, though, and +also shows you what versions are available out on the web---these are +*remote versions*. Spack gets this information by scraping it +directly from webpages. Depending on the package, Spack may or may +not be able to find any remote versions. + + +Installing and uninstalling +------------------------------ + +Now that you know how to list avaiable packages and versions, you're +ready to start installing things. + +``spack install`` +~~~~~~~~~~~~~~~~~~~~~ + +``spack install`` will install any package shown by ``spack list``. +To install the latest version of a pacakge, along with all of its +dependencies, simply give it a package name: + +.. code-block:: sh + + $ spack install mpileaks + +If `mpileaks` depends on other packages, Spack will install those +first. It then fetches the tarball for ``mpileaks``, expands it, +verifies that it was downloaded without errors, builds it, and +installs it in its own directory under ``$SPACK_HOME/opt``. You'll see +a number of messages from spack, a lot of build output, and a message +that the packages is installed: + +.. code-block:: sh + + $ spack install mpileaks + ==> Installing mpileaks + ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4. + ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318. + ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da. + ==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz + ######################################################################## 100.0% + ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz + ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23. + ==> No patches needed for mpileaks. + ==> Building mpileaks. + + ... build output ... + + ==> Successfully installed mpileaks. + Fetch: 2.16s. Build: 9.82s. Total: 11.98s. + [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23 + +The last line, with the ``[+]``, indicates where the package is +installed. + +Building a specific version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Spack can also build *specific versions* of a package. To do this, +just add ``@`` after the package name, followed by a version: + +.. code-block:: sh + + $ spack install mpich@3.0.4 + +Any number of versions of the same package can be installed at once +without interfering with each other. This is good for multi-user +sites, as installing a version that one user needs will not disrupt +existing installations for other users. 
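+
+For example, these two commands (assuming both versions are still
+available for download) install side by side without conflict:
+
+.. code-block:: sh
+
+   $ spack install mpich@3.0.4
+   $ spack install mpich@3.1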
+ +In addition to different versions, Spack can customize the compiler, +compile-time options (variants), and platform (for cross compiles) of +an installation. Spack is unique in that it can also configure the +*dependencies* a package is built with. For example, two +configurations of the same version of a package, one built with boost +1.39.0, and the other version built with version 1.43.0, can coexist. + +This can all be done on the command line using special syntax. Spack +calls the descriptor used to refer to a particular package +configuration a **spec**. In the command lines above, both +``mpileaks`` and ``mpileaks@3.0.4`` are specs. Specs are described in +detail in :ref:`sec-specs`. + + +``spack uninstall`` +~~~~~~~~~~~~~~~~~~~~~ + +To uninstall a package, type ``spack uninstall ``. This will +completely remove the directory in which the package was installed. + +.. code-block:: sh + + spack uninstall mpich + +If there are still installed packages that depend on the package to be +uninstalled, spack will refuse to uninstall it. You can override this +behavior with ``spack uninstall -f ``, but you risk breaking +other installed packages. In general, it is safer to remove dependent +packages *before* removing their dependencies. + +A line like ``spack uninstall mpich`` may be ambiguous, if multiple +``mpich`` configurations are installed. For example, if both +``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer +to either one. Because it cannot determine which one to uninstall, +Spack will ask you to provide a version number to remove the +ambiguity. As an example, ``spack uninstall mpich@3.1`` is +unambiguous in this scenario. + + +Seeing installed packages +----------------------------------- + +We know that ``spack list`` shows you the names of available packages, +but how do you figure out which are installed? + + +``spack find`` +~~~~~~~~~~~~~~~~~~~~~~ + +``spack find`` shows the *specs* of installed packages. A spec is +like a name, but it has a version, compiler, architecture, and build +options associated with it. In spack, you can have many installations +of the same package with different specs. + +Running ``spack find`` with no arguments lists installed packages: + +.. code-block:: sh + + $ spack find + ==> 74 installed packages. 
+ -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0 + adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1 + atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2 + boost@1.55.0 libelf@0.8.13 py-nose@1.3.4 + bzip2@1.0.6 libffi@3.1 py-numpy@1.9.1 + cairo@1.14.0 libmng@2.0.2 py-pygments@2.0.1 + callpath@1.0.2 libpng@1.6.16 py-pyparsing@2.0.3 + cmake@3.0.2 libtiff@4.0.3 py-pyside@1.2.2 + dbus@1.8.6 libtool@2.4.2 py-pytz@2014.10 + dbus@1.9.0 libxcb@1.11 py-setuptools@11.3.1 + dyninst@8.1.2 libxml2@2.9.2 py-six@1.9.0 + fontconfig@2.11.1 libxml2@2.9.2 python@2.7.8 + freetype@2.5.3 llvm@3.0 qhull@1.0 + gdk-pixbuf@2.31.2 memaxes@0.5 qt@4.8.6 + glib@2.42.1 mesa@8.0.5 qt@5.4.0 + graphlib@2.0.0 mpich@3.0.4 readline@6.3 + gtkplus@2.24.25 mpileaks@1.0 sqlite@3.8.5 + harfbuzz@0.9.37 mrnet@4.1.0 stat@2.1.0 + hdf5@1.8.13 ncurses@5.9 tcl@8.6.3 + icu@54.1 netcdf@4.3.3 tk@src + jpeg@9a openssl@1.0.1h vtk@6.1.0 + launchmon@1.0.1 pango@1.36.8 xcb-proto@1.11 + lcms@2.6 pixman@0.32.6 xz@5.2.0 + libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8 + + -- chaos_5_x86_64_ib / gcc@4.9.2 -------------------------------- + libelf@0.8.10 mpich@3.0.4 + +Packages are divided into groups according to their architecture and +compiler. Within each group, Spack tries to keep the view simple, and +only shows the version of installed packages. + +In some cases, there may be differnt configurations of the *same* +version of a package installed. For example, there are two +installations of of ``libdwarf@20130729`` above. We can look at them +in more detail using ``spack find -d``, and by asking only to show +``libdwarf`` packages: + +.. code-block:: sh + + $ spack find --deps libdwarf + ==> 2 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + libdwarf@20130729-d9b90962 + ^libelf@0.8.12 + libdwarf@20130729-b52fac98 + ^libelf@0.8.13 + +Now we see that the two instances of ``libdwarf`` depend on +*different* versions of ``libelf``: 0.8.12 and 0.8.13. This view can +become complicated for packages with many dependencies. If you just +want to know whether two packages' dependencies differ, you can use +``spack find -l``: + +.. code-block:: sh + + $ spack find -l libdwarf + ==> 2 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98 + +Now the ``libwarf`` installs have hashes after their names. These are +hashes over all of the dependencies of each package. If the hashes +are the same, then the packages have the same dependency configuration. + +If you want to know the path where each package is installed, you can +use ``spack find -p``: + +.. code-block:: sh + + $ spack find -p + ==> 74 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd + adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da + atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09 + boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0 + bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6 + cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44 + callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318 + ... 
+ +And, finally, you can restrict your search to a particular package +by supplying its name: + +.. code-block:: sh + + $ spack find -p libelf + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11 + libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12 + libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13 + +``spack find`` actually does a lot more than this. You can use +*specs* to query for specific configurations and builds of each +package. If you want to find only libelf versions greater than version +0.8.12, you could say: + +.. code-block:: sh + + $ spack find libelf@0.8.12: + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + libelf@0.8.12 libelf@0.8.13 + +Finding just the versions of libdwarf built with a particular version +of libelf would look like this: + +.. code-block:: sh + + $ spack find -l libdwarf ^libelf@0.8.12 + ==> 1 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + libdwarf@20130729-d9b90962 + +The full spec syntax is discussed in detail in :ref:`sec-specs`. + Compiler Configuration ----------------------------------- @@ -110,15 +349,18 @@ where the compiler is installed. For example:: intel@13.0.079 Or you can run ``spack compiler add`` with no arguments to force -autodetection. This is useful if you do not know where compilers -live, but new compilers have been added to your ``PATH``. For -example, using dotkit, you might do this:: +autodetection. This is useful if you do not know where compilers are +installed, but you know that new compilers have been added to your +``PATH``. For example, using dotkit, you might do this:: - $ use gcc-4.9.0 + $ module load gcc-4.9.0 $ spack compiler add ==> Added 1 new compiler to /Users/gamblin2/.spackconfig gcc@4.9.0 +This loads the environment module for gcc-4.9.0 to get it into the +``PATH``, and then it adds the compiler to Spack. + ``spack compiler info`` ~~~~~~~~~~~~~~~~~~~~~~~ @@ -126,17 +368,20 @@ example, using dotkit, you might do this:: If you want to see specifics on a particular compiler, you can run ``spack compiler info`` on it:: - $ spack compiler info intel@12.1.3 - intel@12.1.3: - cc = /usr/local/bin/icc-12.1.293 - cxx = /usr/local/bin/icpc-12.1.293 - f77 = /usr/local/bin/ifort-12.1.293 - fc = /usr/local/bin/ifort-12.1.293 + $ spack compiler info intel@15 + intel@15.0.0: + cc = /usr/local/bin/icc-15.0.090 + cxx = /usr/local/bin/icpc-15.0.090 + f77 = /usr/local/bin/ifort-15.0.090 + fc = /usr/local/bin/ifort-15.0.090 This shows which C, C++, and Fortran compilers were detected by Spack. +Notice also that we didn't have to be too specific about the +version. We just said ``intel@15``, and information about the only +matching Intel compiler was displayed. -Manual configuration +Manual compiler configuration ~~~~~~~~~~~~~~~~~~~~~~~ If autodetection fails, you can manually conigure a compiler by @@ -153,8 +398,8 @@ Each compiler configuration in the file looks like this:: fc = /usr/local/bin/ifort-15.0.024-beta ... 
-For compilers, like ``clang``, that do not support Fortran, you can simply -put ``None`` for ``f77`` and ``fc``:: +For compilers, like ``clang``, that do not support Fortran, put +``None`` for ``f77`` and ``fc``:: [compiler "clang@3.3svn"] cc = /usr/bin/clang @@ -163,169 +408,7 @@ put ``None`` for ``f77`` and ``fc``:: fc = None Once you save the file, the configured compilers will show up in the -list displayed when you run ``spack compilers``. - - -Seeing installed packages ----------------------------------- - -``spack find`` -~~~~~~~~~~~~~~~~~~~~~~ - -The second thing you're likely to want to do with Spack, and the first -thing users of your system will likely want to do, is to find what -software is already installed and ready to use. You can do that with -``spack find``. - -Running ``spack find`` with no arguments will list all the installed -packages: - -.. code-block:: sh - - $ spack find - == chaos_5_x86_64_ib =========================================== - -- gcc@4.4.7 --------------------------------------------------- - libdwarf@20130207-d9b909 - libdwarf@20130729-d9b909 - libdwarf@20130729-b52fac - libelf@0.8.11 - libelf@0.8.12 - libelf@0.8.13 - -Packages are grouped by architecture, then by the compiler used to -build them, and then by their versions and options. If a package has -dependencies, there will also be a hash at the end of the name -indicating the dependency configuration. Packages with the same hash -have the same dependency configuration. If you want ALL information -about dependencies, as well, then you can supply ``-l`` or ``--long``: - -.. code-block:: sh - - $ spack find -l - == chaos_5_x86_64_ib =========================================== - -- gcc@4.4.7 --------------------------------------------------- - libdwarf@20130207 - ^libelf@0.8.12 - libdwarf@20130729 - ^libelf@0.8.12 - libdwarf@20130729 - ^libelf@0.8.13 - libelf@0.8.11 - libelf@0.8.12 - libelf@0.8.13 - -Now you can see which versions of ``libelf`` each version of -``libdwarf`` was built with. - -If you want to know the path where each of these packages is -installed, do ``spack find -p`` or ``--path``: - -.. code-block:: sh - - $ spack find -p - == chaos_5_x86_64_ib =========================================== - -- gcc@4.4.7 --------------------------------------------------- - libdwarf@20130207-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130207-d9b909 - libdwarf@20130729-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-d9b909 - libdwarf@20130729-b52fac /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-b52fac - libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11 - libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12 - libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13 - - -And, finally, you can restrict your search to a particular package -by supplying its name: - -.. code-block:: sh - - $ spack find -p libelf - == chaos_5_x86_64_ib =========================================== - -- gcc@4.4.7 --------------------------------------------------- - libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11 - libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12 - libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13 - - -``spack find`` actually does a lot more than this. 
You can use -*specs* to query for specific configurations and builds of each -package. The full spec syntax is discussed in detail in -:ref:`sec-specs`. - - - -Installing and uninstalling ------------------------------- - -``spack install`` -~~~~~~~~~~~~~~~~~~~~~ - -``spack install`` will install any package that appears in the output -of ``spack list``. To install the latest version of a pacakge and all -of its dependencies, simply run ``spack install ``: - -.. code-block:: sh - - spack install mpileaks - -Spack will fetch the tarball for ``mpileaks``, expand it, verify that -it was downloaded without errors, build it, and install it in its own -directory under ``$SPACK_HOME/opt``. If the requested package depends -on other packages in order to build, Spack fetches them as well, and -installs them before it installs the requested package. Like the main -package, each dependency is also installed in its own directory. - -Spack can also build *specific* configurations of a package. For -example, to install something with a specific version, add ``@`` after -the package name, followed by a version string: - -.. code-block:: sh - - spack install mpich@3.0.4 - -Any number of configurations of the same package can be installed at -once without interfering with each other. This is good for multi-user -sites, as installing a version that one user needs will not disrupt -existing installations for other users. - -In addition to version configuraitons, Spack can customize the -compiler, compile-time options (variants), and platform (for cross -compiles) of an installation. Spack is unique in that it can also -configure the *dependencies* a package is built with. For example, -two configurations of the same version of a package, one built with -boost 1.39.0, and the other version built with version 1.43.0, can -coexist. - -This can all be done on the command line using special syntax. Spack -calls the descriptor used to refer to a particular package -configuration a **spec**. In the command lines above, both -``mpileaks`` and ``mpileaks@3.0.4`` are specs. To customize -additional properties, simply add more attributes to the spec. Specs -and their syntax are covered in more detail in :ref:`sec-specs`. - - -``spack uninstall`` -~~~~~~~~~~~~~~~~~~~~~ - -To uninstall a package, type ``spack uninstall ``. This will -completely remove the directory in which the package was installed. - -.. code-block:: sh - - spack uninstall mpich - -If there are still installed packages that depend on the package to be -uninstalled, spack will refuse to uninstall. If you know what you're -doing, you can override this with ``spack uninstall -f ``. -However, running this risks breaking other installed packages. In -general, it is safer to remove dependent packages *before* removing -their dependencies. - -A line like ``spack uninstall mpich`` may be ambiguous, if multiple -``mpich`` configurations are installed. For example, if both -``mpich@3.0.2`` and ``mpich@3.1`` are installed, it could refer to -either one, and Spack cannot determine which one to uninstall. Spack -will ask you to provide a version number to remove the ambiguity. For -example, ``spack uninstall mpich@3.1`` is unambiguous in the above -scenario. +list displayed by ``spack compilers``. .. _sec-specs: @@ -333,10 +416,10 @@ scenario. Specs & Dependencies ------------------------- -We now know that ``spack install`` and ``spack uninstall`` both take a -package name with an optional version specifier. In Spack, that -descriptor is called a *spec*. 
Spack uses specs to refer to a -particular build configuration (or configurations) of a package. +We know that ``spack install``, ``spack uninstall``, and other +commands take a package name with an optional version specifier. In +Spack, that descriptor is called a *spec*. Spack uses specs to refer +to a particular build configuration (or configurations) of a package. Specs are more than a package name and a version; you can use them to specify the compiler, compiler version, architecture, compile options, and dependency options for a build. In this section, we'll go over @@ -499,6 +582,11 @@ based on site policies. Variants ~~~~~~~~~~~~~~~~~~~~~~~ +.. Note:: + + Variants are not yet supported, but will be in the next Spack + release (0.9), due in Q2 2015. + Variants are named options associated with a particular package, and they can be turned on or off. For example, above, supplying ``+debug`` causes ``mpileaks`` to be built with debug flags. The @@ -544,6 +632,11 @@ the command line is provided for convenience and legibility. Architecture specifier ~~~~~~~~~~~~~~~~~~~~~~~ +.. Note:: + + Architecture specifiers are part of specs but are not yet + functional. They will be in Spack version 1.0, due in Q3 2015. + The architecture specifier starts with a ``=`` and also comes after some package name within a spec. It allows a user to specify a particular architecture for the package to be built. This is mostly @@ -678,10 +771,6 @@ For ``csh`` and ``tcsh`` run: You can put the above code in your ``.bashrc`` or ``.cshrc``, and Spack's shell support will be available on the command line. - -------------------------------- - - When you install a package with Spack, it automatically generates an environment module that lets you add the package to your environment. @@ -710,7 +799,7 @@ of installed packages. $ module avail - ------- /g/g21/gamblin2/src/spack/share/spack/modules/chaos_5_x86_64_ib -------- + ------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib -------- adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7 automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0 boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5 @@ -858,6 +947,215 @@ regenerate all module and dotkit files from scratch: ==> Regenerating tcl module files. ==> Regenerating dotkit module files. + +.. _extensions: + +Extensions & Python Support +------------------------------------ + +Spack's installation model assumes that each package will live in its +own install prefix. However, certain packages are typically installed +*within* the directory hierarchy of other packages. For example, +modules in interpreted languages like `Python +`_ are typically installed in the +``$prefix/lib/python-2.7/site-packages`` directory. + +Spack has support for this type of installation as well. In Spack, +a package that can live inside the prefix of another package is called +an *extension*. Suppose you have Python installed like so: + +.. code-block:: sh + + $ spack find python + ==> 1 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + python@2.7.8 + +``spack extensions`` +~~~~~~~~~~~~~~~~~~~~~~~ + +You can find extensions for your Python installation like this: + +.. 
code-block:: sh + + $ spack extensions python + ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96 + ==> 36 extensions: + geos py-ipython py-pexpect py-pyside py-sip + py-basemap py-libxml2 py-pil py-pytz py-six + py-biopython py-mako py-pmw py-rpy2 py-sympy + py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv + py-dateutil py-mpi4py py-pygments py-scikit-learn + py-epydoc py-mx py-pylint py-scipy + py-gnuplot py-nose py-pyparsing py-setuptools + py-h5py py-numpy py-pyqt py-shiboken + + ==> 12 installed: + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2 + py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10 + py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1 + py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0 + + ==> None activated. + +The extensions are a subset of what's returned by ``spack list``, and +they are packages like any ohter. They are installed into their own +prefixes, and you can see this with ``spack find -p``: + +.. code-block:: sh + $ spack find -p py-numpy + ==> 1 installed packages. + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244 + +However, even though this package is installed, you cannot use it +directly when you run ``python``: + +.. code-block:: sh + + $ spack load python + $ python + Python 2.7.8 (default, Feb 17 2015, 01:35:25) + [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2 + Type "help", "copyright", "credits" or "license" for more information. + >>> import numpy + Traceback (most recent call last): + File "", line 1, in + ImportError: No module named numpy + >>> + +Extensions & Environment Modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +There are two ways to get ``numpy`` working in Python. The first is +to use :ref:`shell-support`. You can simply ``use`` or ``load`` the +module for the extension, and it will be added to the ``PYTHONPATH`` +in your current shell. + +For tcl modules: + +.. code-block:: sh + + $ spack load python + $ spack load py-numpy + +or, for dotkit: + +.. code-block:: sh + + $ spack use python + $ spack use py-numpy + +Now ``import numpy`` will succeed for as long as you keep your current +session open. + + +Activating Extensions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +It is often desirable to have certain packages *always* available as +part of a Python installation. Spack offers a more permanent solution +for this case. Instead of requiring users to load particular +environment modules, you can *activate* the package within the Python +installation: + +``spack activate`` +^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: sh + + $ spack activate py-numpy + ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7. + ==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7. + ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7. + +Several things have happened here. The user requested that +``py-numpy`` be activated in the ``python`` installation it was built +with. Spack knows that ``py-numpy`` depends on ``py-nose`` and +``py-setuptools``, so it activated those packages first. Finally, +once all dpeendencies were activated in the ``python`` installation, +``py-numpy`` was activated as well. 
+ +If we run ``spack extensions`` again, we now see the three new +packages listed as activated: + +.. code-block:: sh + + $ spack extensions python + ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96 + ==> 36 extensions: + geos py-ipython py-pexpect py-pyside py-sip + py-basemap py-libxml2 py-pil py-pytz py-six + py-biopython py-mako py-pmw py-rpy2 py-sympy + py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv + py-dateutil py-mpi4py py-pygments py-scikit-learn + py-epydoc py-mx py-pylint py-scipy + py-gnuplot py-nose py-pyparsing py-setuptools + py-h5py py-numpy py-pyqt py-shiboken + + ==> 12 installed: + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2 + py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10 + py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1 + py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0 + + ==> 3 currently activated: + -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- + py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1 + + +Now, when a user runs python, ``numpy`` will be avaiable for import +*without* the user having to explicitly loaded. ``python@2.7.8`` now +acts like a system Python installation with ``numpy`` installed inside +of it. + +Spack accomplishes this by symbolically linking the *entire* prefix of +the ``py-numpy`` into the prefix of the ``python`` package. To the +python interpreter, it looks like ``numpy`` is installed in the +``site-packages`` directory. + +The only limitation of activation is that you can only have a *single* +version of an extension activated at a time. This is because multiple +versions of the same extension would conflict if symbolically linked +into the same prefix. Users who want a different version of a package +can still get it by using environment modules, but they will have to +explicitly load their preferred version. + +``spack activate -f`` +^^^^^^^^^^^^^^^^^^^^^^^^^ +If, for some reason, you want to activate a package *without* its +dependencies, you can use ``spack activate -f``: + +.. code-block:: sh + + $ spack activate -f py-numpy + ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7. + + +``spack deactivate`` +^^^^^^^^^^^^^^^^^^^^^^^^^ + +We've seen how activating an extension can be used to set up a default +version of a Python module. Obviously, you may want to change that at +some point. ``spack deactivate`` is the command for this. There are +several variants: + + * ``spack deactivate `` will deactivate a single + extension. If another activated extension depends on this one, + Spack will warn you and exit with an error. + * ``spack deactivate -f `` deactivates an extension + regardless of packages that depend on it. + * ``spack deactivate -a `` deactivates an extension and + all of its dependencies. Use ``-f`` to disregard dependents. + * ``spack deactivate -a `` deactivates *all* activated + extensions of a package. 
For example, to deactivate *all* python + extensions, use:: + + spack deactivate -a python + + Getting Help ----------------------- diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ec2ca4d099..e5ebdb4cd2 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2032,6 +2032,80 @@ to get rid of the install prefix before you build again: spack uninstall -f +Graphing Dependencies +-------------------------- + +Spack provides the ``spack graph`` command for graphing dependencies. +The command by default generates an ASCII rendering of a spec's +dependency graph. For example:: + + $ spack graph mpileaks + o mpileaks + |\ + | |\ + | o | callpath + |/| | + | |\| + | |\ \ + | | |\ \ + | | | | o adept-utils + | |_|_|/| + |/| | | | + o | | | | mpi + / / / / + | | o | dyninst + | |/| | + |/|/| | + | | |/ + | o | libdwarf + |/ / + o | libelf + / + o boost + +At the top is the root package in the DAG, with dependency edges +emerging from it. On a color terminal, the edges are colored by which +dependency they lead to. + +You can also use ``spack graph`` to generate graphs in the widely used +`Dot `_ format. For +example:: + + $ spack graph --dot mpileaks + digraph G { + label = "Spack Dependencies" + labelloc = "b" + rankdir = "LR" + ranksep = "5" + + "boost" [label="boost"] + "callpath" [label="callpath"] + "libdwarf" [label="libdwarf"] + "mpileaks" [label="mpileaks"] + "mpi" [label="mpi"] + "adept-utils" [label="adept-utils"] + "dyninst" [label="dyninst"] + "libelf" [label="libelf"] + + "callpath" -> "dyninst" + "callpath" -> "adept-utils" + "callpath" -> "mpi" + "callpath" -> "libelf" + "callpath" -> "libdwarf" + "libdwarf" -> "libelf" + "mpileaks" -> "adept-utils" + "mpileaks" -> "callpath" + "mpileaks" -> "mpi" + "adept-utils" -> "boost" + "adept-utils" -> "mpi" + "dyninst" -> "boost" + "dyninst" -> "libelf" + "dyninst" -> "libdwarf" + } + +This graph can be provided as input to other graphing tools, such as +those in `Graphviz `_. + Interactive Shell Support -------------------------- diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index 87c528881e..073363db0f 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -49,6 +49,8 @@ def print_rst_package_list(): """Print out information on all packages in restructured text.""" pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower()) + print ".. _package-list:" + print print "Package List" print "==================" -- cgit v1.2.3-70-g09d2 From daef78f53853f30d342c6f3f650f04b25d7457f2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 23 Feb 2015 10:31:22 -0800 Subject: Update packaging documentataion. --- lib/spack/docs/packaging_guide.rst | 235 +++++++++++++++++++++----------- var/spack/packages/py-pyside/package.py | 2 - 2 files changed, 156 insertions(+), 81 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index e5ebdb4cd2..48986f8065 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -16,16 +16,17 @@ There are two key parts of Spack: software according to a spec. Specs allow a user to describe a *particular* build in a way that a -package author can understand. Packages allow a developer to -encapsulate the logic build logic for different versions, compilers, +package author can understand. 
Packages allow a the packager to +encapsulate the build logic for different versions, compilers, options, platforms, and dependency combinations in one place. +Essentially, a package translates a spec into build logic. Packages in Spack are written in pure Python, so you can do anything in Spack that you can do in Python. Python was chosen as the implementation language for two reasons. First, Python is becoming -ubiquitous in the HPC community due to its use in numerical codes. -Second, it's a modern language and has many powerful features to help -make package writing easy. +ubiquitous in the scientific software community. Second, it's a modern +language and has many powerful features to help make package writing +easy. Creating & Editing Packages ---------------------------------- @@ -35,24 +36,23 @@ Creating & Editing Packages ``spack create`` ~~~~~~~~~~~~~~~~~~~~~ -The ``spack create`` command generates boilerplate package template -from a URL pointing to a tarball or other software archive. In most -cases, you'll only need to run this once, then slightly modify the -boilerplate to get your package working. +The ``spack create`` command generates a boilerplate package template +from a URL. The URL should point to a tarball or other software +archive. In most cases, ``spack create`` plus a few modifications is +all you need to get a package working. -All you need is the URL to a tarball (other archive formats are ok -too) you want to package: +Here's an example: .. code-block:: sh $ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz -When you run this, Spack looks at the tarball URL and tries to figure -out the name of the package to be created. It also tries to determine -out what version strings look like for this package. Using this -information, it tries to find *additional* versions by spidering the -package's webpage. If it finds multiple versions, Spack prompts you -to tell it how many versions you want to download and checksum. +Spack examines the tarball URL and tries to figure out the name of the +package to be created. It also tries to determine what version strings +look like for this package. Using this information, it will try to +find *additional* versions by spidering the package's webpage. If it +finds multiple versions, Spack prompts you to tell it how many +versions you want to download and checksum: .. code-block:: sh @@ -63,12 +63,6 @@ to tell it how many versions you want to download and checksum. 2.8.12.1 http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 2.8.12 http://www.cmake.org/files/v2.8/cmake-2.8.12.tar.gz 2.8.11.2 http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz - 2.8.11.1 http://www.cmake.org/files/v2.8/cmake-2.8.11.1.tar.gz - 2.8.11 http://www.cmake.org/files/v2.8/cmake-2.8.11.tar.gz - 2.8.10.2 http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz - 2.8.10.1 http://www.cmake.org/files/v2.8/cmake-2.8.10.1.tar.gz - 2.8.10 http://www.cmake.org/files/v2.8/cmake-2.8.10.tar.gz - 2.8.9 http://www.cmake.org/files/v2.8/cmake-2.8.9.tar.gz ... 2.8.0 http://www.cmake.org/files/v2.8/cmake-2.8.0.tar.gz @@ -77,10 +71,30 @@ to tell it how many versions you want to download and checksum. Spack will automatically download the number of tarballs you specify (starting with the most recent) and checksum each of them. -Note that you don't need to do everything up front. If your package -is large, you can always choose to download just one tarball for now, -then run :ref:`spack checksum ` later if you end up -wanting more. 
Let's say you choose to download 3 tarballs: +You do not *have* to download all of the versions up front. You can +always choose to download just one tarball initially, and run +:ref:`spack checksum ` later if you need more. + +.. note:: + + If ``spack create`` fails to detect the package name correctly, + you can try supplying it yourself, e.g.:: + + $ spack create --name cmake http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz + + If it fails entirely, you can get minimal boilerplate by using + :ref:`spack-edit-f`, or you can manually create a directory and + ``package.py`` file for the package in ``var/spack/packages``. + +.. note:: + + Spack can fetch packages from source code repositories, but, + ``spack create`` will *not* currently create a boilerplate package + from a repository URL. You will need to use :ref:`spack-edit-f` + and manually edit the ``version()`` directives to fetch from a + repo. See :ref:`vcs-fetch` for details. + +Let's say you download 3 tarballs: .. code-block:: sh @@ -93,8 +107,8 @@ wanting more. Let's say you choose to download 3 tarballs: ==> Fetching http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz #################################################################### 95.2% -Now Spack generates boilerplate code and opens the new -``package.py`` file in your favorite ``$EDITOR``: +Now Spack generates boilerplate code and opens a new ``package.py`` +file in your favorite ``$EDITOR``: .. code-block:: python :linenos: @@ -141,12 +155,6 @@ Now Spack generates boilerplate code and opens the new The tedious stuff (creating the class, checksumming archives) has been done for you. -.. note:: - - If ``spack create`` fails to download or to detect the package - version, you can use ``spack edit -f`` to generate simpler - boilerplate. See the next section for more on this. - In the generated package, the download ``url`` attribute is already set. All the things you still need to change are marked with ``FIXME`` labels. The first ``FIXME`` refers to the commented @@ -199,27 +207,30 @@ The ``cmake`` package actually lives in a much simpler shortcut and saves you the trouble of typing the full path. - -``spack edit -f`` -~~~~~~~~~~~~~~~~~~~~ If you try to edit a package that doesn't exist, Spack will recommend -using ``spack create``: +using ``spack create`` or ``spack edit -f``: .. code-block:: sh $ spack edit foo ==> Error: No package 'foo'. Use spack create, or supply -f/--force to edit a new file. -As the output advises, You can use ``spack edit -f/--force`` to force -the creation of a new, *very* simple boilerplate package: +.. _spack-edit-f: + +``spack edit -f`` +~~~~~~~~~~~~~~~~~~~~ + +``spack edit -f`` can be used to create a new, minimal boilerplate +package: .. code-block:: sh $ spack edit -f foo -Unlike ``spack create``, which tries to infer names and versions, and -which actually downloads the tarball and checksums it for you, ``spack -edit -f`` will substitute dummy values for you to fill in yourself: +Unlike ``spack create``, which infers names and versions, and which +actually downloads the tarball and checksums it for you, ``spack edit +-f`` has no such fanciness. It will substitute dummy values for you +to fill in yourself: .. code-block:: python :linenos: @@ -246,6 +257,13 @@ version of your package from the archive URL. Naming & Directory Structure -------------------------------------- +.. note:: + + Spack's default naming and directory structure will change in + version 0.9. 
Specifically, 0.9 will stop using directory names + with special characters like ``@``, to avoid interfering with + certain packages that do not handle this well. + This section describes how packages need to be named, and where they live in Spack's directory structure. In general, `spack-create`_ and `spack-edit`_ handle creating package files for you, so you can skip @@ -264,6 +282,7 @@ package: .. command-output:: cd $SPACK_ROOT/var/spack/packages; ls -CF :shell: + :ellipsis: 10 Each directory contains a file called ``package.py``, which is where all the python code for the package goes. For example, the ``libelf`` @@ -280,11 +299,9 @@ Package Names Packages are named after the directory containing ``package.py``. So, ``libelf``'s ``package.py`` lives in a directory called ``libelf``. -The ``package.py`` file contains a class called ``Libelf``, which -extends Spack's ``Package`` class. This is what makes it a Spack -package: - -``var/spack/packages/libelf/package.py`` +The ``package.py`` file defines a class called ``Libelf``, which +extends Spack's ``Package`` class. for example, here is +``$SPACK_ROOT/var/spack/packages/libelf/package.py``: .. code-block:: python :linenos: @@ -301,8 +318,9 @@ package: def install(): ... -The **directory name** (``libelf``) is what users need to provide on -the command line. e.g., if you type any of these: +The **directory name** (``libelf``) determines the package name that +users should provide on the command line. e.g., if you type any of +these: .. code-block:: sh @@ -311,8 +329,8 @@ the command line. e.g., if you type any of these: Spack sees the package name in the spec and looks for ``libelf/package.py`` in ``var/spack/packages``. Likewise, if you say -``spack install docbook-xml``, then Spack looks for -``docbook-xml/package.py``. +``spack install py-numpy``, then Spack looks for +``py-numpy/package.py``. Spack uses the directory name as the package name in order to give packagers more freedom in naming their packages. Package names can @@ -342,8 +360,7 @@ some examples: ================= ================= In general, you won't have to remember this naming convention because -`spack-create`_ and `spack-edit`_ will generate boilerplate for you, -and you can just fill in the blanks. +`spack-create`_ and `spack-edit`_ handle the details for you. Adding new versions @@ -381,9 +398,8 @@ For the URL above, you might have to add an explicit URL because the version can't simply be substituted in the original ``url`` to construct the new one for ``8.2.1``. -Wehn you supply a custom URL for a version, Spack uses that URL -*verbatim* when fetching the version, and will *not* perform -extrapolation. +When you supply a custom URL for a version, Spack uses that URL +*verbatim* and does not perform extrapolation. Checksums ~~~~~~~~~~~~~~~~~ @@ -392,10 +408,11 @@ Spack uses a checksum to ensure that the downloaded package version is not corrupted or compromised. This is especially important when fetching from insecure sources, like unencrypted http. By default, a package will *not* be installed if it doesn't pass a checksum test -(though users can overried this with ``spack install --no-checksum``). +(though you can override this with ``spack install --no-checksum``). Spack can currently support checksums using the MD5, SHA-1, SHA-224, -SHA-256, SHA-384, and SHA-512 algorithms. +SHA-256, SHA-384, and SHA-512 algorithms. It determines the algorithm +to use based on the hash length. 
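+
+Since the supported algorithms all have distinct hexadecimal digest
+lengths, the checksum string by itself is enough to pick the right
+algorithm.  A minimal sketch of the idea (illustrative only, not
+Spack's internal code; the helper name ``hasher_for`` is made up)
+might look like this:
+
+.. code-block:: python
+
+   import hashlib
+
+   # Hex digest length -> hash constructor.  Lengths are unique across
+   # the supported algorithms, so no other metadata is needed.
+   _BY_DIGEST_LENGTH = {
+       32:  hashlib.md5,     # MD5
+       40:  hashlib.sha1,    # SHA-1
+       56:  hashlib.sha224,  # SHA-224
+       64:  hashlib.sha256,  # SHA-256
+       96:  hashlib.sha384,  # SHA-384
+       128: hashlib.sha512,  # SHA-512
+   }
+
+   def hasher_for(checksum):
+       """Return a new hash object matching the checksum's length."""
+       try:
+           return _BY_DIGEST_LENGTH[len(checksum)]()
+       except KeyError:
+           raise ValueError("Unrecognized checksum: %s" % checksum)
+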
``spack md5`` ^^^^^^^^^^^^^^^^^^^^^^ @@ -459,16 +476,18 @@ By default, Spack will search for new tarball downloads by scraping the parent directory of the tarball you gave it. So, if your tarball is at ``http://example.com/downloads/foo-1.0.tar.gz``, Spack will look in ``http://example.com/downloads/`` for links to additional versions. -If you need to search another path for download links, see the -reference documentation on `attribute_list_url`_ and +If you need to search another path for download links, you can supply +some extra attributes that control how your package finds new +versions. See the documentation on `attribute_list_url`_ and `attributee_list_depth`_. .. note:: * This command assumes that Spack can extrapolate new URLs from an existing URL in the package, and that Spack can find similar URLs - on a webpage. If that's not possible, you'll need to manually add - ``version`` calls yourself. + on a webpage. If that's not possible, e.g. if the package's + developers don't name their tarballs consistently, you'll need to + manually add ``version`` calls yourself. * For ``spack checksum`` to work, Spack needs to be able to ``import`` your pacakge in Python. That means it can't have any @@ -481,32 +500,33 @@ reference documentation on `attribute_list_url`_ and Fetching from VCS Repositories -------------------------------------- -For some packages, source code is hosted in a Version Control System -(VCS) repository rather than as a tarball. Packages can be set up to -fetch from a repository instead of a tarball. Currently, Spack -supports fetching with `Git `_, `Mercurial (hg) -`_, and `Subversion (SVN) `_. +For some packages, source code is provided in a Version Control System +(VCS) repository rather than in a tarball. Spack can fetch packages +from VCS repositories. Currently, Spack supports fetching with `Git +`_, `Mercurial (hg) `_, and `Subversion (SVN) +`_. To fetch a package from a source repository, you add a ``version()`` call to your package with parameters indicating the repository URL and -any branch, tag, or revision to fetch. See below for the paramters +any branch, tag, or revision to fetch. See below for the parameters you'll need for each VCS system. Repositories and versions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The package author is responsible for coming up with a sensible name -for each version. For example, if you're fetching from a tag like -``v1.0``, you might call that ``1.0``. If you're fetching a nameless -git commit or an older subversion revision, you might give the commit -an intuitive name, like ``dev`` for a development version, or -``some-fancy-new-feature`` if you want to be more specific. +for each version to be fetched from a repository. For example, if +you're fetching from a tag like ``v1.0``, you might call that ``1.0``. +If you're fetching a nameless git commit or an older subversion +revision, you might give the commit an intuitive name, like ``dev`` +for a development version, or ``some-fancy-new-feature`` if you want +to be more specific. In general, it's recommended to fetch tags or particular commits/revisions, NOT branches or the repository mainline, as branches move forward over time and you aren't guaranteed to get the same thing every time you fetch a particular version. Life isn't -simple, though, so this is not strictly enforced. +always simple, though, so this is not strictly enforced. 
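+
+As an illustration of this naming advice (the repository URL and
+commit hash below are made up), a package might declare:
+
+.. code-block:: python
+
+   # A tagged release gets a version name that matches the tag.
+   version('1.0', git='https://github.com/example/example.git', tag='v1.0')
+
+   # A nameless commit gets a descriptive version name instead.
+   version('some-fancy-new-feature',
+           git='https://github.com/example/example.git',
+           commit='24ef0b6')
+
+The exact parameters accepted for each VCS system are described in
+the sections below.
+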
In some future release, Spack may support extrapolating repository versions as it does for tarball URLs, but currently this is not @@ -633,7 +653,7 @@ Subversion To fetch with subversion, use the ``svn`` and ``revision`` parameters: -Head +Fetching the head Simply add an ``svn`` parameter to ``version``: .. code-block:: python @@ -642,7 +662,7 @@ Head This is not recommended, as the head will move forward over time. -Revisions +Fetching a revision To fetch a particular revision, add a ``revision`` to the version call: @@ -746,6 +766,53 @@ from the URL and then applied to your source code. applies cleanly with ``-p1``, but if you're using a patch you didn't create yourself, ``level`` can be handy. +``patch()`` functions +~~~~~~~~~~~~~~~~~~~~~~~~ + +In addition to supplying patch files, you can write a custom function +to patch a package's source. For example, the ``py-pyside`` package +contains some custom code for tweaking the way the PySide build +handles ``RPATH``: + +.. code-block:: python + :linenos: + + class PyPyside(Package): + ... + + def patch(self): + """Undo PySide RPATH handling and add Spack RPATH.""" + # Figure out the special RPATH + pypkg = self.spec['python'].package + rpath = self.rpath + rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide')) + + # Add Spack's standard CMake args to the sub-builds. + # They're called BY setup.py so we have to patch it. + filter_file( + r'OPTION_CMAKE,', + r'OPTION_CMAKE, ' + ( + '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), + 'setup.py') + + # PySide tries to patch ELF files to remove RPATHs + # Disable this and go with the one we set. + filter_file( + r'^\s*rpath_cmd\(pyside_path, srcpath\)', + r'#rpath_cmd(pyside_path, srcpath)', + 'pyside_postinstall.py') + +A ``patch`` function, if present, will be run after patch files are +applied and before ``install()`` is run. + +You could put this logic in ``install()``, but putting it in a patch +function gives you some benefits. First, spack ensures that the +``patch()`` function is run once per code checkout. That means that +if you run install, hit ctrl-C, and run install again, the code in the +patch function is only run once. Also, you can tell Spack to run only the patching part of the build using the .. + + Finding Package Downloads ---------------------------- @@ -1932,6 +1999,8 @@ A typical package workflow might look like this: Below are some commands that will allow you some finer-grained controll over the install process. +.. _spack-fetch: + ``spack fetch`` ~~~~~~~~~~~~~~~~~ @@ -1944,6 +2013,8 @@ directory will be located under ``$SPACK_HOME/var/spack``. When run after the archive has already been downloaded, ``spack fetch`` is idempotent and will not download the archive again. +.. _spack-stage: + ``spack stage`` ~~~~~~~~~~~~~~~~~ @@ -1952,6 +2023,8 @@ the downloaded archive in its temporary directory, where it will be built by ``spack install``. Similar to ``fetch``, if the archive has already been expanded, ``stage`` is idempotent. +.. _spack-patch: + ``spack patch`` ~~~~~~~~~~~~~~~~~ @@ -1963,7 +2036,6 @@ this step if they have been. If Spack discovers that patches didn't apply cleanly on some previous run, then it will restage the entire package before patching. - ``spack clean`` ~~~~~~~~~~~~~~~~~ @@ -2035,6 +2107,11 @@ to get rid of the install prefix before you build again: Graphing Dependencies -------------------------- +.. 
_spack-graph: + +``spack graph`` +~~~~~~~~~~~~~~~~~~~ + Spack provides the ``spack graph`` command for graphing dependencies. The command by default generates an ASCII rendering of a spec's dependency graph. For example:: diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py index 6583431124..bb5da44d02 100644 --- a/var/spack/packages/py-pyside/package.py +++ b/var/spack/packages/py-pyside/package.py @@ -43,5 +43,3 @@ class PyPyside(Package): python('setup.py', 'install', '--prefix=%s' % prefix, '--jobs=%s' % make_jobs) - - -- cgit v1.2.3-70-g09d2 From daa38d2ff4acd75b3b592bac5abd9ef642ee801f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 24 Feb 2015 02:33:29 -0800 Subject: SPACK-59: Documentation updates, bugfix in fetching. --- lib/spack/docs/.gitignore | 1 + lib/spack/docs/Makefile | 16 +- lib/spack/docs/basic_usage.rst | 37 +++- lib/spack/docs/command_index.in | 10 ++ lib/spack/docs/features.rst | 2 +- lib/spack/docs/index.rst | 2 + lib/spack/docs/mirrors.rst | 217 +++++++++++++++++++++++ lib/spack/docs/packaging_guide.rst | 312 ++++++++++++++++++++++++++++++++-- lib/spack/docs/site_configuration.rst | 193 --------------------- lib/spack/spack/mirror.py | 2 +- lib/spack/spack/package.py | 5 +- lib/spack/spack/test/git_fetch.py | 4 +- lib/spack/spack/test/hg_fetch.py | 4 +- lib/spack/spack/test/svn_fetch.py | 4 +- 14 files changed, 585 insertions(+), 224 deletions(-) create mode 100644 lib/spack/docs/command_index.in create mode 100644 lib/spack/docs/mirrors.rst diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore index 7701dd9f12..26c343d3eb 100644 --- a/lib/spack/docs/.gitignore +++ b/lib/spack/docs/.gitignore @@ -1,3 +1,4 @@ package_list.rst +command_index.rst spack*.rst _build diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile index a660e1255d..00203b5b61 100644 --- a/lib/spack/docs/Makefile +++ b/lib/spack/docs/Makefile @@ -27,6 +27,18 @@ all: html package_list: spack package-list > package_list.rst +# +# Generate a command index +# +command_index: + cp command_index.in command_index.rst + echo >> command_index.rst + grep -ho '.. _spack-.*:' *rst \ + | perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \ + | sort >> command_index.rst + +custom_targets: package_list command_index + # # This creates a git repository and commits generated html docs. # It them pushes the new branch into THIS repository as gh-pages. @@ -77,10 +89,10 @@ help: @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: - -rm -f package_list.rst + -rm -f package_list.rst command_index.rst -rm -rf $(BUILDDIR)/* $(APIDOC_FILES) -html: apidoc package_list +html: apidoc custom_targets $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index bd25d739ea..3d808708e1 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -16,6 +16,8 @@ software. Before that, you need to know what's available. You can see avaialble package names either using the :ref:`package-list`, or using the commands below. +.. _spack-list: + ``spack list`` ~~~~~~~~~~~~~~~~ @@ -31,6 +33,7 @@ do wildcard searches using ``*``: .. command-output:: spack list *util* +.. _spack-info: ``spack info`` ~~~~~~~~~~~~~~~~ @@ -47,6 +50,8 @@ attacks. :ref:`Dependencies ` and :ref:`virtual dependencies `, are described in more detail later. +.. 
_spack-versions: + ``spack versions`` ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -69,6 +74,8 @@ Installing and uninstalling Now that you know how to list avaiable packages and versions, you're ready to start installing things. +.. _spack-install: + ``spack install`` ~~~~~~~~~~~~~~~~~~~~~ @@ -138,6 +145,7 @@ configuration a **spec**. In the command lines above, both ``mpileaks`` and ``mpileaks@3.0.4`` are specs. Specs are described in detail in :ref:`sec-specs`. +.. _spack-uninstall: ``spack uninstall`` ~~~~~~~~~~~~~~~~~~~~~ @@ -170,6 +178,7 @@ Seeing installed packages We know that ``spack list`` shows you the names of available packages, but how do you figure out which are installed? +.. _spack-find: ``spack find`` ~~~~~~~~~~~~~~~~~~~~~~ @@ -303,7 +312,7 @@ of libelf would look like this: The full spec syntax is discussed in detail in :ref:`sec-specs`. -Compiler Configuration +Compiler configuration ----------------------------------- Spack has the ability to build packages with multiple compilers and @@ -311,6 +320,8 @@ compiler versions. Spack searches for compilers on your machine automatically the first time it is run. It does this by inspecting your path. +.. _spack-compilers: + ``spack compilers`` ~~~~~~~~~~~~~~~~~~~~~~~ @@ -337,6 +348,8 @@ compilers`` or ``spack compiler list``:: Any of these compilers can be used to build Spack packages. More on how this is done is in :ref:`sec-specs`. +.. _spack-compiler-add: + ``spack compiler add`` ~~~~~~~~~~~~~~~~~~~~~~~ @@ -361,6 +374,7 @@ installed, but you know that new compilers have been added to your This loads the environment module for gcc-4.9.0 to get it into the ``PATH``, and then it adds the compiler to Spack. +.. _spack-compiler-info: ``spack compiler info`` ~~~~~~~~~~~~~~~~~~~~~~~ @@ -382,7 +396,7 @@ matching Intel compiler was displayed. Manual compiler configuration -~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If autodetection fails, you can manually conigure a compiler by editing your ``~/.spackconfig`` file. You can do this by running @@ -413,7 +427,7 @@ list displayed by ``spack compilers``. .. _sec-specs: -Specs & Dependencies +Specs & dependencies ------------------------- We know that ``spack install``, ``spack uninstall``, and other @@ -720,6 +734,8 @@ any MPI implementation will do. If another package depends on error. Likewise, if you try to plug in some package that doesn't provide MPI, Spack will raise an error. +.. _spack-providers: + ``spack providers`` ~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -739,7 +755,7 @@ versions are now filtered out. .. _shell-support: -Environment Modules +Environment modules ------------------------------- .. note:: @@ -787,6 +803,7 @@ The directories are automatically added to your ``MODULEPATH`` and ``DK_NODE`` environment variables when you enable Spack's `shell support `_. + Using Modules & Dotkits ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -934,6 +951,8 @@ if newer, fancier module support is added to Spack at some later date, you may want to regenerate all the modules to take advantage of these new features. +.. _spack-module: + ``spack module refresh`` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -950,7 +969,7 @@ regenerate all module and dotkit files from scratch: .. _extensions: -Extensions & Python Support +Extensions & Python support ------------------------------------ Spack's installation model assumes that each package will live in its @@ -971,6 +990,8 @@ an *extension*. Suppose you have Python installed like so: -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- python@2.7.8 +.. 
_spack-extensions: + ``spack extensions`` ~~~~~~~~~~~~~~~~~~~~~~~ @@ -1004,6 +1025,7 @@ they are packages like any ohter. They are installed into their own prefixes, and you can see this with ``spack find -p``: .. code-block:: sh + $ spack find -p py-numpy ==> 1 installed packages. -- chaos_5_x86_64_ib / gcc@4.4.7 -------------------------------- @@ -1060,6 +1082,8 @@ for this case. Instead of requiring users to load particular environment modules, you can *activate* the package within the Python installation: +.. _spack-activate: + ``spack activate`` ^^^^^^^^^^^^^^^^^^^^^^^ @@ -1133,6 +1157,7 @@ dependencies, you can use ``spack activate -f``: $ spack activate -f py-numpy ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7. +.. _spack-deactivate: ``spack deactivate`` ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1159,6 +1184,8 @@ several variants: Getting Help ----------------------- +.. _spack-help: + ``spack help`` ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in new file mode 100644 index 0000000000..94cdf38109 --- /dev/null +++ b/lib/spack/docs/command_index.in @@ -0,0 +1,10 @@ +.. _command_index: + +Command index +================= + +This is an alphabetical list of commands with links to the places they +appear in the documentation. + +.. hlist:: + :columns: 3 diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst index b39dcd3390..fcb810086d 100644 --- a/lib/spack/docs/features.rst +++ b/lib/spack/docs/features.rst @@ -1,4 +1,4 @@ -Feature Overview +Feature overview ================== This is a high-level overview of features that make Spack different diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 73eff43ab7..2382678cc3 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -46,8 +46,10 @@ Table of Contents getting_started basic_usage packaging_guide + mirrors site_configuration developer_guide + command_index package_list API Docs diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst new file mode 100644 index 0000000000..57ca1af068 --- /dev/null +++ b/lib/spack/docs/mirrors.rst @@ -0,0 +1,217 @@ +.. _mirrors: + +Mirrors +============================ + +Some sites may not have access to the internet for fetching packages. +These sites will need a local repository of tarballs from which they +can get their files. Spack has support for this with *mirrors*. A +mirror is a URL that points to a directory, either on the local +filesystem or on some server, containing tarballs for all of Spack's +packages. + +Here's an example of a mirror's directory structure:: + + mirror/ + cmake/ + cmake-2.8.10.2.tar.gz + dyninst/ + dyninst-8.1.1.tgz + dyninst-8.1.2.tgz + libdwarf/ + libdwarf-20130126.tar.gz + libdwarf-20130207.tar.gz + libdwarf-20130729.tar.gz + libelf/ + libelf-0.8.12.tar.gz + libelf-0.8.13.tar.gz + libunwind/ + libunwind-1.1.tar.gz + mpich/ + mpich-3.0.4.tar.gz + mvapich2/ + mvapich2-1.9.tgz + +The structure is very simple. There is a top-level directory. The +second level directories are named after packages, and the third level +contains tarballs for each package, named after each package. + +.. note:: + + Archives are **not** named exactly they were in the package's fetch + URL. They have the form ``-.``, where + ```` is Spack's name for the package, ```` is the + version of the tarball, and ```` is whatever format the + package's fetch URL contains. 
+ + In order to make mirror creation reasonably fast, we copy the + tarball in its original format to the mirror directory, but we do + not standardize on a particular compression algorithm, because this + would potentially require expanding and recompressing each archive. + +.. _spack-mirror: + +``spack mirror`` +---------------------------- + +Mirrors are managed with the ``spack mirror`` command. The help for +``spack mirror`` looks like this:: + + $ spack mirror -h + usage: spack mirror [-h] SUBCOMMAND ... + + positional arguments: + SUBCOMMAND + create Create a directory to be used as a spack mirror, and fill + it with package archives. + add Add a mirror to Spack. + remove Remove a mirror by name. + list Print out available mirrors to the console. + + optional arguments: + -h, --help show this help message and exit + +The ``create`` command actually builds a mirror by fetching all of its +packages from the internet and checksumming them. + +The other three commands are for managing mirror configuration. They +control the URL(s) from which Spack downloads its packages. + +.. _spack-mirror-create: + +``spack mirror create`` +---------------------------- + +You can create a mirror using the ``spack mirror create`` command, assuming +you're on a machine where you can access the internet. + +The command will iterate through all of Spack's packages and download +the safe ones into a directory structure like the one above. Here is +what it looks like: + + +.. code-block:: bash + + $ spack mirror create libelf libdwarf + ==> Created new mirror in spack-mirror-2014-06-24 + ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz + ########################################################## 81.6% + ==> Checksum passed for libelf@0.8.13 + ==> Added libelf@0.8.13 + ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz + ###################################################################### 98.6% + ==> Checksum passed for libelf@0.8.12 + ==> Added libelf@0.8.12 + ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz + ###################################################################### 97.3% + ==> Checksum passed for libdwarf@20130207 + ==> Added libdwarf@20130207 + ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz + ######################################################## 78.9% + ==> Checksum passed for libdwarf@20130126 + ==> Added libdwarf@20130126 + ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz + ############################################################# 84.7% + ==> Added libdwarf@20130729 + ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror + ==> Added python@2.7.8. + ==> Successfully updated mirror in spack-mirror-2015-02-24. + Archive stats: + 0 already present + 5 added + 0 failed to fetch. + +Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and +copy it over to the machine you want it hosted on. + +Custom package sets +~~~~~~~~~~~~~~~~~~~~~~~ + +Normally, ``spack mirror create`` downloads all the archives it has +checksums for. If you want to only create a mirror for a subset of +packages, you can do that by supplying a list of package specs on the +command line after ``spack mirror create``. For example, this +command:: + + $ spack mirror create libelf@0.8.12: boost@1.44: + +Will create a mirror for libelf versions greater than or equal to +0.8.12 and boost versions greater than or equal to 1.44. 
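+
+The resulting mirror directory should then contain only the matching
+versions; for example (output is illustrative and depends on which
+versions are known to Spack):
+
+.. code-block:: sh
+
+   $ ls spack-mirror-2015-02-24/libelf
+   libelf-0.8.12.tar.gz  libelf-0.8.13.tar.gz
+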
+ +Mirror files +~~~~~~~~~~~~~~~~~~~~~~~ + +If you have a *very* large number of packages you want to mirror, you +can supply a file with specs in it, one per line:: + + $ cat specs.txt + libdwarf + libelf@0.8.12: + boost@1.44: + boost@1.39.0 + ... + $ spack mirror create -f specs.txt + ... + +This is useful if there is a specific suite of software managed by +your site. + +.. _spack-mirror-add: + +``spack mirror add`` +---------------------------- + +Once you have a mirrror, you need to let spack know about it. This is +relatively simple. First, figure out the URL for the mirror. If it's +a file, you can use a file URL like this one:: + + file:///Users/gamblin2/spack-mirror-2014-06-24 + +That points to the directory on the local filesystem. If it were on a +web server, you could use a URL like this one: + + https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 + +Spack will use the URL as the root for all of the packages it fetches. +You can tell your Spack installation to use that mirror like this: + +.. code-block:: bash + + $ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24 + +Each mirror has a name so that you can refer to it again later. + +.. _spack-mirror-list: + +``spack mirror list`` +---------------------------- + +If you want to see all the mirrors Spack knows about you can run ``spack mirror list``:: + + $ spack mirror list + local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24 + +.. _spack-mirror-remove: + +``spack mirror remove`` +---------------------------- + +And, if you want to remove a mirror, just remove it by name:: + + $ spack mirror remove local_filesystem + $ spack mirror list + ==> No mirrors configured. + +Mirror precedence +---------------------------- + +Adding a mirror really just adds a section in ``~/.spackconfig``:: + + [mirror "local_filesystem"] + url = file:///Users/gamblin2/spack-mirror-2014-06-24 + [mirror "remote_server"] + url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 + +If you want to change the order in which mirrors are searched for +packages, you can edit this file and reorder the sections. Spack will +search the topmost mirror first and the bottom-most mirror last. diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index bc3bacf8f2..8b4c0a4ce1 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -28,7 +28,7 @@ ubiquitous in the scientific software community. Second, it's a modern language and has many powerful features to help make package writing easy. -Creating & Editing Packages +Creating & editing packages ---------------------------------- .. _spack-create: @@ -254,7 +254,7 @@ This is useful when ``spack create`` cannot figure out the name and version of your package from the archive URL. -Naming & Directory Structure +Naming & directory structure -------------------------------------- .. note:: @@ -497,7 +497,7 @@ versions. See the documentation on `attribute_list_url`_ and .. _vcs-fetch: -Fetching from VCS Repositories +Fetching from VCS repositories -------------------------------------- For some packages, source code is provided in a Version Control System @@ -774,6 +774,8 @@ to patch a package's source. For example, the ``py-pyside`` package contains some custom code for tweaking the way the PySide build handles ``RPATH``: +.. _pyside-patch: + .. 
code-block:: python :linenos: @@ -810,14 +812,59 @@ You could put this logic in ``install()``, but putting it in a patch function gives you some benefits. First, spack ensures that the ``patch()`` function is run once per code checkout. That means that if you run install, hit ctrl-C, and run install again, the code in the -patch function is only run once. Also, you can tell Spack to run only the patching part of the build using the .. +patch function is only run once. Also, you can tell Spack to run only +the patching part of the build using the :ref:`spack-patch` command. + +Handling RPATHs +---------------------------- + +Spack installs each package in a way that ensures that all of its +dependencies are found when it runs. It does this using `RPATHs +`_. An RPATH is a search +path, stored in a binary (an executable or library), that tells the +dynamic loader where to find its dependencies at runtime. You may be +familiar with ```LD_LIBRARY_PATH`` +`_ +on Linux or ```DYLD_LIBRARY_PATH`` +` +on Mac OS X. RPATH is similar to these paths, in that it tells +the loader where to find libraries. Unlike them, it is embedded in +the binary and not set in each user's environment. + +RPATHs in Spack are handled in one of three ways: + + 1. For most packages, RPATHs are handled automatically using Spack's + :ref:`compiler wrappers `. These wrappers are + set in standard variables like ``CC``, ``CXX``, and ``FC``, so + most build systems (autotools and many gmake systems) pick them + up and use them. + 2. CMake also respects Spack's compiler wrappers, but many CMake + builds have logic to overwrite RPATHs when binaries are + installed. Spack provides the ``std_cmake_args`` variable, which + includes parameters necessary for CMake build use the right + installation RPATH. It can be used like this when ``cmake`` is + invoked: + + .. code-block:: python + + class MyPackage(Package): + ... + def install(self, spec, prefix): + cmake('..', *std_cmake_args) + make() + make('install') + 3. If you need to modify the build to add your own RPATHs, you can + use the ``self.rpath`` property of your package, which will + return a list of all the RPATHs that Spack will use when it + links. You can see this how this is used in the :ref:`PySide + example ` above. -Finding Package Downloads +Finding new versions ---------------------------- -We've already seen the ``homepage`` and ``url`` package attributes: +You've already seen the ``homepage`` and ``url`` package attributes: .. code-block:: python :linenos: @@ -853,7 +900,7 @@ url is: url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" -Spack spiders ``http://www.mr511.de/software/`` to find similar +Here, Spack spiders ``http://www.mr511.de/software/`` to find similar tarball links and ultimately to make a list of available versions of ``libelf``. @@ -907,7 +954,7 @@ when spidering the page. .. _attribute_parallel: -Parallel Builds +Parallel builds ------------------ By default, Spack will invoke ``make()`` with a ``-j `` @@ -1036,6 +1083,203 @@ command line to find installed packages or to install packages with particular constraints, and package authors can use specs to describe relationships between packages. +.. _setup-dependent-environment: + +``setup_dependent_environment()`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Spack provides a mechanism for dependencies to provide variables that +can be used in their dependents' build. 
Any package can declare a +``setup_dependent_environment()`` function, and this function will be +called before the ``install()`` method of any dependent packages. +This allows dependencies to set up environment variables and other +properties to be used by dependents. + +The funciton declaration should look like this: + +.. code-block:: python + + class Qt(Package): + ... + def setup_dependent_environment(self, module, spec, dep_spec): + """Dependencies of Qt find it using the QTDIR environment variable.""" + os.environ['QTDIR'] = self.prefix + +Here, the Qt package sets the ``QTDIR`` environment variable so that +packages that depend on a particular Qt installation will find it. + +The arguments to this function are: + + * **module**: the module of the dependent package, where global + properties can be assigned. + * **spec**: the spec of the *dependency package* (the one the function is called on). + * **dep_spec**: the spec of the dependent package (i.e. dep_spec depends on spec). + +A goo example of using these is in the Python packge: + +.. code-block:: python + + def setup_dependent_environment(self, module, spec, dep_spec): + # Python extension builds can have a global python executable function + module.python = Executable(join_path(spec.prefix.bin, 'python')) + + # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. + module.python_lib_dir = os.path.join(dep_spec.prefix, self.python_lib_dir) + module.python_include_dir = os.path.join(dep_spec.prefix, self.python_include_dir) + module.site_packages_dir = os.path.join(dep_spec.prefix, self.site_packages_dir) + + # Make the site packages directory if it does not exist already. + mkdirp(module.site_packages_dir) + + # Set PYTHONPATH to include site-packages dir for the + # extension and any other python extensions it depends on. + python_paths = [] + for d in dep_spec.traverse(): + if d.package.extends(self.spec): + python_paths.append(os.path.join(d.prefix, self.site_packages_dir)) + os.environ['PYTHONPATH'] = ':'.join(python_paths) + +The first thing that happens here is that the ``python`` command is +inserted into module scope of the dependent. This allows most python +packages to have a very simple install method, like this: + +.. code-block:: python + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + +Python's ``setup_dependent_environment`` method also sets up smoe +other variables, creates a directory, and sets up the ``PYTHONPATH`` +so that dependent packages can find their dependencies at build time. + + +.. _packaging_extensions: + +Extensions +------------------------- + +Spack's support for package extensions is documented extensively in +:ref:`extensions`. This section documents how to make your own +extendable packages and extensions. + +To support extensions, a package needs to set its ``extendable`` +property to ``True``, e.g.: + +.. code-block:: python + + class Python(Package): + ... + extendable = True + ... + +To make a package into an extension, simply add simply add an +``extends`` call in the package definition, and pass it the name of an +extendable package: + +.. code-block:: python + + class PyNumpy(Package): + ... + extends('python') + ... + +Now, the ``py-numpy`` package can be used as an argument to ``spack +activate``. When it is activated, all the files in its prefix will be +symbolically linked into the prefix of the python package. 
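For example, assuming a ``py-numpy`` package defined as above (the commands here are a sketch of typical usage, not output from a real session), a user could install the extension and then activate it in its Python installation like this:

.. code-block:: sh

   # Build the extension against the python it extends, then
   # symlink its files into that python's prefix.
   $ spack install py-numpy
   $ spack activate py-numpy

After activation, ``numpy`` can be imported directly from the extended ``python`` installation, with no extra environment setup.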
+ +Sometimes, certain files in one package will conflict with those in +another, which means they cannot both be activated (symlinked) at the +same time. In this case, you can tell Spack to ignore those files +when it does the activation: + +.. code-block:: python + + class PyNose(Package): + ... + extends('python', ignore=r'bin/nosetests.*$') + ... + +The code above will prevent ``$prefix/bin/nosetests`` from being +linked in at activation time. + +.. note:: + + You can call *either* ``depends_on`` or ``extends`` on any one + package, but not both. For example you cannot both + ``depends_on('python')`` and ``extends(python)`` in the same + package. ``extends`` implies ``depends_on``. + + + +Activation & deactivation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Spack's ``Package`` class has default ``activate`` and ``deactivate`` +implementations that handle symbolically linking extensions' prefixes +into the directory of the parent package. However, extendable +packages can override these methdos to add custom activate/deactivate +logic of their own. For example, the ``activate`` and ``deactivate`` +methods in the Python class use the symbolic linking, but they also +handle details surrounding Python's ``.pth`` files, and other aspects +of Python packaging. + +Spack's extensions mechanism is designed to be extensible, so that +other packages (like Ruby, R, Perl, etc.) can provide their own +custom extension management logic, as they may not handle modules the +same way that Python does. + +Let's look at Python's activate function: + +.. code-block:: python + + def activate(self, ext_pkg, **kwargs): + kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs)) + super(Python, self).activate(ext_pkg, **kwargs) + + exts = spack.install_layout.extension_map(self.spec) + exts[ext_pkg.name] = ext_pkg.spec + self.write_easy_install_pth(exts) + +This function is called on the *extendee* (Python). It first calls +``activate`` in the superclass, which handles symlinking the +extension package's prefix into this package's prefix. It then does +some special handling of the ``easy-install.pth`` file, part of +Python's setuptools. + +Deactivate behaves similarly to activate, but it unlinks files: + +.. code-block:: python + + def deactivate(self, ext_pkg, **kwargs): + kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs)) + super(Python, self).deactivate(ext_pkg, **kwargs) + + exts = spack.install_layout.extension_map(self.spec) + if ext_pkg.name in exts: # Make deactivate idempotent. + del exts[ext_pkg.name] + self.write_easy_install_pth(exts) + +Both of these methods call some custom functions in the Python +package. See the source for Spack's Python package for details. + + +Activation arguments +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You may have noticed that the ``activate`` function defined above +takes keyword arguments. These are the keyword arguments from +``extends()``, and they are passed to both activate and deactivate. + +This capability allows an extension to customize its own activation by +passing arguments to the extendee. Extendees can likewise implement +custom ``activate()`` and ``deactivate()`` functions to suit their +needs. + +The only keyword argument supported by default is the ``ignore`` +argument, which can take a regex, list of regexes, or a predicate to +determine which files *not* to symlink during activation. + + .. _virtual-dependencies: Virtual dependencies @@ -1257,6 +1501,7 @@ explicitly. Concretization policies are discussed in more detail in :ref:`site-configuration`. 
Sites using Spack can customize them to match the preferences of their own users. +.. _spack-spec: ``spack spec`` ~~~~~~~~~~~~~~~~~~~~ @@ -1354,7 +1599,7 @@ information. .. _install-environment: -The Install environment +The install environment -------------------------- In general, you should not have to do much differently in your install @@ -1414,6 +1659,7 @@ easily: ``PATH`` Set to point to ``/bin`` directories of dpeendencies ``CMAKE_PREFIX_PATH`` Path to dependency prefixes for CMake ``PKG_CONFIG_PATH`` Path to any pkgconfig directories for dependencies + ``PYTHONPATH`` Path to site-packages dir of any python dependencies ======================= ============================= ``PATH`` is set up to point to dependencies ``/bin`` directories so @@ -1433,6 +1679,12 @@ dependencies using the GNU ``pkg-config`` tool. It is similar to ``CMAKE_PREFIX_PATH`` in that it allows a build to automatically discover its dependencies. +If you want to see the environment that a package will build with, or +if you want to run commands in that environment to test them out, you +can use the :ref:```spack env`` ` command, documented +below. + +.. _compiler-wrappers: Compiler interceptors ~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1498,7 +1750,6 @@ process runs. Packages are free to change the environment or to modify Spack internals, because each ``install()`` call has its own dedicated process. - .. _prefix-objects: Prefix objects @@ -1970,7 +2221,7 @@ File functions .. _pacakge-lifecycle: -Package Workflow Commands +Packaging workflow commands --------------------------------- When you are building packages, you will likely not get things @@ -2036,6 +2287,8 @@ this step if they have been. If Spack discovers that patches didn't apply cleanly on some previous run, then it will restage the entire package before patching. +.. _spack-restage: + ``spack restage`` ~~~~~~~~~~~~~~~~~ Restores the source code to pristine state, as it was before building. @@ -2048,6 +2301,7 @@ Does this in one of two ways: 2. If the source was checked out from a repository, this deletes the build directory and checks it out again. +.. _spack-clean: ``spack clean`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2057,6 +2311,8 @@ expanded/checked out source code *and* any downloaded archive. If build process will start from scratch. +.. _spack-purge: + ``spack purge`` ~~~~~~~~~~~~~~~~~ Cleans up all of Spack's temporary files. Use this to recover disk @@ -2102,7 +2358,7 @@ to get rid of the install prefix before you build again: spack uninstall -f -Graphing Dependencies +Graphing dependencies -------------------------- .. _spack-graph: @@ -2181,7 +2437,7 @@ example:: This graph can be provided as input to other graphing tools, such as those in `Graphviz `_. -Interactive Shell Support +Interactive shell support -------------------------- Spack provides some limited shell support to make life easier for @@ -2197,6 +2453,7 @@ For ``csh`` and ``tcsh`` run: ``spack cd`` will then be available. +.. _spack-cd: ``spack cd`` ~~~~~~~~~~~~~~~~~ @@ -2227,6 +2484,35 @@ directory, install directory, package directory) and others change to core spack locations. For example, ``spack cd -m`` will take you to the main python source directory of your spack install. +.. _spack-env: + +``spack env`` +~~~~~~~~~~~~~~~~~~~~~~ + +``spack env`` functions much like the standard unix ``env`` command, +but it takes a spec as an argument. You can use it to see the +environment variables that will be set when a particular build runs, +for example: + +.. 
code-block:: sh + + $ spack env mpileaks@1.1%intel + +This will display the entire environment that will be set when the +``mpileaks@1.1%intel`` build runs. + +To run commands in a package's build environment, you can simply provided them after the spec argument to ``spack env``: + +.. code-block:: sh + + $ spack cd mpileaks@1.1%intel + $ spack env mpileaks@1.1%intel ./configure + +This will cd to the build directory and then run ``configure`` in the +package's build environment. + + +.. _spack-location: ``spack location`` ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index 4936e3052c..a3e19cc800 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -3,199 +3,6 @@ Site-specific configuration =================================== -.. _mirrors: - -Mirrors ----------------------------- - -Some sites may not have access to the internet for fetching packages. -These sites will need a local repository of tarballs from which they -can get their files. Spack has support for this with *mirrors*. A -mirror is a URL that points to a directory, either on the local -filesystem or on some server, containing tarballs for all of Spack's -packages. - -Here's an example of a mirror's directory structure:: - - mirror/ - cmake/ - cmake-2.8.10.2.tar.gz - dyninst/ - DyninstAPI-8.1.1.tgz - DyninstAPI-8.1.2.tgz - libdwarf/ - libdwarf-20130126.tar.gz - libdwarf-20130207.tar.gz - libdwarf-20130729.tar.gz - libelf/ - libelf-0.8.12.tar.gz - libelf-0.8.13.tar.gz - libunwind/ - libunwind-1.1.tar.gz - mpich/ - mpich-3.0.4.tar.gz - mvapich2/ - mvapich2-1.9.tgz - -The structure is very simple. There is a top-level directory. The -second level directories are named after packages, and the third level -contains tarballs for each package, named as they were in the -package's fetch URL. - -``spack mirror`` -~~~~~~~~~~~~~~~~~~~~~~~ - -Mirrors are managed with the ``spack mirror`` command. The help for -``spack mirror`` looks like this:: - - $ spack mirror -h - usage: spack mirror [-h] SUBCOMMAND ... - - positional arguments: - SUBCOMMAND - create Create a directory to be used as a spack mirror, and fill - it with package archives. - add Add a mirror to Spack. - remove Remove a mirror by name. - list Print out available mirrors to the console. - - optional arguments: - -h, --help show this help message and exit - -The ``create`` command actually builds a mirror by fetching all of its -packages from the internet and checksumming them. - -The other three commands are for managing mirror configuration. They -control the URL(s) from which Spack downloads its packages. - - -``spack mirror create`` -~~~~~~~~~~~~~~~~~~~~~~~ - -You can create a mirror using the ``spack mirror create`` command, assuming -you're on a machine where you can access the internet. - -The command will iterate through all of Spack's packages and download -the safe ones into a directory structure like the one above. Here is -what it looks like: - - -.. 
code-block:: bash - - $ spack mirror create libelf libdwarf - ==> Created new mirror in spack-mirror-2014-06-24 - ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz - ########################################################## 81.6% - ==> Checksum passed for libelf@0.8.13 - ==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.13.tar.gz to mirror - ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz - ###################################################################### 98.6% - ==> Checksum passed for libelf@0.8.12 - ==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.12.tar.gz to mirror - ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz - ###################################################################### 97.3% - ==> Checksum passed for libdwarf@20130207 - ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130207.tar.gz to mirror - ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz - ######################################################## 78.9% - ==> Checksum passed for libdwarf@20130126 - ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130126.tar.gz to mirror - ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz - ############################################################# 84.7% - ==> Checksum passed for libdwarf@20130729 - ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror - -Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and -copy it over to the machine you want it hosted on. - -Custom package sets -^^^^^^^^^^^^^^^^^^^^^^^^ - -Normally, ``spack mirror create`` downloads all the archives it has -checksums for. If you want to only create a mirror for a subset of -packages, you can do that by supplying a list of package specs on the -command line after ``spack mirror create``. For example, this -command:: - - $ spack mirror create libelf@0.8.12: boost@1.44: - -Will create a mirror for libelf versions greater than or equal to -0.8.12 and boost versions greater than or equal to 1.44. - -Mirror files -^^^^^^^^^^^^^^^^^^^^^^^^ - -If you have a *very* large number of packages you want to mirror, you -can supply a file with specs in it, one per line:: - - $ cat specs.txt - libdwarf - libelf@0.8.12: - boost@1.44: - boost@1.39.0 - ... - $ spack mirror create -f specs.txt - ... - -This is useful if there is a specific suite of software managed by -your site. - - -``spack mirror add`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Once you have a mirrror, you need to let spack know about it. This is -relatively simple. First, figure out the URL for the mirror. If it's -a file, you can use a file URL like this one:: - - file:///Users/gamblin2/spack-mirror-2014-06-24 - -That points to the directory on the local filesystem. If it were on a -web server, you could use a URL like this one: - - https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 - -Spack will use the URL as the root for all of the packages it fetches. -You can tell your Spack installation to use that mirror like this: - -.. code-block:: bash - - $ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24 - -Each mirror has a name so that you can refer to it again later. 
- -``spack mirror list`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If you want to see all the mirrors Spack knows about you can run ``spack mirror list``:: - - $ spack mirror list - local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24 - -``spack mirror remove`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -And, if you want to remove a mirror, just remove it by name:: - - $ spack mirror remove local_filesystem - $ spack mirror list - ==> No mirrors configured. - -Mirror precedence -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Adding a mirror really just adds a section in ``~/.spackconfig``:: - - [mirror "local_filesystem"] - url = file:///Users/gamblin2/spack-mirror-2014-06-24 - [mirror "remote_server"] - url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 - -If you want to change the order in which mirrors are searched for -packages, you can edit this file and reorder the sections. Spack will -search the topmost mirror first and the bottom-most mirror last. - - .. _temp-space: Temporary space diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 114c7b6a35..306c8085aa 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -146,7 +146,7 @@ def create(path, specs, **kwargs): stage = None try: # create a subdirectory for the current package@version - archive_path = join_path(path, mirror_archive_path(spec)) + archive_path = os.path.abspath(join_path(path, mirror_archive_path(spec))) subdir = os.path.dirname(archive_path) mkdirp(subdir) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 137e8f8837..75e6142a9d 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -287,10 +287,9 @@ class Package(object): .. code-block:: python - p.do_clean() # runs make clean - p.do_clean_work() # removes the build directory and + p.do_clean() # removes the stage directory entirely + p.do_restage() # removes the build directory and # re-expands the archive. - p.do_clean_dist() # removes the stage directory entirely The convention used here is that a do_* function is intended to be called internally by Spack commands (in spack.cmd). 
These aren't for package diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index f6d9bfcf05..04422adb57 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -61,7 +61,7 @@ class GitFetchTest(MockPackagesTest): if self.repo.stage is not None: self.repo.stage.destroy() - self.pkg.do_clean_dist() + self.pkg.do_clean() def assert_rev(self, rev): @@ -93,7 +93,7 @@ class GitFetchTest(MockPackagesTest): untracked_file = 'foobarbaz' touch(untracked_file) self.assertTrue(os.path.isfile(untracked_file)) - self.pkg.do_clean_work() + self.pkg.do_restage() self.assertFalse(os.path.isfile(untracked_file)) self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 97c5b665e7..e1ab2cffe6 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -60,7 +60,7 @@ class HgFetchTest(MockPackagesTest): if self.repo.stage is not None: self.repo.stage.destroy() - self.pkg.do_clean_dist() + self.pkg.do_clean() def try_fetch(self, rev, test_file, args): @@ -87,7 +87,7 @@ class HgFetchTest(MockPackagesTest): untracked = 'foobarbaz' touch(untracked) self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_clean_work() + self.pkg.do_restage() self.assertFalse(os.path.isfile(untracked)) self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index a48a86dcc3..0159fb087f 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -60,7 +60,7 @@ class SvnFetchTest(MockPackagesTest): if self.repo.stage is not None: self.repo.stage.destroy() - self.pkg.do_clean_dist() + self.pkg.do_clean() def assert_rev(self, rev): @@ -99,7 +99,7 @@ class SvnFetchTest(MockPackagesTest): untracked = 'foobarbaz' touch(untracked) self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_clean_work() + self.pkg.do_restage() self.assertFalse(os.path.isfile(untracked)) self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) -- cgit v1.2.3-70-g09d2 From 447e2959476b84b317dbc5acba623a66968aa7c4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 24 Feb 2015 02:45:21 -0800 Subject: SPACK-62: fix for site docs. --- lib/spack/docs/site_configuration.rst | 52 ++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index a3e19cc800..e076aec763 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -1,6 +1,6 @@ .. _site-configuration: -Site-specific configuration +Site configuration =================================== .. _temp-space: @@ -134,3 +134,53 @@ Set concretizer to *your own* class instead of the default: concretizer = MyConcretizer() The next time you run Spack, your changes should take effect. + + +Profiling +~~~~~~~~~~~~~~~~~~~~~ + +Spack has some limited builtin support for profiling, and can report +statistics using standard Python timing tools. To use this feature, +supply ``-p`` to Spack on the command line, before any subcommands. + +.. _spack-p: + +``spack -p`` +^^^^^^^^^^^^^^^^^^ + +``spack -p`` output looks like this: + +.. 
code-block:: sh + + $ spack -p graph dyninst + o dyninst + |\ + | |\ + | o | libdwarf + |/ / + o | libelf + / + o boost + + 307670 function calls (305943 primitive calls) in 0.127 seconds + + Ordered by: internal time + + ncalls tottime percall cumtime percall filename:lineno(function) + 853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule) + 51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule) + 73961 0.010 0.000 0.010 0.000 {isinstance} + 1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile) + 32075 0.006 0.000 0.006 0.000 {hasattr} + 1760 0.004 0.000 0.004 0.000 {posix.stat} + 2240 0.004 0.000 0.004 0.000 {posix.lstat} + 2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile) + 771 0.004 0.000 0.077 0.000 inspect.py:518(findsource) + 2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects} + 30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects} + ... + +The bottom of the output shows the top most time consuming functions, +slowest on top. The profiling support is from Python's builtin tool, +`cProfile +`_. -- cgit v1.2.3-70-g09d2 From 5eb7e466545ffc2e954a283074ec661bca87afa3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 24 Feb 2015 10:26:26 -0800 Subject: Spell check docs --- lib/spack/docs/basic_usage.rst | 22 +++++++-------- lib/spack/docs/developer_guide.rst | 10 +++---- lib/spack/docs/mirrors.rst | 4 +-- lib/spack/docs/packaging_guide.rst | 52 +++++++++++++++++------------------ lib/spack/docs/site_configuration.rst | 8 +++--- 5 files changed, 48 insertions(+), 48 deletions(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 3d808708e1..ed79790bb9 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -13,7 +13,7 @@ Listing available packages The first thing you likely want to do with spack is to install some software. Before that, you need to know what's available. You can -see avaialble package names either using the :ref:`package-list`, or +see available package names either using the :ref:`package-list`, or using the commands below. .. _spack-list: @@ -27,7 +27,7 @@ Spack can install: .. command-output:: spack list The packages are listed by name in alphabetical order. You can also -do wildcard searches using ``*``: +do wildcats searches using ``*``: .. command-output:: spack list m* @@ -64,14 +64,14 @@ There are two sections in the output. *Safe versions* are ones that have already been checksummed. Spack goes a step further, though, and also shows you what versions are available out on the web---these are *remote versions*. Spack gets this information by scraping it -directly from webpages. Depending on the package, Spack may or may +directly from web pages. Depending on the package, Spack may or may not be able to find any remote versions. Installing and uninstalling ------------------------------ -Now that you know how to list avaiable packages and versions, you're +Now that you know how to list available packages and versions, you're ready to start installing things. .. _spack-install: @@ -80,7 +80,7 @@ ready to start installing things. ~~~~~~~~~~~~~~~~~~~~~ ``spack install`` will install any package shown by ``spack list``. -To install the latest version of a pacakge, along with all of its +To install the latest version of a package, along with all of its dependencies, simply give it a package name: .. code-block:: sh @@ -227,7 +227,7 @@ Packages are divided into groups according to their architecture and compiler. 
Within each group, Spack tries to keep the view simple, and only shows the version of installed packages. -In some cases, there may be differnt configurations of the *same* +In some cases, there may be different configurations of the *same* version of a package installed. For example, there are two installations of of ``libdwarf@20130729`` above. We can look at them in more detail using ``spack find -d``, and by asking only to show @@ -362,7 +362,7 @@ where the compiler is installed. For example:: intel@13.0.079 Or you can run ``spack compiler add`` with no arguments to force -autodetection. This is useful if you do not know where compilers are +auto-detection. This is useful if you do not know where compilers are installed, but you know that new compilers have been added to your ``PATH``. For example, using dotkit, you might do this:: @@ -398,7 +398,7 @@ matching Intel compiler was displayed. Manual compiler configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If autodetection fails, you can manually conigure a compiler by +If auto-detection fails, you can manually configure a compiler by editing your ``~/.spackconfig`` file. You can do this by running ``spack config edit``, which will open the file in your ``$EDITOR``. @@ -1021,7 +1021,7 @@ You can find extensions for your Python installation like this: ==> None activated. The extensions are a subset of what's returned by ``spack list``, and -they are packages like any ohter. They are installed into their own +they are packages like any other. They are installed into their own prefixes, and you can see this with ``spack find -p``: .. code-block:: sh @@ -1098,7 +1098,7 @@ Several things have happened here. The user requested that ``py-numpy`` be activated in the ``python`` installation it was built with. Spack knows that ``py-numpy`` depends on ``py-nose`` and ``py-setuptools``, so it activated those packages first. Finally, -once all dpeendencies were activated in the ``python`` installation, +once all dependencies were activated in the ``python`` installation, ``py-numpy`` was activated as well. If we run ``spack extensions`` again, we now see the three new @@ -1130,7 +1130,7 @@ packages listed as activated: py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1 -Now, when a user runs python, ``numpy`` will be avaiable for import +Now, when a user runs python, ``numpy`` will be available for import *without* the user having to explicitly loaded. ``python@2.7.8`` now acts like a system Python installation with ``numpy`` installed inside of it. diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index 969ed60b15..db47de80f5 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -50,11 +50,11 @@ as a descriptor for one or more instances of that template. Users express the configuration they want using a spec, and a package turns the spec into a complete build. -The obvious difficulty with this design is that users underspecify +The obvious difficulty with this design is that users under-specify what they want. To build a software package, the package object needs a *complete* specification. In Spack, if a spec describes only one instance of a package, then we say it is **concrete**. If a spec -could describes many instances, (i.e. it is underspecified in one way +could describes many instances, (i.e. it is under-specified in one way or another), then we say it is **abstract**. 
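As a rough illustration (the spec below is hypothetical, and the exact result depends on the compilers and concretization policies at your site), the ``spack spec`` command shows the concrete spec that Spack would build for an abstract one:

.. code-block:: sh

   # Ask Spack how it would concretize an abstract spec that only
   # constrains the MPI implementation.
   $ spack spec mpileaks ^mpich

Everything left unspecified in the abstract spec, such as versions, compilers, and architectures, is filled in when the spec is concretized.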
Spack's job is to take an *abstract* spec from the user, find a @@ -92,7 +92,7 @@ with a high level view of Spack's directory structure:: Spack is designed so that it could live within a `standard UNIX directory hierarchy `_, so ``lib``, ``var``, and ``opt`` all contain a ``spack`` subdirectory in case -Spack is installed alongside other software. Most of the insteresting +Spack is installed alongside other software. Most of the interesting parts of Spack live in ``lib/spack``. Files under ``var`` are created as needed, so there is no ``var`` directory when you initially clone Spack from the repository. @@ -123,13 +123,13 @@ Package-related modules Contains the :class:`Package ` class, which is the superclass for all packages in Spack. Methods on ``Package`` implement all phases of the :ref:`package lifecycle - ` and manage the build process. + ` and manage the build process. :mod:`spack.packages` Contains all of the packages in Spack and methods for managing them. Functions like :func:`packages.get ` and :func:`class_name_for_package_name - ` handle mapping packge module + ` handle mapping package module names to class names and dynamically instantiating packages by name from module files. diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index 57ca1af068..d732a3dd54 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -47,7 +47,7 @@ contains tarballs for each package, named after each package. In order to make mirror creation reasonably fast, we copy the tarball in its original format to the mirror directory, but we do not standardize on a particular compression algorithm, because this - would potentially require expanding and recompressing each archive. + would potentially require expanding and re-compressing each archive. .. _spack-mirror: @@ -161,7 +161,7 @@ your site. ``spack mirror add`` ---------------------------- -Once you have a mirrror, you need to let spack know about it. This is +Once you have a mirror, you need to let spack know about it. This is relatively simple. First, figure out the URL for the mirror. If it's a file, you can use a file URL like this one:: diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 8b4c0a4ce1..59ba63fa35 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -5,7 +5,7 @@ Packaging Guide This guide is intended for developers or administrators who want to package software so that Spack can install it. It assumes that you -have at least some familiarty with Python, and that you've read the +have at least some familiarity with Python, and that you've read the :ref:`basic usage guide `, especially the part about :ref:`specs `. @@ -459,7 +459,7 @@ example for ``libelf``: How many would you like to checksum? (default is 5, q to abort) This does the same thing that ``spack create`` does, but it allows you -to go back and add new vesrions easily as you need them (e.g., as +to go back and add new versions easily as you need them (e.g., as they're released). It fetches the tarballs you ask for and prints out a list of ``version`` commands ready to copy/paste into your package file: @@ -479,7 +479,7 @@ in ``http://example.com/downloads/`` for links to additional versions. If you need to search another path for download links, you can supply some extra attributes that control how your package finds new versions. See the documentation on `attribute_list_url`_ and -`attributee_list_depth`_. +`attribute_list_depth`_. .. 
note:: @@ -490,7 +490,7 @@ versions. See the documentation on `attribute_list_url`_ and manually add ``version`` calls yourself. * For ``spack checksum`` to work, Spack needs to be able to - ``import`` your pacakge in Python. That means it can't have any + ``import`` your package in Python. That means it can't have any syntax errors, or the ``import`` will fail. Use this once you've got your package in working order. @@ -643,7 +643,7 @@ Revisions revisions, you can use ``revision`` for branches, tags, and commits when you fetch with Mercurial. -As wtih git, you can fetch these versions using the ``spack install +As with git, you can fetch these versions using the ``spack install example@`` command-line syntax. .. _svn-fetch: @@ -927,7 +927,7 @@ the ``list_url``, because that is where links to old versions are: ~~~~~~~~~~~~~~~~~~~~~ ``libdwarf`` and many other packages have a listing of available -verisons on a single webpage, but not all do. For example, ``mpich`` +versions on a single webpage, but not all do. For example, ``mpich`` has a tarball URL that looks like this: url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" @@ -1095,7 +1095,7 @@ called before the ``install()`` method of any dependent packages. This allows dependencies to set up environment variables and other properties to be used by dependents. -The funciton declaration should look like this: +The function declaration should look like this: .. code-block:: python @@ -1115,7 +1115,7 @@ The arguments to this function are: * **spec**: the spec of the *dependency package* (the one the function is called on). * **dep_spec**: the spec of the dependent package (i.e. dep_spec depends on spec). -A goo example of using these is in the Python packge: +A good example of using these is in the Python package: .. code-block:: python @@ -1148,7 +1148,7 @@ packages to have a very simple install method, like this: def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) -Python's ``setup_dependent_environment`` method also sets up smoe +Python's ``setup_dependent_environment`` method also sets up some other variables, creates a directory, and sets up the ``PYTHONPATH`` so that dependent packages can find their dependencies at build time. @@ -1217,7 +1217,7 @@ Activation & deactivation Spack's ``Package`` class has default ``activate`` and ``deactivate`` implementations that handle symbolically linking extensions' prefixes into the directory of the parent package. However, extendable -packages can override these methdos to add custom activate/deactivate +packages can override these methods to add custom activate/deactivate logic of their own. For example, the ``activate`` and ``deactivate`` methods in the Python class use the symbolic linking, but they also handle details surrounding Python's ``.pth`` files, and other aspects @@ -1286,7 +1286,7 @@ Virtual dependencies ----------------------------- In some cases, more than one package can satisfy another package's -dependency. One way this can happen is if a pacakge depends on a +dependency. One way this can happen is if a package depends on a particular *interface*, but there are multiple *implementations* of the interface, and the package could be built with any of them. A *very* common interface in HPC is the `Message Passing Interface (MPI) @@ -1299,7 +1299,7 @@ MPI has several different implementations (e.g., `MPICH applications can be built with any one of them. 
Complicating matters, MPI does not have a standardized ABI, so a package built with one implementation cannot simply be relinked with another implementation. -Many pacakage managers handle interfaces like this by requiring many +Many package managers handle interfaces like this by requiring many similar package files, e.g., ``foo``, ``foo-mvapich``, ``foo-mpich``, but Spack avoids this explosion of package files by providing support for *virtual dependencies*. @@ -1325,7 +1325,7 @@ supplying a ``depends_on`` call in the package definition. For example: depends_on("adept-utils") depends_on("callpath") -Here, ``callpath`` and ``adept-utils`` are concrete pacakges, but +Here, ``callpath`` and ``adept-utils`` are concrete packages, but there is no actual package file for ``mpi``, so we say it is a *virtual* package. The syntax of ``depends_on``, is the same for both. If we look inside the package file of an MPI implementation, @@ -1349,7 +1349,7 @@ to ``provides`` to add constraints. This allows Spack to support the notion of *versioned interfaces*. The MPI standard has gone through many revisions, each with new functions added, and each revision of the standard has a version number. Some packages may require a recent -implementation that supports MPI-3 fuctions, but some MPI versions may +implementation that supports MPI-3 functions, but some MPI versions may only provide up to MPI-2. Others may need MPI 2.1 or higher. You can indicate this by adding a version constraint to the spec passed to ``provides``: @@ -1381,7 +1381,7 @@ constraints on the *providing* package, or the *provider*. The provider only provides the declared virtual spec when *it* matches the constraints in the when clause. Here, when ``mpich`` is at version 3 or higher, it provides MPI up to version 3. When ``mpich`` -is at version 1 or higher, it provides the MPI virtual pacakge at +is at version 1 or higher, it provides the MPI virtual package at version 1. The ``when`` qualifier ensures that Spack selects a suitably high @@ -1544,7 +1544,7 @@ software should be installed. Spack provides wrapper functions for ``configure`` and ``make`` so that you can call them in a similar way to how you'd call a shell -comamnd. In reality, these are Python functions. Spack provides +command. In reality, these are Python functions. Spack provides these functions to make writing packages more natural. See the section on :ref:`shell wrappers `. @@ -1603,7 +1603,7 @@ The install environment -------------------------- In general, you should not have to do much differently in your install -method than you would when installing a pacakge on the command line. +method than you would when installing a package on the command line. In fact, you may need to do *less* than you would on the command line. Spack tries to set environment variables and modify compiler calls so @@ -1626,7 +1626,7 @@ purposes: #. Make build systems use Spack's compiler wrappers for their builds. #. 
Allow build systems to find dependencies more easily -The Compiler enviroment variables that Spack sets are: +The Compiler environment variables that Spack sets are: ============ =============================== Variable Purpose @@ -1656,7 +1656,7 @@ entering ``install()`` so that packages can locate dependencies easily: ======================= ============================= - ``PATH`` Set to point to ``/bin`` directories of dpeendencies + ``PATH`` Set to point to ``/bin`` directories of dependencies ``CMAKE_PREFIX_PATH`` Path to dependency prefixes for CMake ``PKG_CONFIG_PATH`` Path to any pkgconfig directories for dependencies ``PYTHONPATH`` Path to site-packages dir of any python dependencies @@ -1742,7 +1742,7 @@ the command line. Forking ``install()`` ~~~~~~~~~~~~~~~~~~~~~ -To give packagers free reign over their install environemnt, Spack +To give packagers free reign over their install environment, Spack forks a new process each time it invokes a package's ``install()`` method. This allows packages to have their own completely sandboxed build environment, without impacting other jobs that the main Spack @@ -1870,7 +1870,7 @@ dependency version. You can use ``satisfies()`` to test for particular dependencies, e.g. ``foo.satisfies('^openmpi@1.2')`` or ``foo.satisfies('^mpich')``, -or you can use Python's builtin ``in`` operator: +or you can use Python's built-in ``in`` operator: .. code-block:: python @@ -1899,7 +1899,7 @@ Accessing Dependencies ~~~~~~~~~~~~~~~~~~~~~~~~~~ You may need to get at some file or binary that's in the prefix of one -of your dependencies. You can do that by subscripting the spec: +of your dependencies. You can do that by sub-scripting the spec: .. code-block:: python @@ -2219,7 +2219,7 @@ File functions Create an empty file at ``path``. -.. _pacakge-lifecycle: +.. _package-lifecycle: Packaging workflow commands --------------------------------- @@ -2248,7 +2248,7 @@ A typical package workflow might look like this: ... repeat clean/install until install works ... Below are some commands that will allow you some finer-grained -controll over the install process. +control over the install process. .. _spack-fetch: @@ -2325,7 +2325,7 @@ Keeping the stage directory on success ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ By default, ``spack install`` will delete the staging area once a -pacakge has been successfully built and installed. Use +package has been successfully built and installed. Use ``--keep-stage`` to leave the build directory intact: .. code-block:: sh @@ -2474,7 +2474,7 @@ build it: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13 ``spack cd`` here changed he current working directory to the -directory containing theexpanded ``libelf`` source code. There are a +directory containing the expanded ``libelf`` source code. There are a number of other places you can cd to in the spack directory hierarchy: .. command-output:: spack cd -h diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index e076aec763..44071bbfc6 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -9,7 +9,7 @@ Temporary space ---------------------------- .. warning:: Temporary space configuration will be moved to configuration files. - The intructions here are old and refer to ``__init__.py`` + The instructions here are old and refer to ``__init__.py`` By default, Spack will try to do all of its building in temporary space. There are two main reasons for this. 
First, Spack is designed @@ -93,7 +93,7 @@ the virtual spec to specs for possible implementations, and later, so there is no need to fully concretize the spec when returning it. -The ``DefaultConcretizer`` is intendend to provide sensible defaults +The ``DefaultConcretizer`` is intended to provide sensible defaults for each policy, but there are certain choices that it can't know about. For example, one site might prefer ``OpenMPI`` over ``MPICH``, or another might prefer an old version of some packages. These types @@ -139,7 +139,7 @@ The next time you run Spack, your changes should take effect. Profiling ~~~~~~~~~~~~~~~~~~~~~ -Spack has some limited builtin support for profiling, and can report +Spack has some limited built-in support for profiling, and can report statistics using standard Python timing tools. To use this feature, supply ``-p`` to Spack on the command line, before any subcommands. @@ -181,6 +181,6 @@ supply ``-p`` to Spack on the command line, before any subcommands. ... The bottom of the output shows the top most time consuming functions, -slowest on top. The profiling support is from Python's builtin tool, +slowest on top. The profiling support is from Python's built-in tool, `cProfile `_. -- cgit v1.2.3-70-g09d2 From ffdb90f39a56e5f0dacefd19ccf85ddd0e7a01c0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 24 Feb 2015 10:42:35 -0800 Subject: Last minute Qt bugfix. --- var/spack/packages/qt/package.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py index 1dc3e1e51d..4a9a867511 100644 --- a/var/spack/packages/qt/package.py +++ b/var/spack/packages/qt/package.py @@ -79,10 +79,9 @@ class Qt(Package): '-no-openvg', '-no-pch', # NIS is deprecated in more recent glibc - "-no-nis", - # For now, disable all the database drivers - "-no-sql-db2", "-no-sql-ibase", "-no-sql-mysql", "-no-sql-oci", "-no-sql-odbc", - "-no-sql-psql", "-no-sql-sqlite", "-no-sql-sqlite2", "-no-sql-tds"] + "-no-nis"] + # Don't disable all the database drivers, but should + # really get them into spack at some point. @when('@4') -- cgit v1.2.3-70-g09d2